diff --git a/daemon/src/daemon/adapters/camera/onvif_client.py b/daemon/src/daemon/adapters/camera/onvif_client.py index c3b0a43d..5ed221b1 100644 --- a/daemon/src/daemon/adapters/camera/onvif_client.py +++ b/daemon/src/daemon/adapters/camera/onvif_client.py @@ -2,7 +2,7 @@ from collections.abc import Awaitable, Callable, Mapping from dataclasses import dataclass -from typing import Protocol +from typing import Protocol, cast @dataclass(frozen=True) @@ -74,7 +74,7 @@ def parse_motion_event(payload: Mapping[str, object]) -> MotionEvent | None: topic = str(payload.get("topic", "")) message = payload.get("message") if isinstance(message, Mapping): - state_source: Mapping[str, object] = message + state_source: Mapping[str, object] = cast("Mapping[str, object]", message) else: state_source = payload diff --git a/daemon/src/daemon/adapters/llm/event_parser.py b/daemon/src/daemon/adapters/llm/event_parser.py index f95006b4..b669225d 100644 --- a/daemon/src/daemon/adapters/llm/event_parser.py +++ b/daemon/src/daemon/adapters/llm/event_parser.py @@ -2,7 +2,7 @@ import json from dataclasses import dataclass -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, cast if TYPE_CHECKING: from collections.abc import Iterable, Iterator, Mapping @@ -216,21 +216,24 @@ def _parse_payload(raw_data: str) -> dict[str, JsonValue] | None: payload: object = json.loads(raw_data) if not isinstance(payload, dict): return None - return _normalize_json_object(payload) + return _normalize_json_object(cast("dict[str, JsonValue]", payload)) def _parse_arguments(arguments_json: str) -> dict[str, JsonValue] | None: payload: object = json.loads(arguments_json) if not isinstance(payload, dict): return None - return _normalize_json_object(payload) + return _normalize_json_object(cast("dict[str, JsonValue]", payload)) def _dict_field(payload: dict[str, JsonValue], key: str) -> dict[str, JsonValue] | None: value = payload.get(key) if not isinstance(value, dict): return None - return 
_normalize_json_object(value) + normalized: dict[str, JsonValue] = {} + for nested_key, nested_value in cast("dict[object, object]", value).items(): + normalized[str(nested_key)] = _normalize_json_value(nested_value) + return normalized def _string_field(payload: dict[str, JsonValue], key: str) -> str | None: @@ -255,9 +258,9 @@ def _normalize_json_value(value: object) -> JsonValue: if value is None or isinstance(value, str | int | float | bool): return value if isinstance(value, list): - return _normalize_json_array(value) + return _normalize_json_array(cast("list[object]", value)) if isinstance(value, dict): - return _normalize_json_object(value) + return _normalize_json_object(cast("dict[str, JsonValue]", value)) return str(value) diff --git a/daemon/src/daemon/adapters/llm/openai_transport.py b/daemon/src/daemon/adapters/llm/openai_transport.py index b86e2f49..01e3d148 100644 --- a/daemon/src/daemon/adapters/llm/openai_transport.py +++ b/daemon/src/daemon/adapters/llm/openai_transport.py @@ -358,12 +358,14 @@ def _assert_clean_payload(value: object, *, prefix: str) -> None: return if isinstance(value, list): - for index, item in enumerate(value): + items = cast("list[object]", value) + for index, item in enumerate(items): _assert_clean_payload(item, prefix=f"{prefix}[{index}]") return if isinstance(value, dict): - for key, item in value.items(): + mapping = cast("dict[object, object]", value) + for key, item in mapping.items(): if key in SKIP_PRIVACY_KEYS and isinstance(item, str): continue _assert_clean_payload(item, prefix=f"{prefix}.{key}") diff --git a/daemon/src/daemon/app.py b/daemon/src/daemon/app.py index 46eb31d3..1a519cf2 100644 --- a/daemon/src/daemon/app.py +++ b/daemon/src/daemon/app.py @@ -527,7 +527,8 @@ async def post_debug_inject(payload: dict[str, object]) -> dict[str, object]: raw_data = payload.get("data", {}) if not isinstance(raw_data, dict): raise HTTPException(status_code=400, detail="data must be an object") - data = {str(key): value 
for key, value in raw_data.items()} + raw_mapping = cast("dict[object, object]", raw_data) + data: dict[str, object] = {str(key): value for key, value in raw_mapping.items()} log_buffer.log("INFO", "debug", f"inject:{event_type}", {"payload": data}) await event_bus.publish(event_type, data) return {"injected": True, "event_id": f"debug-{len(recent_events) + 1}"} diff --git a/daemon/src/daemon/audit/langfuse_export.py b/daemon/src/daemon/audit/langfuse_export.py index 59d097d4..a48c2bc7 100644 --- a/daemon/src/daemon/audit/langfuse_export.py +++ b/daemon/src/daemon/audit/langfuse_export.py @@ -70,7 +70,7 @@ class LangfuseConfig: class LangfuseMirror: client: LangfuseClientLike logger: LoggerLike = field(default_factory=lambda: logging.getLogger("daemon.audit.langfuse")) - _llm_span_ids: dict[str, str] = field(default_factory=dict) + _llm_span_ids: dict[str, str] = field(default_factory=lambda: cast("dict[str, str]", {})) async def export_record(self, *, record_type: str, payload: dict[str, JSONValue]) -> None: trace_context = { diff --git a/daemon/src/daemon/audit/local_sink.py b/daemon/src/daemon/audit/local_sink.py index f80aaa82..f6affb19 100644 --- a/daemon/src/daemon/audit/local_sink.py +++ b/daemon/src/daemon/audit/local_sink.py @@ -787,14 +787,16 @@ def _default_role(kind: str) -> str: def _json_dict(value: object) -> dict[str, JSONValue]: if isinstance(value, dict): - return {str(key): _coerce_json_value(item) for key, item in value.items()} + mapping = cast("dict[object, object]", value) + return {str(key): _coerce_json_value(item) for key, item in mapping.items()} return {} def _json_dict_list(value: object) -> list[dict[str, JSONValue]]: if not isinstance(value, list): return [] - return [_json_dict(item) for item in value] + items = cast("list[object]", value) + return [_json_dict(item) for item in items] def _optional_json(value: object) -> dict[str, JSONValue] | list[JSONValue] | None: @@ -818,9 +820,11 @@ def _coerce_json_value(value: object) -> 
JSONValue: if isinstance(value, bool | int | float | str): return value if isinstance(value, list): - return [_coerce_json_value(item) for item in value] + items = cast("list[object]", value) + return [_coerce_json_value(item) for item in items] if isinstance(value, dict): - return {str(key): _coerce_json_value(item) for key, item in value.items()} + mapping = cast("dict[object, object]", value) + return {str(key): _coerce_json_value(item) for key, item in mapping.items()} return str(value) diff --git a/daemon/src/daemon/audit/redactor.py b/daemon/src/daemon/audit/redactor.py index 1fdb3b82..16573d78 100644 --- a/daemon/src/daemon/audit/redactor.py +++ b/daemon/src/daemon/audit/redactor.py @@ -1,7 +1,7 @@ from __future__ import annotations import re -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, cast from daemon.privacy import anonymize, is_clean @@ -55,11 +55,13 @@ def mask_langfuse_value(value: object) -> object: if isinstance(value, str): return mask_langfuse_text(value) if isinstance(value, list): - return [mask_langfuse_value(item) for item in value] + items = cast("list[object]", value) + return [mask_langfuse_value(item) for item in items] if isinstance(value, dict): + mapping = cast("dict[object, object]", value) return { str(mask_langfuse_text(str(key))): mask_langfuse_value(item) - for key, item in value.items() + for key, item in mapping.items() } return value diff --git a/daemon/src/daemon/config/loader.py b/daemon/src/daemon/config/loader.py index 95f40ebc..1ae62215 100644 --- a/daemon/src/daemon/config/loader.py +++ b/daemon/src/daemon/config/loader.py @@ -2,6 +2,7 @@ import logging from pathlib import Path +from typing import cast import yaml from pydantic import BaseModel @@ -68,7 +69,7 @@ def parse_persona_document(text: str) -> PersonaConfigDocument: _, raw_frontmatter, body = text.split("---\n", 2) except ValueError as exc: raise ValueError("persona.md frontmatter is malformed") from exc - frontmatter = 
yaml.safe_load(raw_frontmatter) or {} + frontmatter = cast("dict[str, object]", yaml.safe_load(raw_frontmatter) or {}) parsed_frontmatter = PersonaFrontmatter.model_validate(frontmatter) return PersonaConfigDocument.model_validate({ "name": parsed_frontmatter.guardian_name, @@ -85,7 +86,7 @@ def _load_yaml_document[YamlDocument: BaseModel]( path: Path, model_cls: type[YamlDocument], ) -> YamlDocument: - payload = yaml.safe_load(path.read_text(encoding="utf-8")) or {} + payload = cast("dict[str, object]", yaml.safe_load(path.read_text(encoding="utf-8")) or {}) return model_cls.model_validate(payload) diff --git a/daemon/src/daemon/contracts/adapters.py b/daemon/src/daemon/contracts/adapters.py index 291de2f7..063d89d9 100644 --- a/daemon/src/daemon/contracts/adapters.py +++ b/daemon/src/daemon/contracts/adapters.py @@ -46,7 +46,9 @@ class DialogueResponse(ContractModel): text: str model: str finish_reason: str - tool_calls: list[DialogueToolCall] = Field(default_factory=list) + tool_calls: list[DialogueToolCall] = Field( + default_factory=lambda: cast("list[DialogueToolCall]", []) + ) def _freeze_json_value(value: JSONValue) -> object: @@ -59,9 +61,11 @@ def _freeze_json_value(value: JSONValue) -> object: def _thaw_json_value(value: object) -> JSONValue: if isinstance(value, Mapping): - return {key: _thaw_json_value(item) for key, item in value.items()} + mapping = cast("Mapping[str, object]", value) + return {key: _thaw_json_value(item) for key, item in mapping.items()} if isinstance(value, tuple): - return [_thaw_json_value(item) for item in value] + items = cast("tuple[object, ...]", value) + return [_thaw_json_value(item) for item in items] if value is None or isinstance(value, str | int | float | bool): return cast("JSONValue", value) msg = f"Unsupported JSON-like value: {type(value)!r}" diff --git a/daemon/src/daemon/contracts/archive.py b/daemon/src/daemon/contracts/archive.py index 96a21514..fc2808a5 100644 --- a/daemon/src/daemon/contracts/archive.py +++ 
b/daemon/src/daemon/contracts/archive.py @@ -2,7 +2,7 @@ from dataclasses import dataclass, field from datetime import date, datetime -from typing import Protocol +from typing import Protocol, cast from uuid import UUID @@ -25,7 +25,7 @@ class DailyReportRecord: place_id: str summary_text: str session_refs: tuple[dict[str, object], ...] = () - fact_stats: dict[str, object] = field(default_factory=dict) + fact_stats: dict[str, object] = field(default_factory=lambda: cast("dict[str, object]", {})) class ArchiveRepositoryLike(Protocol): diff --git a/daemon/src/daemon/contracts/audio.py b/daemon/src/daemon/contracts/audio.py index 23975857..9741a7ee 100644 --- a/daemon/src/daemon/contracts/audio.py +++ b/daemon/src/daemon/contracts/audio.py @@ -2,7 +2,7 @@ from datetime import datetime from enum import StrEnum -from typing import Literal +from typing import Literal, cast from pydantic import Field, field_validator @@ -26,8 +26,10 @@ class ActivationEvent(ContractModel): def _coerce_source_signals(cls, value: object) -> object: if not isinstance(value, list): return value + items = cast("list[object]", value) return [ - item if isinstance(item, ActivationSignal) else ActivationSignal(item) for item in value + item if isinstance(item, ActivationSignal) else ActivationSignal(str(item)) + for item in items ] @@ -47,7 +49,9 @@ class AudioCapability(StrEnum): class AudioDeviceInfo(ContractModel): device_id: str device_type: str - capabilities: list[AudioCapability] = Field(default_factory=list) + capabilities: list[AudioCapability] = Field( + default_factory=lambda: cast("list[AudioCapability]", []) + ) sample_rate: int = Field(gt=0) channels: int = Field(gt=0) @@ -56,14 +60,18 @@ class AudioDeviceInfo(ContractModel): def _coerce_capabilities(cls, value: object) -> object: if not isinstance(value, list): return value + items = cast("list[object]", value) return [ - item if isinstance(item, AudioCapability) else AudioCapability(item) for item in value + item if 
isinstance(item, AudioCapability) else AudioCapability(str(item)) + for item in items ] class AudioStatusResponse(ContractModel): device: Literal["single_mic", "mic_array"] - capabilities: list[AudioCapability] = Field(default_factory=list) + capabilities: list[AudioCapability] = Field( + default_factory=lambda: cast("list[AudioCapability]", []) + ) sample_rate: int = Field(gt=0) channels: int = Field(gt=0) doa_available: bool @@ -73,8 +81,10 @@ class AudioStatusResponse(ContractModel): def _coerce_status_capabilities(cls, value: object) -> object: if not isinstance(value, list): return value + items = cast("list[object]", value) return [ - item if isinstance(item, AudioCapability) else AudioCapability(item) for item in value + item if isinstance(item, AudioCapability) else AudioCapability(str(item)) + for item in items ] diff --git a/daemon/src/daemon/contracts/config_docs.py b/daemon/src/daemon/contracts/config_docs.py index 1f61bbe3..fe800401 100644 --- a/daemon/src/daemon/contracts/config_docs.py +++ b/daemon/src/daemon/contracts/config_docs.py @@ -2,6 +2,7 @@ from datetime import datetime from enum import StrEnum +from typing import cast from pydantic import Field @@ -89,7 +90,9 @@ class ConfigDocumentWriteRequest[TDocument](ContractModel): class ConfigDocumentWriteResponse(ContractModel): status: ConfigWriteStatus = Field(strict=False) - validation_errors: list[ValidationErrorItem] = Field(default_factory=list) + validation_errors: list[ValidationErrorItem] = Field( + default_factory=lambda: cast("list[ValidationErrorItem]", []) + ) audit_id: str diff --git a/daemon/src/daemon/contracts/memory.py b/daemon/src/daemon/contracts/memory.py index 034c4769..ef56a4f7 100644 --- a/daemon/src/daemon/contracts/memory.py +++ b/daemon/src/daemon/contracts/memory.py @@ -34,7 +34,7 @@ class MemoryMatch(ContractModel): class MemoryResult(ContractModel): - matches: list[MemoryMatch] = Field(default_factory=list) + matches: list[MemoryMatch] = 
Field(default_factory=list[MemoryMatch]) class ReviewDecision(StrEnum): diff --git a/daemon/src/daemon/core/logging.py b/daemon/src/daemon/core/logging.py index 0efc2fc6..030e7ff2 100644 --- a/daemon/src/daemon/core/logging.py +++ b/daemon/src/daemon/core/logging.py @@ -3,6 +3,7 @@ import re from collections import deque from datetime import UTC, datetime +from typing import cast import structlog @@ -12,9 +13,11 @@ def _sanitize(value: object) -> object: if isinstance(value, dict): - return {key: _sanitize(item) for key, item in value.items()} + mapping = cast("dict[str, object]", value) + return {key: _sanitize(item) for key, item in mapping.items()} if isinstance(value, list): - return [_sanitize(item) for item in value] + items = cast("list[object]", value) + return [_sanitize(item) for item in items] if isinstance(value, str): redacted = PHONE_RE.sub("", value) return JAPANESE_NAME_RE.sub("", redacted) diff --git a/daemon/src/daemon/harness/tool_loop.py b/daemon/src/daemon/harness/tool_loop.py index 71620a4b..8c9de73e 100644 --- a/daemon/src/daemon/harness/tool_loop.py +++ b/daemon/src/daemon/harness/tool_loop.py @@ -970,14 +970,13 @@ def _truncate_json_value(value: JSONValue, *, max_bytes: int) -> JSONValue: if isinstance(normalized, str): return normalized[: max(1, max_bytes // 4)] if isinstance(normalized, list): - items: list[JSONValue] = [cast("JSONValue", item) for item in normalized] + items = cast("list[JSONValue]", normalized) while items and len(json.dumps(items, ensure_ascii=False).encode()) > max_bytes: items.pop() return cast("JSONValue", items) if isinstance(normalized, dict): - trimmed: dict[str, JSONValue] = { - str(key): cast("JSONValue", nested_value) for key, nested_value in normalized.items() - } + mapping = cast("dict[str, JSONValue]", normalized) + trimmed = dict(mapping) while trimmed and len(json.dumps(trimmed, ensure_ascii=False).encode()) > max_bytes: last_key = next(reversed(trimmed.keys())) trimmed.pop(last_key) @@ -987,19 +986,22 @@ 
def _truncate_json_value(value: JSONValue, *, max_bytes: int) -> JSONValue: def _normalize_json_value(value: object) -> object: if isinstance(value, dict): + mapping = cast("dict[object, object]", value) return { str(key): cast("JSONValue", _normalize_json_value(nested_value)) - for key, nested_value in list(value.items())[:10] + for key, nested_value in list(mapping.items())[:10] } if isinstance(value, list): - return [_normalize_json_value(item) for item in value[:20]] + items = cast("list[object]", value) + return [_normalize_json_value(item) for item in items[:20]] return value def _coerce_json_dict(value: object) -> dict[str, JSONValue]: if not isinstance(value, dict): return {} + mapping = cast("dict[object, object]", value) return { str(key): cast("JSONValue", _normalize_json_value(nested_value)) - for key, nested_value in value.items() + for key, nested_value in mapping.items() } diff --git a/daemon/src/daemon/harness/tool_registry.py b/daemon/src/daemon/harness/tool_registry.py index 19024de5..580bd9fc 100644 --- a/daemon/src/daemon/harness/tool_registry.py +++ b/daemon/src/daemon/harness/tool_registry.py @@ -3,7 +3,7 @@ import re from dataclasses import dataclass, field from enum import StrEnum -from typing import TYPE_CHECKING, Protocol +from typing import TYPE_CHECKING, Protocol, cast import jsonschema from pydantic import BaseModel, ValidationError @@ -13,6 +13,8 @@ if TYPE_CHECKING: from collections.abc import Mapping + from daemon.contracts.memory import JSONValue + MAX_STRING_LENGTH = 512 _PATH_FIELD_PATTERN = re.compile(r"(?:^|_)(?:paths?|files?|dirs?|directories)(?:$|_)") @@ -53,7 +55,9 @@ class RegisteredTool: arguments_model: type[BaseModel] executor: ToolExecutor pre_execution_phrase_id: str | None = None - path_allowlist: dict[str, tuple[str, ...]] = field(default_factory=dict) + path_allowlist: dict[str, tuple[str, ...]] = field( + default_factory=lambda: cast("dict[str, tuple[str, ...]]", {}) + ) def definition(self) -> ToolDefinition: 
return ToolDefinition( @@ -100,7 +104,8 @@ async def dispatch(self, name: str, arguments: Mapping[str, object]) -> object: if not isinstance(normalized, dict): msg = "tool arguments must normalize to an object" raise ToolArgumentValidationError(msg) - return _coerce_json_value(await tool.executor(normalized)) + normalized_arguments = cast("dict[str, object]", normalized) + return _coerce_json_value(await tool.executor(normalized_arguments)) def _sanitize_object( @@ -132,22 +137,24 @@ def _sanitize_value( ) return sanitized if isinstance(value, list): + items = cast("list[object]", value) return [ _sanitize_value( item, field_name=field_name, path_allowlist=path_allowlist, ) - for item in value + for item in items ] if isinstance(value, dict): + mapping = cast("dict[str, object]", value) return { nested_key: _sanitize_value( nested_value, field_name=nested_key, path_allowlist=path_allowlist, ) - for nested_key, nested_value in value.items() + for nested_key, nested_value in mapping.items() } return value @@ -181,11 +188,13 @@ def _is_path_under_base(path_value: str, base_prefix: str) -> bool: return normalized_path == normalized_base or normalized_path.startswith(f"{normalized_base}/") -def _coerce_json_value(value: object) -> object: +def _coerce_json_value(value: object) -> JSONValue: if value is None or isinstance(value, str | int | float | bool): return value if isinstance(value, list): - return [_coerce_json_value(item) for item in value] + items = cast("list[object]", value) + return [_coerce_json_value(item) for item in items] if isinstance(value, dict): - return {str(key): _coerce_json_value(nested_value) for key, nested_value in value.items()} + mapping = cast("dict[object, object]", value) + return {str(key): _coerce_json_value(nested_value) for key, nested_value in mapping.items()} return str(value) diff --git a/daemon/src/daemon/memory/mem0_bridge/backfill.py b/daemon/src/daemon/memory/mem0_bridge/backfill.py index f38f6da2..3426eb6a 100644 --- 
a/daemon/src/daemon/memory/mem0_bridge/backfill.py +++ b/daemon/src/daemon/memory/mem0_bridge/backfill.py @@ -3,7 +3,7 @@ import logging from dataclasses import dataclass from decimal import Decimal -from typing import TYPE_CHECKING, Protocol +from typing import TYPE_CHECKING, Protocol, cast import sqlalchemy as sa @@ -210,8 +210,9 @@ async def _fetch_draft_event_ids( for row in rows: raw = row.get("source_event_ids") if isinstance(raw, list) and raw: - draft_event_map[row["id"]] = raw - all_event_ids.extend(raw) + event_ids = cast("list[object]", raw) + draft_event_map[row["id"]] = event_ids + all_event_ids.extend(event_ids) return draft_event_map, all_event_ids @@ -270,7 +271,7 @@ def _float_or_none(value: object) -> float | None: return float(value) try: return float(str(value)) except (TypeError, ValueError): return None diff --git a/daemon/src/daemon/memory/mem0_bridge/store.py b/daemon/src/daemon/memory/mem0_bridge/store.py index b43bc113..2df05534 100644 --- a/daemon/src/daemon/memory/mem0_bridge/store.py +++ b/daemon/src/daemon/memory/mem0_bridge/store.py @@ -268,7 +268,8 @@ def _coerce_results(payload: object) -> tuple[dict[str, object], ...]: for item in raw_results: if not isinstance(item, dict): raise Mem0ShapeError.item_not_dict() - normalized.append({str(key): value for key, value in item.items()}) + item_mapping = cast("dict[object, object]", item) + normalized.append({str(key): value for key, value in item_mapping.items()}) return tuple(normalized) diff --git a/daemon/src/daemon/memory/read/search.py b/daemon/src/daemon/memory/read/search.py index b80d7b2b..6735edc8 100644 --- a/daemon/src/daemon/memory/read/search.py +++ b/daemon/src/daemon/memory/read/search.py @@ -781,8 +781,12 @@ def _days_since(now: datetime, then: datetime | None) -> float | None: def _uuid_strings(values: object) -> tuple[str, ...]: - if isinstance(values, list | tuple): - return tuple(str(value) for value in values) + if
isinstance(values, list): + list_items = cast("list[object]", values) + return tuple(str(value) for value in list_items) + if isinstance(values, tuple): + tuple_items = cast("tuple[object, ...]", values) + return tuple(str(value) for value in tuple_items) return () diff --git a/daemon/src/daemon/memory/retention/rollup_generator.py b/daemon/src/daemon/memory/retention/rollup_generator.py index 87fea914..5376ea3f 100644 --- a/daemon/src/daemon/memory/retention/rollup_generator.py +++ b/daemon/src/daemon/memory/retention/rollup_generator.py @@ -5,7 +5,7 @@ from contextlib import AbstractAsyncContextManager from dataclasses import dataclass from datetime import UTC, date, datetime, timedelta -from typing import Protocol +from typing import Protocol, cast from uuid import uuid4 import sqlalchemy as sa @@ -205,7 +205,8 @@ def _aggregate_child_rollup_counts(self, rows: list[dict[str, object]]) -> dict[ parsed: object = json.loads(raw_counts) if isinstance(raw_counts, str) else raw_counts if not isinstance(parsed, dict): continue - for key, val in parsed.items(): + parsed_counts = cast("dict[object, object]", parsed) + for key, val in parsed_counts.items(): if isinstance(key, str) and isinstance(val, int): aggregated[key] = aggregated.get(key, 0) + val return aggregated @@ -312,7 +313,7 @@ def _result_to_rows(result: object) -> list[dict[str, object]]: def _parse_json_object(raw: object) -> dict[str, object] | None: parsed: object = json.loads(raw) if isinstance(raw, str) else raw - return parsed if isinstance(parsed, dict) else None + return cast("dict[str, object]", parsed) if isinstance(parsed, dict) else None def _merge_trends(target: dict[str, object], incoming: dict[str, object]) -> None: @@ -328,6 +329,6 @@ def _merge_trends(target: dict[str, object], incoming: dict[str, object]) -> Non if isinstance(value, dict): nested = target.get(key) if not isinstance(nested, dict): - nested = {} + nested = cast("dict[str, object]", {}) target[key] = nested - 
_merge_trends(nested, value) + _merge_trends(cast("dict[str, object]", nested), cast("dict[str, object]", value)) diff --git a/daemon/src/daemon/memory/review/commands.py b/daemon/src/daemon/memory/review/commands.py index cd77f673..4c5cea65 100644 --- a/daemon/src/daemon/memory/review/commands.py +++ b/daemon/src/daemon/memory/review/commands.py @@ -319,20 +319,22 @@ async def _source_session_id( raw_event_ids = draft_rows[0].get("source_event_ids") if not isinstance(raw_event_ids, list) or not raw_event_ids: return None + event_ids = cast("list[object]", raw_event_ids) session_statement = sa.select( observation_events.c.id.label("event_id"), observation_events.c.payload["session_id"].astext.label("session_id"), ).where( - observation_events.c.id.in_(raw_event_ids), + observation_events.c.id.in_(event_ids), observation_events.c.payload["session_id"].astext.isnot(None), ) session_rows = result_to_rows(await session.execute(session_statement)) event_session_map: dict[object, str] = {} for row in session_rows: + event_id = row.get("event_id") session_id = row.get("session_id") - if isinstance(session_id, str) and session_id.strip(): - event_session_map[row["event_id"]] = session_id - for event_id in raw_event_ids: + if event_id is not None and isinstance(session_id, str) and session_id.strip(): + event_session_map[event_id] = session_id + for event_id in event_ids: session_id = event_session_map.get(event_id) if session_id is not None: return session_id diff --git a/daemon/src/daemon/memory/write/embeddings.py b/daemon/src/daemon/memory/write/embeddings.py index 2e644431..f2746e83 100644 --- a/daemon/src/daemon/memory/write/embeddings.py +++ b/daemon/src/daemon/memory/write/embeddings.py @@ -170,9 +170,11 @@ def _load_sentence_transformer(model_name: str, device: str | None) -> SentenceE def _coerce_float_matrix(encoded: object) -> list[list[float]]: raw_value = encoded.tolist() if isinstance(encoded, SupportsToList) else encoded if isinstance(raw_value, list) and 
raw_value and isinstance(raw_value[0], list): - return [[float(value) for value in row] for row in raw_value] + rows = cast("list[list[object]]", raw_value) + return [[float(cast("int | float | str", value)) for value in row] for row in rows] if isinstance(raw_value, list): - return [[float(value) for value in raw_value]] + row = cast("list[object]", raw_value) + return [[float(cast("int | float | str", value)) for value in row]] msg = "Embedding model returned an unsupported matrix payload" raise TypeError(msg) diff --git a/daemon/src/daemon/memory/write/extractor.py b/daemon/src/daemon/memory/write/extractor.py index 62099a0a..1baae1cd 100644 --- a/daemon/src/daemon/memory/write/extractor.py +++ b/daemon/src/daemon/memory/write/extractor.py @@ -266,7 +266,8 @@ def _parse_json_array(payload: str) -> list[dict[str, object]]: if not isinstance(parsed, list): msg = "Expected pass-1 extractor JSON array" raise ValueError(msg) - return [cast("dict[str, object]", item) for item in parsed if isinstance(item, dict)] + items = cast("list[object]", parsed) + return [cast("dict[str, object]", item) for item in items if isinstance(item, dict)] def _parse_json_object(payload: str) -> dict[str, object]: diff --git a/daemon/src/daemon/memory/write/tool.py b/daemon/src/daemon/memory/write/tool.py index 022ac82d..463505d0 100644 --- a/daemon/src/daemon/memory/write/tool.py +++ b/daemon/src/daemon/memory/write/tool.py @@ -100,7 +100,8 @@ async def __call__(self, payload: dict[str, object]) -> None: self._default_source_summary_draft_id, ) - normalized = _normalize_transcript([_coerce_message(item) for item in raw_transcript]) + transcript_items = cast("list[object]", raw_transcript) + normalized = _normalize_transcript([_coerce_message(item) for item in transcript_items]) self._spawn( place_id=place_id, session_transcript=normalized, @@ -181,7 +182,8 @@ def _coerce_evidence_event_ids( if value is None: return default if isinstance(value, list): - return tuple(value) + items = 
cast("list[object]", value) + return tuple(items) return default diff --git a/daemon/src/daemon/pipeline/latency_metrics.py b/daemon/src/daemon/pipeline/latency_metrics.py index e813e6b1..e713bf5d 100644 --- a/daemon/src/daemon/pipeline/latency_metrics.py +++ b/daemon/src/daemon/pipeline/latency_metrics.py @@ -3,7 +3,7 @@ import math import time from dataclasses import dataclass, field -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, cast if TYPE_CHECKING: from collections.abc import Callable @@ -22,7 +22,7 @@ class LatencyMetrics: response_completed_at: float | None = None tts_finished_at: float | None = None _last_sentence_end_at: float | None = None - _sentence_gaps_ms: list[int] = field(default_factory=list) + _sentence_gaps_ms: list[int] = field(default_factory=lambda: cast("list[int]", [])) def mark_activation(self, *, activation_monotonic: float | None = None) -> None: if self.activation_at is None: diff --git a/daemon/src/daemon/security/websocket_auth.py b/daemon/src/daemon/security/websocket_auth.py index bf2195a4..6b3392a4 100644 --- a/daemon/src/daemon/security/websocket_auth.py +++ b/daemon/src/daemon/security/websocket_auth.py @@ -1,6 +1,7 @@ from __future__ import annotations from dataclasses import dataclass, field +from typing import cast from fastapi import WebSocket, status @@ -14,7 +15,9 @@ @dataclass(slots=True) class ActiveWebSocketRegistry: - _connections: dict[str, list[WebSocket]] = field(default_factory=dict) + _connections: dict[str, list[WebSocket]] = field( + default_factory=lambda: cast("dict[str, list[WebSocket]]", {}) + ) def register(self, token_id: str, websocket: WebSocket) -> None: connections = self._connections.setdefault(token_id, []) diff --git a/daemon/src/daemon/tools/evaluate_detection.py b/daemon/src/daemon/tools/evaluate_detection.py index be3f9c94..141c1af6 100644 --- a/daemon/src/daemon/tools/evaluate_detection.py +++ b/daemon/src/daemon/tools/evaluate_detection.py @@ -6,6 +6,7 @@ import wave from 
collections.abc import Callable from pathlib import Path +from typing import cast import numpy as np import numpy.typing as npt @@ -35,9 +36,9 @@ class EvaluationResult(ContractModel): # type: ignore[explicit-any] recording_id: str metrics: DetectionMetrics event_count: int - detection_latencies_ms: list[float] = Field(default_factory=list) - false_positive_times_s: list[float] = Field(default_factory=list) - vad_overlap_ratios: list[float] = Field(default_factory=list) + detection_latencies_ms: list[float] = Field(default_factory=lambda: cast("list[float]", [])) + false_positive_times_s: list[float] = Field(default_factory=lambda: cast("list[float]", [])) + vad_overlap_ratios: list[float] = Field(default_factory=lambda: cast("list[float]", [])) class DetectionEventResult(ContractModel): # type: ignore[explicit-any] diff --git a/docs/adr/decision-log.md b/docs/adr/decision-log.md index 39b92ac2..c457cfe1 100644 --- a/docs/adr/decision-log.md +++ b/docs/adr/decision-log.md @@ -127,3 +127,4 @@ individual decision artifact under `docs/adr/decisions/`. 
- 2026-04-07T23:23:27Z | adr_required=false | Address CodeRabbit review findings on PR #161 | [details](decisions/2026-04-07-address-coderabbit-review-findings-on-pr-161.md) - 2026-04-08T02:19:19Z | adr_required=false | Implement OTel pipeline span instrumentation (PR 3/5 of #95) | [details](decisions/2026-04-08-implement-otel-pipeline-span-instrumentation-pr-3-5-of-95.md) - 2026-04-08T03:08:59Z | adr_required=false | Clear stale abort reason in VoicePipeline CancelledError failure path for PR #163 CodeRabbit finding | [details](decisions/2026-04-08-clear-stale-abort-reason-in-voicepipeline-cancellederror-failure-path-for-pr-163-coderabbit-finding.md) +- 2026-04-08T04:07:57Z | adr_required=false | Promote pyright reportUnknown* warnings to errors and fix 116 latent issues | [details](decisions/2026-04-08-promote-pyright-reportunknown-warnings-to-errors-and-fix-116-latent-issues.md) diff --git a/docs/adr/decisions/2026-04-08-promote-pyright-reportunknown-warnings-to-errors-and-fix-116-latent-issues.md b/docs/adr/decisions/2026-04-08-promote-pyright-reportunknown-warnings-to-errors-and-fix-116-latent-issues.md new file mode 100644 index 00000000..091689fd --- /dev/null +++ b/docs/adr/decisions/2026-04-08-promote-pyright-reportunknown-warnings-to-errors-and-fix-116-latent-issues.md @@ -0,0 +1,8 @@ +# ADR Decision Record + +timestamp: 2026-04-08T04:07:57Z +change: Promote pyright reportUnknown* warnings to errors and fix 116 latent issues +adr_required: false +rationale: Restores the strict mode contract that was silently relaxed in pyrightconfig.json. No new architectural boundary; pure type-annotation cleanup. 
+files: [] +adr_paths: [] diff --git a/pyrightconfig.json b/pyrightconfig.json index cb3c8b22..e2eeb24a 100644 --- a/pyrightconfig.json +++ b/pyrightconfig.json @@ -1,10 +1,7 @@ { - "include": ["daemon/src", "tests", "scripts"], - "typeCheckingMode": "strict", - "pythonVersion": "3.14", - "reportMissingTypeStubs": false, - "reportUnusedFunction": false, - "reportUnknownVariableType": "warning", - "reportUnknownArgumentType": "warning" + "include": ["daemon/src", "tests", "scripts"], + "typeCheckingMode": "strict", + "pythonVersion": "3.14", + "reportMissingTypeStubs": false, + "reportUnusedFunction": false +} - diff --git a/tests/memory/mem0_bridge/test_backfill.py b/tests/memory/mem0_bridge/test_backfill.py index 1fd6e24b..4e3432a0 100644 --- a/tests/memory/mem0_bridge/test_backfill.py +++ b/tests/memory/mem0_bridge/test_backfill.py @@ -1,5 +1,6 @@ from __future__ import annotations +import math from contextlib import asynccontextmanager from dataclasses import dataclass, field from decimal import Decimal @@ -377,3 +378,45 @@ def test_float_or_none_with_int() -> None: from daemon.memory.mem0_bridge.backfill import _float_or_none assert _float_or_none(42) == 42.0 + + +def test_float_or_none_with_none() -> None: + from daemon.memory.mem0_bridge.backfill import _float_or_none + + assert _float_or_none(None) is None + + +def test_float_or_none_with_float() -> None: + from daemon.memory.mem0_bridge.backfill import _float_or_none + + assert _float_or_none(1.23) == pytest.approx(1.23) + + +def test_float_or_none_with_decimal() -> None: + from daemon.memory.mem0_bridge.backfill import _float_or_none + + assert _float_or_none(Decimal("1.23")) == pytest.approx(float(Decimal("1.23"))) + + +@pytest.mark.parametrize("special", [float("inf"), float("-inf")]) +def test_float_or_none_with_infinity(special: float) -> None: + from daemon.memory.mem0_bridge.backfill import _float_or_none + + result = _float_or_none(special) + assert result == special + + +def
test_float_or_none_with_nan() -> None: + from daemon.memory.mem0_bridge.backfill import _float_or_none + + result = _float_or_none(float("nan")) + assert result is not None + assert math.isnan(result) + + +def test_float_or_none_with_high_precision_decimal() -> None: + from daemon.memory.mem0_bridge.backfill import _float_or_none + + high_precision = Decimal("1.23456789012345678901234567890") + result = _float_or_none(high_precision) + assert result == pytest.approx(float(high_precision))