2
0
Просмотр исходного кода

feat(core): Implement state persistence (Story 1.1.3)

- Add StateMachinePersistence with JSON format
- Add PersistentStateMachine and PersistentTransitionEvent dataclasses
- Atomic write guarantee (temp file + rename)
- Automatic parent directory creation
- Add save_to_file() method to StateMachine
- Add load_from_file() class method to StateMachine
- Add load_and_restore() method to restore into existing instance
- Add exists() and delete() helper methods
- Unit tests covering:
  * Save and load roundtrip
  * JSON format validation
  * Atomic write verification
  * Parent directory creation
  * Error handling (invalid JSON, missing files)
  * Data integrity preservation
  * Edge cases (empty state, large context)

Part of Epic 1.1: State Machine (Phase 1a)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
d8dfun 2 дня назад
Родитель
Commit
9a97b8d872
4 измененных файла с 774 добавлено и 2 удалено
  1. 12 1
      src/core/__init__.py
  2. 287 0
      src/core/persistence.py
  3. 77 1
      src/core/state_machine.py
  4. 398 0
      tests/test_core_persistence.py

+ 12 - 1
src/core/__init__.py

@@ -2,7 +2,8 @@
 Core module for pipeline state management.
 
 This module provides the state machine infrastructure for managing
-translation pipeline lifecycle, including states, transitions, and validation.
+translation pipeline lifecycle, including states, transitions, validation,
+and persistence.
 """
 
 from .states import PipelineState
@@ -13,6 +14,12 @@ from .state_machine import (
     InvalidTransitionError,
     TransitionEvent,
 )
+from .persistence import (
+    StateMachinePersistence,
+    StateMachinePersistenceError,
+    PersistentStateMachine,
+    PersistentTransitionEvent,
+)
 
 __all__ = [
     "PipelineState",
@@ -22,4 +29,8 @@ __all__ = [
     "StateMachineError",
     "InvalidTransitionError",
     "TransitionEvent",
+    "StateMachinePersistence",
+    "StateMachinePersistenceError",
+    "PersistentStateMachine",
+    "PersistentTransitionEvent",
 ]

+ 287 - 0
src/core/persistence.py

@@ -0,0 +1,287 @@
+"""
+State machine persistence module.
+
+This module provides functionality for saving and loading state machine
+state to/from files using JSON format with atomic write guarantees.
+"""
+
+import json
+from dataclasses import dataclass, field, asdict
+from datetime import datetime
+from pathlib import Path
+from typing import Optional, Dict, Any, List
+
+from .states import PipelineState
+from .state_machine import StateMachine, TransitionEvent
+
+
+# Version of the persistence format
+PERSISTENCE_VERSION = "1.0"
+
+
@dataclass
class PersistentTransitionEvent:
    """
    Serializable version of TransitionEvent for persistence.

    Attributes:
        from_state: Source state name
        to_state: Target state name
        context: Context data at time of transition
        timestamp: ISO 8601 timestamp of when the transition occurred
    """

    from_state: str
    to_state: str
    context: Dict[str, Any] = field(default_factory=dict)
    timestamp: str = ""

    def __post_init__(self):
        """Fill in a timezone-aware UTC timestamp when none was supplied."""
        if not self.timestamp:
            # datetime.utcnow() is deprecated since Python 3.12 and returns
            # a naive datetime; record an explicit UTC-aware timestamp.
            from datetime import timezone
            self.timestamp = datetime.now(timezone.utc).isoformat()
+
+
@dataclass
class PersistentStateMachine:
    """
    Serializable state machine data for persistence.

    Attributes:
        version: Persistence format version
        state: Current state name
        context: Current context data
        history: List of transition events
        metadata: Additional metadata (saved_at, transition_count, etc.)
    """

    version: str = PERSISTENCE_VERSION
    state: str = PipelineState.IDLE.value
    context: Dict[str, Any] = field(default_factory=dict)
    history: List[PersistentTransitionEvent] = field(default_factory=list)
    metadata: Dict[str, Any] = field(default_factory=dict)

    # NOTE: a previous __post_init__ reset a falsy metadata dict to {},
    # which was a no-op (default_factory already supplies {}) and carried a
    # misleading docstring; it has been removed.

    def to_dict(self) -> Dict[str, Any]:
        """Convert to a plain dictionary suitable for JSON serialization.

        Returns:
            Dict with "version", "state", "context", "history" (events as
            plain dicts), and "metadata" keys.
        """
        return {
            "version": self.version,
            "state": self.state,
            "context": self.context,
            "history": [
                {
                    "from_state": event.from_state,
                    "to_state": event.to_state,
                    "context": event.context,
                    "timestamp": event.timestamp,
                }
                for event in self.history
            ],
            "metadata": self.metadata,
        }

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "PersistentStateMachine":
        """Create an instance from a dictionary deserialized from JSON.

        Missing optional keys fall back to defaults so files written by
        older format versions still load.
        """
        history = [
            PersistentTransitionEvent(
                from_state=h["from_state"],
                to_state=h["to_state"],
                context=h.get("context", {}),
                timestamp=h.get("timestamp", ""),
            )
            for h in data.get("history", [])
        ]

        return cls(
            version=data.get("version", PERSISTENCE_VERSION),
            state=data.get("state", PipelineState.IDLE.value),
            context=data.get("context", {}),
            history=history,
            metadata=data.get("metadata", {}),
        )
+
+
class StateMachinePersistenceError(Exception):
    """Raised when saving, loading, or restoring persisted state fails."""
+
+
class StateMachinePersistence:
    """
    Handles saving and loading state machine state to/from files.

    Features:
    - JSON format for human readability
    - Atomic writes using temporary file + rename
    - Automatic parent directory creation
    - Version tracking for format migration

    Example:
        >>> sm = StateMachine()
        >>> sm.transition_to(PipelineState.FINGERPRINTING)
        >>> persistence = StateMachinePersistence(Path("/tmp/state.json"))
        >>> persistence.save(sm)
        >>> loaded_data = persistence.load()
    """

    def __init__(self, state_file: Path):
        """
        Initialize persistence handler.

        Args:
            state_file: Path to the state file (created on first save)
        """
        self.state_file = Path(state_file)

    def save(self, state_machine: StateMachine) -> None:
        """
        Save state machine state to file.

        Uses atomic write: write to a temporary file, then rename.
        This ensures that a crash during write won't corrupt the file.

        Args:
            state_machine: The StateMachine instance to save

        Raises:
            StateMachinePersistenceError: If save fails
        """
        # Local import keeps the module-level import list unchanged.
        from datetime import timezone

        try:
            # Snapshot the machine into the serializable dataclass.
            persistent_data = PersistentStateMachine(
                state=state_machine.state.value,
                context=state_machine.context,
                history=[
                    PersistentTransitionEvent(
                        from_state=event.from_state.value,
                        to_state=event.to_state.value,
                        context=event.context,
                    )
                    for event in state_machine.history
                ],
                metadata={
                    # utcnow() is deprecated (3.12); use an aware UTC stamp.
                    "saved_at": datetime.now(timezone.utc).isoformat(),
                    "transition_count": len(state_machine.history),
                },
            )

            # Ensure the parent directory exists before writing.
            self.state_file.parent.mkdir(parents=True, exist_ok=True)

            # Atomic write: write to a temp file first.
            temp_file = self.state_file.with_suffix(self.state_file.suffix + ".tmp")

            with open(temp_file, "w", encoding="utf-8") as f:
                json.dump(persistent_data.to_dict(), f, indent=2, ensure_ascii=False)

            # Atomic rename (overwrites an existing file if present).
            temp_file.replace(self.state_file)

        except Exception as e:
            raise StateMachinePersistenceError(f"Failed to save state: {e}") from e

    def load(self) -> Optional[PersistentStateMachine]:
        """
        Load state machine data from file.

        Returns:
            PersistentStateMachine if the file exists and is valid,
            None if the file doesn't exist.

        Raises:
            StateMachinePersistenceError: If the file exists but is invalid
        """
        if not self.state_file.exists():
            return None

        try:
            with open(self.state_file, "r", encoding="utf-8") as f:
                data = json.load(f)

            return PersistentStateMachine.from_dict(data)

        except json.JSONDecodeError as e:
            raise StateMachinePersistenceError(
                f"Invalid JSON in state file: {e}"
            ) from e
        except Exception as e:
            raise StateMachinePersistenceError(f"Failed to load state: {e}") from e

    def load_and_restore(self, state_machine: StateMachine) -> bool:
        """
        Load state from file and restore it into a StateMachine instance.

        Args:
            state_machine: The StateMachine instance to restore into

        Returns:
            True if state was loaded and restored,
            False if the state file doesn't exist.

        Raises:
            StateMachinePersistenceError: If the file exists but is invalid
        """
        data = self.load()
        if data is None:
            return False

        try:
            # Restore current state (raises ValueError on unknown state name).
            state_machine._state = PipelineState(data.state)

            # Restore context in place so existing references stay valid.
            state_machine._context.clear()
            state_machine._context.update(data.context)

            # Rebuild the transition history from the persisted events.
            state_machine._history.clear()
            for event_data in data.history:
                event = TransitionEvent(
                    from_state=PipelineState(event_data.from_state),
                    to_state=PipelineState(event_data.to_state),
                    context=event_data.context.copy(),
                )
                state_machine._history.append(event)

            return True

        except Exception as e:
            raise StateMachinePersistenceError(
                f"Failed to restore state machine: {e}"
            ) from e

    def delete(self) -> bool:
        """
        Delete the state file if it exists.

        Returns:
            True if the file was deleted, False if it didn't exist.
        """
        if self.state_file.exists():
            self.state_file.unlink()
            return True
        return False

    def exists(self) -> bool:
        """
        Check whether the state file exists on disk.

        Returns:
            True if the state file exists, False otherwise.
        """
        return self.state_file.exists()

+ 77 - 1
src/core/state_machine.py

@@ -5,12 +5,16 @@ This module provides a state machine with transition validation,
 callbacks, and context storage for managing translation pipeline state.
 """
 
-from typing import Callable, Dict, List, Any, Optional
+from typing import Callable, Dict, List, Any, Optional, TYPE_CHECKING
 from dataclasses import dataclass, field
+from pathlib import Path
 
 from .states import PipelineState
 from .transitions import is_transition_allowed
 
+if TYPE_CHECKING:
+    pass
+
 
 @dataclass
 class TransitionEvent:
@@ -297,3 +301,75 @@ class StateMachine:
         """Get list of allowed transitions from current state."""
         from .transitions import get_allowed_transitions
         return list(get_allowed_transitions(self._state))
+
+    def save_to_file(self, path: Path) -> None:
+        """
+        Save state machine state to a file.
+
+        Args:
+            path: Path to save the state file
+
+        Raises:
+            StateMachinePersistenceError: If save fails
+
+        Example:
+            >>> sm = StateMachine()
+            >>> sm.transition_to(PipelineState.FINGERPRINTING)
+            >>> sm.save_to_file(Path("/tmp/state.json"))
+        """
+        from .persistence import StateMachinePersistence
+        persistence = StateMachinePersistence(path)
+        persistence.save(self)
+
+    @classmethod
+    def load_from_file(cls, path: Path) -> Optional["StateMachine"]:
+        """
+        Load state machine state from a file.
+
+        Args:
+            path: Path to the state file
+
+        Returns:
+            StateMachine instance with restored state if file exists
+            None if file doesn't exist
+
+        Raises:
+            StateMachinePersistenceError: If file exists but is invalid
+
+        Example:
+            >>> sm = StateMachine.load_from_file(Path("/tmp/state.json"))
+            >>> if sm:
+            ...     print(f"Restored to state: {sm.state}")
+        """
+        from .persistence import StateMachinePersistence
+        persistence = StateMachinePersistence(path)
+
+        data = persistence.load()
+        if data is None:
+            return None
+
+        # Create new instance
+        sm = cls()
+
+        # Restore state
+        try:
+            sm._state = PipelineState(data.state)
+            sm._context.clear()
+            sm._context.update(data.context)
+            sm._history.clear()
+
+            for event_data in data.history:
+                event = TransitionEvent(
+                    from_state=PipelineState(event_data.from_state),
+                    to_state=PipelineState(event_data.to_state),
+                    context=event_data.context.copy(),
+                )
+                sm._history.append(event)
+
+            return sm
+
+        except Exception as e:
+            from .persistence import StateMachinePersistenceError
+            raise StateMachinePersistenceError(
+                f"Failed to restore StateMachine: {e}"
+            ) from e

+ 398 - 0
tests/test_core_persistence.py

@@ -0,0 +1,398 @@
+"""
+Unit tests for state machine persistence.
+
+Tests cover saving, loading, atomic writes, and error handling.
+"""
+
+import json
+import tempfile
+from pathlib import Path
+
+import pytest
+
+from src.core.states import PipelineState
+from src.core.state_machine import StateMachine, TransitionEvent
+from src.core.persistence import (
+    StateMachinePersistence,
+    StateMachinePersistenceError,
+    PersistentStateMachine,
+    PersistentTransitionEvent,
+)
+
+
class TestPersistentTransitionEvent:
    """Tests for the PersistentTransitionEvent dataclass."""

    def test_creation(self):
        """Fields passed to the constructor are stored verbatim."""
        ev = PersistentTransitionEvent(
            from_state="idle",
            to_state="fingerprinting",
            context={"file": "test.txt"},
        )
        assert ev.from_state == "idle"
        assert ev.to_state == "fingerprinting"
        assert ev.context == {"file": "test.txt"}

    def test_default_timestamp(self):
        """A non-empty timestamp is generated when none is supplied."""
        ev = PersistentTransitionEvent(
            from_state="idle",
            to_state="fingerprinting",
        )
        assert ev.timestamp != ""
        assert len(ev.timestamp) > 0
+
+
class TestPersistentStateMachine:
    """Tests for the PersistentStateMachine dataclass."""

    def test_default_values(self):
        """A bare instance starts idle with empty collections."""
        snapshot = PersistentStateMachine()
        assert snapshot.state == "idle"
        assert snapshot.context == {}
        assert snapshot.history == []
        assert snapshot.metadata == {}

    def test_to_dict(self):
        """to_dict() flattens the snapshot into plain JSON types."""
        snapshot = PersistentStateMachine(
            state="translating",
            context={"progress": 50},
            history=[
                PersistentTransitionEvent(
                    from_state="idle",
                    to_state="translating",
                )
            ],
        )

        payload = snapshot.to_dict()
        assert payload["state"] == "translating"
        assert payload["context"]["progress"] == 50
        assert len(payload["history"]) == 1

    def test_from_dict(self):
        """from_dict() rebuilds the snapshot, including history events."""
        payload = {
            "version": "1.0",
            "state": "translating",
            "context": {"progress": 50},
            "history": [
                {
                    "from_state": "idle",
                    "to_state": "translating",
                    "context": {},
                    "timestamp": "2026-03-15T10:00:00",
                }
            ],
            "metadata": {},
        }

        snapshot = PersistentStateMachine.from_dict(payload)
        assert snapshot.state == "translating"
        assert snapshot.context["progress"] == 50
        assert len(snapshot.history) == 1
+
+
class TestStateMachinePersistence:
    """Tests for the StateMachinePersistence class."""

    def test_init_with_path(self):
        """The handler stores the path it was constructed with."""
        with tempfile.TemporaryDirectory() as tmp:
            state_path = Path(tmp) / "state.json"
            handler = StateMachinePersistence(state_path)
            assert handler.state_file == state_path

    def test_save_and_load(self):
        """A saved machine round-trips through load() intact."""
        with tempfile.TemporaryDirectory() as tmp:
            state_path = Path(tmp) / "state.json"

            machine = StateMachine()
            machine.transition_to(PipelineState.FINGERPRINTING, file="novel.txt")
            machine.transition_to(PipelineState.CLEANING, mode="deep")

            handler = StateMachinePersistence(state_path)
            handler.save(machine)

            loaded = handler.load()
            assert loaded is not None
            assert loaded.state == "cleaning"
            assert loaded.context["file"] == "novel.txt"
            assert loaded.context["mode"] == "deep"
            assert len(loaded.history) == 2

    def test_save_creates_parent_directories(self):
        """Saving to a nested path creates the missing directories."""
        with tempfile.TemporaryDirectory() as tmp:
            nested = Path(tmp) / "nested" / "dir" / "state.json"
            StateMachinePersistence(nested).save(StateMachine())
            assert nested.exists()

    def test_save_creates_valid_json(self):
        """The saved file parses as JSON with the expected fields."""
        with tempfile.TemporaryDirectory() as tmp:
            state_path = Path(tmp) / "state.json"

            machine = StateMachine()
            machine.transition_to(PipelineState.TRANSLATING, progress=75)
            StateMachinePersistence(state_path).save(machine)

            with open(state_path, "r") as f:
                payload = json.load(f)

            assert payload["state"] == "translating"
            assert payload["context"]["progress"] == 75

    def test_atomic_write(self):
        """After a successful save only the final file remains."""
        with tempfile.TemporaryDirectory() as tmp:
            state_path = Path(tmp) / "state.json"
            StateMachinePersistence(state_path).save(StateMachine())

            # The intermediate temp file must be gone after the rename.
            leftover = state_path.with_suffix(state_path.suffix + ".tmp")
            assert not leftover.exists()
            assert state_path.exists()

    def test_load_nonexistent_file(self):
        """load() returns None when the file is absent."""
        with tempfile.TemporaryDirectory() as tmp:
            handler = StateMachinePersistence(Path(tmp) / "nonexistent.json")
            assert handler.load() is None

    def test_load_invalid_json(self):
        """load() raises on a file containing malformed JSON."""
        with tempfile.TemporaryDirectory() as tmp:
            state_path = Path(tmp) / "invalid.json"

            with open(state_path, "w") as f:
                f.write("{ invalid json }")

            with pytest.raises(StateMachinePersistenceError):
                StateMachinePersistence(state_path).load()

    def test_load_and_restore(self):
        """load_and_restore copies state, context, and history over."""
        with tempfile.TemporaryDirectory() as tmp:
            state_path = Path(tmp) / "state.json"

            source = StateMachine()
            source.transition_to(PipelineState.TRANSLATING, progress=50)

            handler = StateMachinePersistence(state_path)
            handler.save(source)

            target = StateMachine()
            assert handler.load_and_restore(target) is True
            assert target.state == PipelineState.TRANSLATING
            assert target.get_context_value("progress") == 50
            assert len(target.history) == 1

    def test_load_and_restore_nonexistent(self):
        """load_and_restore leaves the machine untouched if no file exists."""
        with tempfile.TemporaryDirectory() as tmp:
            machine = StateMachine()
            handler = StateMachinePersistence(Path(tmp) / "nonexistent.json")

            assert handler.load_and_restore(machine) is False
            assert machine.state == PipelineState.IDLE

    def test_exists(self):
        """exists() reflects whether the state file is on disk."""
        with tempfile.TemporaryDirectory() as tmp:
            handler = StateMachinePersistence(Path(tmp) / "state.json")

            assert handler.exists() is False
            handler.save(StateMachine())
            assert handler.exists() is True

    def test_delete(self):
        """delete() removes an existing state file and reports True."""
        with tempfile.TemporaryDirectory() as tmp:
            state_path = Path(tmp) / "state.json"

            handler = StateMachinePersistence(state_path)
            handler.save(StateMachine())

            assert state_path.exists()
            assert handler.delete() is True
            assert not state_path.exists()

    def test_delete_nonexistent(self):
        """delete() reports False when there is nothing to remove."""
        with tempfile.TemporaryDirectory() as tmp:
            handler = StateMachinePersistence(Path(tmp) / "nonexistent.json")
            assert handler.delete() is False
+
+
class TestStateMachinePersistenceMethods:
    """Tests for StateMachine.save_to_file and StateMachine.load_from_file."""

    def test_save_to_file(self):
        """save_to_file writes valid JSON reflecting the machine's state."""
        with tempfile.TemporaryDirectory() as tmp:
            state_path = Path(tmp) / "state.json"

            machine = StateMachine()
            machine.transition_to(PipelineState.UPLOADING, target="web")
            machine.save_to_file(state_path)

            assert state_path.exists()

            with open(state_path, "r") as f:
                payload = json.load(f)

            assert payload["state"] == "uploading"
            assert payload["context"]["target"] == "web"

    def test_load_from_file(self):
        """load_from_file reconstructs a machine from a saved file."""
        with tempfile.TemporaryDirectory() as tmp:
            state_path = Path(tmp) / "state.json"

            original = StateMachine()
            original.transition_to(PipelineState.COMPLETED, output="/path/to/file.txt")
            original.save_to_file(state_path)

            restored = StateMachine.load_from_file(state_path)

            assert restored is not None
            assert restored.state == PipelineState.COMPLETED
            assert restored.get_context_value("output") == "/path/to/file.txt"

    def test_load_from_file_nonexistent(self):
        """load_from_file returns None when no file exists at the path."""
        with tempfile.TemporaryDirectory() as tmp:
            assert StateMachine.load_from_file(Path(tmp) / "nonexistent.json") is None

    def test_roundtrip_preserves_all_data(self):
        """State, context, and full history survive a save/load roundtrip."""
        with tempfile.TemporaryDirectory() as tmp:
            state_path = Path(tmp) / "state.json"

            original = StateMachine()
            original.transition_to(PipelineState.FINGERPRINTING, file="novel.txt")
            original.transition_to(PipelineState.CLEANING)
            original.transition_to(PipelineState.TERM_EXTRACTION, terms=5)
            original.transition_to(PipelineState.TRANSLATING, progress=25, chapter=1)
            original.transition_to(PipelineState.TRANSLATING, progress=50, chapter=2)

            original.save_to_file(state_path)
            restored = StateMachine.load_from_file(state_path)

            assert restored.state == original.state
            assert restored.context == original.context
            assert len(restored.history) == len(original.history)

            for before, after in zip(original.history, restored.history):
                assert before.from_state == after.from_state
                assert before.to_state == after.to_state
                assert before.context == after.context
+
+
class TestPersistenceEdgeCases:
    """Edge cases and error conditions for persistence."""

    def test_save_empty_state_machine(self):
        """A freshly-constructed (idle, empty) machine round-trips cleanly."""
        with tempfile.TemporaryDirectory() as tmp:
            state_path = Path(tmp) / "state.json"

            StateMachine().save_to_file(state_path)
            restored = StateMachine.load_from_file(state_path)

            assert restored is not None
            assert restored.state == PipelineState.IDLE
            assert restored.context == {}
            assert restored.history == []

    def test_save_with_large_context(self):
        """A context with many long values survives the roundtrip."""
        with tempfile.TemporaryDirectory() as tmp:
            state_path = Path(tmp) / "state.json"

            machine = StateMachine()
            bulky = {f"key_{i}": f"value_{i}" * 100 for i in range(50)}
            machine.transition_to(PipelineState.TRANSLATING, **bulky)
            machine.save_to_file(state_path)

            restored = StateMachine.load_from_file(state_path)
            assert restored is not None
            assert len(restored.context) == 50

    def test_version_field_preserved(self):
        """The persistence format version is written into the file."""
        with tempfile.TemporaryDirectory() as tmp:
            state_path = Path(tmp) / "state.json"
            StateMachine().save_to_file(state_path)

            with open(state_path, "r") as f:
                payload = json.load(f)

            assert "version" in payload
            assert payload["version"] == "1.0"

    def test_metadata_includes_saved_at(self):
        """Saving records a non-empty saved_at metadata timestamp."""
        with tempfile.TemporaryDirectory() as tmp:
            state_path = Path(tmp) / "state.json"
            StateMachine().save_to_file(state_path)

            loaded = StateMachinePersistence(state_path).load()

            assert "metadata" in loaded.to_dict()
            assert "saved_at" in loaded.metadata
            assert loaded.metadata["saved_at"] != ""