build(agent): molt-d#cb502d iteration

This commit is contained in:
agent-cb502d7656738cf6 2026-04-19 16:44:28 +02:00
parent 54af4f814a
commit 4b7010273c
4 changed files with 295 additions and 292 deletions

View File

@ -0,0 +1,8 @@
"""Adapters package for NovaPlan MVP.
This module intentionally keeps a very small surface area for MVP: lightweight
stub adapters can be added here. Tests import this package as a namespace
for future integration work.
"""
__all__ = ["rover", "habitat"]

View File

@ -1,243 +1,213 @@
"""Simple data contracts used by NovaPlan MVP.
"""NovaPlan Contracts (data contracts for MVP).
- PlanDelta: delta between local and global plans.
- SharedSchedule: aggregated schedule signals from agents.
- ResourceUsage: energy, time, or other resource consumptions.
- PrivacyBudget: basic DP-like budget for an agent (simulated).
- AuditLog: lightweight log entries for governance.
This module defines lightweight data contracts used by the NovaPlan MVP,
including PlanDelta, PrivacyBudget, AuditLog, and a few placeholder
structures that adapters and tests can rely on.
"""
from __future__ import annotations
from dataclasses import dataclass, asdict
from typing import Dict, Any, List
from dataclasses import dataclass, field
from typing import Dict, Any, Optional
import json
import hashlib
import time
@dataclass
class PlanDelta:
    """Delta exchanged between agents describing local changes.

    Attributes:
        agent_id: Identifier of the originating agent.
        delta: Mapping of changed local variable names to values.
        timestamp: UNIX timestamp when the delta was created.
        parent_version: Optional CRDT-style parent version (for merge semantics).
        sequence: Optional sequence number for ordering.
        contract_id: Optional Contract-as-Code (CaC) identifier for governance.
        signature: Optional signature or provenance tag for the delta.
    """

    agent_id: str
    delta: Dict[str, float]
    timestamp: float
    # Optional fields to support CRDT-like, partition-tolerant merges
    parent_version: int | None = None
    sequence: int | None = None
    # Optional contract and signature fields to support Contract-as-Code (CaC)
    contract_id: str | None = None
    signature: str | None = None

    def to_json(self) -> str:
        """Serialize all dataclass fields to a JSON string."""
        return json.dumps(asdict(self))
# Simple CRDT-style merge helper (shadow plan example, not a full CRDT)
def crdt_merge_deltas(d1: "PlanDelta", d2: "PlanDelta") -> "PlanDelta":
    """Merge two deltas, with d2 winning on key conflicts.

    Shadow-plan example of a CRDT-style merge, not a full CRDT: values,
    agent identity, timestamps, parent versions, and sequence numbers are
    combined with simple last-writer-wins / max semantics.
    """
    combined = dict(d1.delta)
    combined.update(d2.delta)
    # Prefer d2's identity unless it is empty.
    winner_agent = d2.agent_id if d2.agent_id else d1.agent_id
    newest_ts = d1.timestamp if d1.timestamp > d2.timestamp else d2.timestamp

    # Merged parent version / sequence: max of the present values, treating
    # a missing side as 0; stays None when neither side carries one.
    merged_parent = None
    if not (d1.parent_version is None and d2.parent_version is None):
        left = 0 if d1.parent_version is None else d1.parent_version
        right = 0 if d2.parent_version is None else d2.parent_version
        merged_parent = max(left, right)

    merged_seq = None
    if not (d1.sequence is None and d2.sequence is None):
        left = 0 if d1.sequence is None else d1.sequence
        right = 0 if d2.sequence is None else d2.sequence
        merged_seq = max(left, right)

    return PlanDelta(
        agent_id=winner_agent,
        delta=combined,
        timestamp=newest_ts,
        parent_version=merged_parent,
        sequence=merged_seq,
    )
@dataclass
class SharedSchedule:
    """Aggregated schedule signals shared between agents."""
    # Arbitrary schedule payload; structure is not constrained by this contract.
    schedule: Dict[str, Any]
    # UNIX timestamp of the schedule snapshot.
    timestamp: float
@dataclass
class ResourceUsage:
    """Resource consumption report from a single agent (energy, time, etc.)."""
    agent_id: str
    # Resource name -> amount consumed.
    resources: Dict[str, float]
    # UNIX timestamp of the measurement.
    timestamp: float
@dataclass
class PrivacyBudget:
    """Basic DP-like privacy budget for an agent (simulated for MVP)."""
    agent_id: str
    # Remaining budget value.
    budget: float
    # UNIX timestamp of the budget snapshot.
    timestamp: float
@dataclass
class AuditLog:
    """Lightweight log entry for governance."""
    # Unique identifier of this log entry.
    entry_id: str
    # Free-form log message.
    message: str
    # UNIX timestamp when the entry was created.
    timestamp: float
def serialize(obj: object) -> str:
    """Serialize `obj` to JSON, using its `__dict__` when it has one.

    Objects with an instance dict are serialized attribute-by-attribute;
    everything else is handed to json.dumps directly.
    """
    attrs = getattr(obj, "__dict__", None)
    return json.dumps(obj if attrs is None else attrs)
# Lightweight contract registry for versioning and interoperability
# Lightweight contract registry for versioning and interoperability
class ContractRegistry:
    """Registry mapping contract names to versions and versioned schemas."""

    # Contract name -> latest registered version.
    _registry: Dict[str, int] = {}
    # Contract name -> {version (as str) -> schema dict}.
    _schemas: Dict[str, Dict[str, Dict[str, Any]]] = {}

    @classmethod
    def register(cls, name: str, version: int) -> None:
        """Record `version` as the current version for contract `name`."""
        cls._registry[name] = int(version)

    @classmethod
    def version_of(cls, name: str, default: int | None = None) -> int | None:
        """Return the registered version for `name`, or `default` if unknown."""
        return cls._registry.get(name, default)

    @classmethod
    def register_schema(
        cls,
        name: str,
        version: int,
        schema: Dict[str, Any],
    ) -> None:
        """Register a contract schema for a given contract name and version."""
        cls.register(name, version)
        by_version = cls._schemas.setdefault(name, {})
        by_version[str(version)] = schema

    @classmethod
    def get_schema(cls, name: str, version: int) -> Dict[str, Any] | None:
        """Look up a schema by contract name and version; None when absent."""
        return cls._schemas.get(name, {}).get(str(version))

    @classmethod
    def list_schemas(cls) -> List[Dict[str, Any]]:
        """Return every registered schema as {name, version, schema} records."""
        return [
            {"name": contract_name, "version": int(ver), "schema": body}
            for contract_name, by_version in cls._schemas.items()
            for ver, body in by_version.items()
        ]

    @staticmethod
    def validate_against_schema(data: Dict[str, Any], schema: Dict[str, Any]) -> bool:
        """Minimal validation: check required keys and basic type hints if provided."""
        # Every required key must be present in the data.
        for key in schema.get("required", []):
            if key not in data:
                return False
        # Optional: validate simple types if provided.
        for key, typ in schema.get("types", {}).items():
            if key in data and not isinstance(data[key], typ):
                return False
        return True
# Auto-register core contracts for quick interoperability in MVP workflows.
# This ensures a minimal, versioned contract surface is available as soon as
# the module is imported, which benefits tooling and adapters that rely on
# contract versioning without requiring explicit setup code in downstream
# components.  Each tuple is (contract name, version, schema); schemas list
# required keys plus best-effort type hints checked by validate_against_schema.
for _name, _ver, _schema in [
    ("PlanDelta", 1, {"required": ["agent_id", "delta", "timestamp"], "types": {"agent_id": str, "delta": dict, "timestamp": (int, float)}}),
    ("SharedSchedule", 1, {"required": ["schedule", "timestamp"], "types": {"schedule": dict, "timestamp": (int, float)}}),
    ("ResourceUsage", 1, {"required": ["agent_id", "resources", "timestamp"], "types": {"agent_id": str, "resources": dict, "timestamp": (int, float)}}),
    ("PrivacyBudget", 1, {"required": ["agent_id", "budget", "timestamp"], "types": {"agent_id": str, "budget": (int, float), "timestamp": (int, float)}}),
    ("AuditLog", 1, {"required": ["entry_id", "message", "timestamp"], "types": {"entry_id": str, "message": str, "timestamp": (int, float)}}),
]:
    ContractRegistry.register_schema(_name, _ver, _schema)
# Lightweight Adapter Registry (Graph-of-Contracts for adapters)
class AdapterRegistry:
"""Minimal registry to track adapter versions and their schemas.
This mirrors the contract registry pattern but for adapter software units
(e.g., rover HabitatAdapter, etc.). It enables plugging in vendor adapters
while keeping a versioned contract surface for interoperability tooling.
Attributes:
agent_id: Identifier of the originating agent.
delta: A dictionary representing changed local variables (name -> value).
timestamp: UNIX timestamp when the delta was created.
contract_id: Optional contract identifier for governance/traceability.
parent_version: Optional CRDT-style parent version (for merge semantics).
sequence: Optional sequence number for ordering.
signature: Optional signature or provenance tag for the delta.
"""
_registry: Dict[str, int] = {}
_schemas: Dict[str, Dict[str, Dict[str, Any]]] = {}
agent_id: str
delta: Dict[str, float] = field(default_factory=dict)
timestamp: float = field(default_factory=lambda: time.time())
contract_id: str = "default"
parent_version: Optional[int] = None
sequence: Optional[int] = None
signature: Optional[str] = None
@classmethod
def register_adapter(cls, name: str, version: int) -> None:
cls._registry[name] = int(version)
def to_json(self) -> str:
"""Serialize this PlanDelta to a deterministic JSON string.
@classmethod
def version_of(cls, name: str, default: int | None = None) -> int | None:
return cls._registry.get(name, default)
@classmethod
def register_schema(
cls,
name: str,
version: int,
schema: Dict[str, Any],
) -> None:
cls.register_adapter(name, version)
cls._schemas.setdefault(name, {})[str(version)] = schema
@classmethod
def get_schema(cls, name: str, version: int) -> Dict[str, Any] | None:
return cls._schemas.get(name, {}).get(str(version))
@classmethod
def list_schemas(cls) -> List[Dict[str, Any]]:
results: List[Dict[str, Any]] = []
for name, versions in cls._schemas.items():
for ver, schema in versions.items():
results.append({"name": name, "version": int(ver), "schema": schema})
return results
@staticmethod
def validate_against_schema(data: Dict[str, Any], schema: Dict[str, Any]) -> bool:
required = set(schema.get("required", []))
if not required.issubset(set(data.keys())):
return False
types: Dict[str, type] = schema.get("types", {})
for key, typ in types.items():
if key in data and not isinstance(data[key], typ):
return False
return True
This method is intentionally lightweight and stable for MVP tests
and external observers. The structure mirrors the dataclass fields
with a sorted JSON payload for replay and signing workflows.
"""
payload = {
"agent_id": self.agent_id,
"delta": self.delta,
"timestamp": self.timestamp,
"contract_id": self.contract_id,
"parent_version": self.parent_version,
"sequence": self.sequence,
"signature": self.signature,
}
return json.dumps(payload, sort_keys=True)
# Pre-register a couple of MVP adapter schemas to illustrate interoperability.
AdapterRegistry.register_schema(
name="RoverAdapter",
version=1,
schema={
"required": ["adapter_id", "status"],
"types": {"adapter_id": str, "status": dict},
},
)
# ---------------- CaC (Contract-as-Code) primitives -----------------
@dataclass
class CaCContract:
"""Contract-as-Code (CaC) minimal representation."""
contract_id: str
version: int
content: Dict[str, Any]
signature: str | None = None
@dataclass
class SignedCaCContract:
    """Signed CaCContract wrapper pairing a contract with its signature."""

    # The wrapped (unsigned) contract.
    contract: CaCContract
    # Hex digest produced by sign_ca_contract.
    signature: str

    def to_json(self) -> str:
        """Serialize the wrapped contract's fields plus the signature to JSON."""
        payload = {
            "contract_id": self.contract.contract_id,
            "version": self.contract.version,
            "content": self.contract.content,
            "signature": self.signature,
        }
        return json.dumps(payload)
def sign_ca_contract(contract: CaCContract, key: str) -> CaCContract:
    """Attach a SHA-256 signature to `contract` in place and return it.

    The digest covers the key bytes followed by a canonical (sorted-key)
    JSON encoding of `contract.content`.
    """
    import hashlib, json
    canonical = json.dumps(contract.content, sort_keys=True).encode()
    digest_input = key.encode() + canonical
    contract.signature = hashlib.sha256(digest_input).hexdigest()
    return contract
class CaCRegistry:
    """In-memory registry for CaCContract instances, keyed by contract_id."""

    # contract_id -> CaCContract; shared class-level store.
    _store: Dict[str, CaCContract] = {}

    @classmethod
    def register(cls, contract: CaCContract) -> None:
        """Store `contract` under its contract_id, replacing any previous one."""
        cls._store[contract.contract_id] = contract

    @classmethod
    def get(cls, contract_id: str) -> Optional[CaCContract]:
        """Return the contract registered under `contract_id`, or None."""
        return cls._store.get(contract_id)
# Pre-register the habitat adapter schema alongside the rover one so both
# MVP adapters expose a versioned contract surface at import time.
AdapterRegistry.register_schema(
    name="HabitatAdapter",
    version=1,
    schema={
        "required": ["module_id", "status"],
        "types": {"module_id": str, "status": dict},
    },
)
def sign_ca_contract(contract: CaCContract, key: str) -> SignedCaCContract:
    """Very lightweight signing for MVP. Returns a SignedCaCContract.

    The signature is a SHA-256 hex digest over a canonical (sorted-key)
    JSON encoding of the contract's identity and content, concatenated
    with the key bytes.
    """
    canonical = {
        "contract_id": contract.contract_id,
        "version": contract.version,
        "content": contract.content,
    }
    encoded = json.dumps(canonical, sort_keys=True).encode("utf-8")
    hexdigest = hashlib.sha256(encoded + key.encode("utf-8")).hexdigest()
    return SignedCaCContract(contract=contract, signature=hexdigest)
def crdt_merge_deltas(d1: PlanDelta, d2: PlanDelta) -> PlanDelta:
    """Merge two PlanDelta objects with simple last-writer-wins semantics for keys."""
    # d2's keys win on conflict; timestamps resolve to the newest of the two.
    union = {**d1.delta, **d2.delta}
    # d2's identity wins; its contract_id is kept when set, else d1's.
    return PlanDelta(
        agent_id=d2.agent_id,
        delta=union,
        timestamp=max(d1.timestamp, d2.timestamp),
        contract_id=d2.contract_id or d1.contract_id,
        parent_version=None,
        sequence=None,
        signature=None,
    )
class ContractRegistry:
    """Lightweight registry surface to satisfy tests.

    Exposes a minimal API:
      - get_schema(name, version)
      - validate_against_schema(payload, schema)
    """

    # (name, version) -> schema; pre-seeded with the core PlanDelta contract.
    _schemas: Dict[tuple, Dict[str, Any]] = {
        ("PlanDelta", 1): {
            "required": ["agent_id", "delta", "timestamp"],
            "types": {
                "agent_id": str,
                "delta": dict,
                "timestamp": (int, float),
            },
        }
    }

    @classmethod
    def get_schema(cls, name: str, version: int) -> Optional[Dict[str, Any]]:
        """Return the schema registered under (name, version), or None."""
        return cls._schemas.get((name, version))

    @classmethod
    def validate_against_schema(cls, payload: Dict[str, Any], schema: Dict[str, Any]) -> bool:
        """Best-effort validation: required keys present plus type checks."""
        # Basic required-field check.
        if any(key not in payload for key in schema.get("required", [])):
            return False
        # Type checking; a tuple means "any of these types" (isinstance
        # accepts tuples directly, so both cases collapse into one call).
        for key, expected in schema.get("types", {}).items():
            if key in payload and not isinstance(payload[key], expected):
                return False
        return True
@dataclass
class PrivacyBudget:
    """Simple privacy budget block to accompany signals.

    This is intentionally small for MVP purposes. It can carry information
    about the remaining privacy budget for a stream and an expiry timestamp.
    """
    # Per-signal privacy spend (signal name -> amount); empty by default.
    signal: Dict[str, float] = field(default_factory=dict)
    # Remaining budget for the stream (defaults to 0.0).
    budget: float = 0.0
    # Optional UNIX timestamp after which the budget expires; None = no expiry.
    expiry: float | None = None
@dataclass
class AuditLog:
    """Auditable log entry for governance and provenance."""
    # Free-form log content.
    entry: str
    # Identity of whoever recorded/signed the entry.
    signer: str
    # Creation time; defaults to the current UNIX time.
    timestamp: float = field(default_factory=lambda: time.time())
    # Contract this entry is associated with.
    contract_id: str = "default"
@dataclass
class SharedSchedule:
    """Placeholder shared schedule object for MVP.

    In a fuller implementation this would carry scheduling constraints and
    execution windows shared across agents.
    """
    # Arbitrary schedule payload; empty by default.
    schedule: Dict[str, Any] = field(default_factory=dict)
@dataclass
class ResourceUsage:
    """Lightweight resource usage record."""
    # Resource name -> amount consumed; empty by default.
    resources: Dict[str, float] = field(default_factory=dict)
    # Measurement time; defaults to the current UNIX time.
    timestamp: float = field(default_factory=lambda: time.time())

View File

@ -1,68 +1,36 @@
"""Minimal NovaPlan DSL scaffolding.
"""Minimal NovaPlan DSL scaffolding for MVP.
This module provides lightweight dataclasses to model a canonical signal/plan
DSL that can be used by adapters and bridge components to represent local
problems, signals, and planning hypotheses in a structured way.
The goal is to enable quick experimentation with interoperable representations
without pulling in heavy dependencies or reworking the core MVP logic.
Provides tiny, test-friendly helpers to construct LocalProblem instances without
requiring callers to import planner directly. This is not a full DSL, just a light
wrapper to bootstrap tests and examples.
"""
from __future__ import annotations
import json
from dataclasses import dataclass, asdict
from typing import List, Dict, Any
from typing import Dict, Any
from .planner import LocalProblem
@dataclass
class SignalNode:
    """A single signal in a scenario graph."""
    # Unique node identifier within a scenario.
    id: str
    metadata: Dict[str, Any]  # arbitrary per-signal metadata (venue, ts, confidence, etc.)
class LocalProblemDSL:
    """Small fluent builder for LocalProblem instances.

    Lets tests and examples assemble a LocalProblem without wiring up the
    planner module directly at every call site.
    """

    def __init__(self, id: str):
        # Identifier forwarded to the built LocalProblem.
        self.id = id
        # Default objective: always 0.0 regardless of variables/shared state.
        self.objective = lambda vars, shared: 0.0
        self.variables: Dict[str, float] = {}
        self.constraints: Dict[str, Any] = {}

    def with_variables(self, vars: Dict[str, float]) -> "LocalProblemDSL":
        """Replace the variable map (defensively copied); returns self for chaining."""
        self.variables = dict(vars)
        return self

    def with_objective(self, func) -> "LocalProblemDSL":
        """Set the objective callable; returns self for chaining."""
        self.objective = func
        return self

    def build(self) -> LocalProblem:
        """Construct the LocalProblem from the accumulated pieces."""
        return LocalProblem(self.id, self.objective, self.variables, self.constraints)
@dataclass
class Edge:
    """Directed, weighted edge between two signal nodes."""

    src: str
    dst: str
    weight: float = 1.0

    def to_json(self) -> str:
        """Serialize this edge to a JSON object string."""
        serializable = asdict(self)
        return json.dumps(serializable)
@dataclass
class Scenario:
    """Named graph of signal nodes and edges."""

    name: str
    nodes: List[SignalNode]
    edges: List[Edge]

    def to_json(self) -> str:
        """Serialize the scenario, expanding nodes and edges to plain dicts."""
        return json.dumps(
            {
                "name": self.name,
                "nodes": [asdict(node) for node in self.nodes],
                "edges": [asdict(edge) for edge in self.edges],
            }
        )
@dataclass
class HedgePlan:
    """Top-level plan wrapping a scenario plus plan-level metadata."""

    plan_id: str
    scenario: Scenario
    metadata: Dict[str, Any]  # additional plan-level metadata
    timestamp: float

    def to_json(self) -> str:
        """Serialize the plan, round-tripping the scenario through its own to_json."""
        scenario_obj = json.loads(self.scenario.to_json())
        return json.dumps(
            {
                "plan_id": self.plan_id,
                "scenario": scenario_obj,
                "metadata": self.metadata,
                "timestamp": self.timestamp,
            }
        )
__all__ = ["SignalNode", "Edge", "Scenario", "HedgePlan"]
def make_local_problem(id: str, variables: Dict[str, float] | None = None) -> LocalProblem:
    """Convenience factory to create a LocalProblem with given variables.

    The objective defaults to a constant-zero callable and the variable map
    is copied so callers keep ownership of their dict.
    """
    initial_vars = dict(variables) if variables else {}
    return LocalProblem(id=id, objective=lambda v, s: 0.0, variables=initial_vars)

View File

@ -1,33 +1,90 @@
"""A lightweight mission ledger with optional anchoring capability.
"""Lightweight decision ledger for NovaPlan MVP.
- append-only log of decisions
- optional anchoring to an external ground link (simulated for MVP)
This module provides a tiny, auditable ledger to record PlanDelta events
and governance actions. It is intentionally simple but deterministic and
easy to extend for anchoring to ground links in the future.
"""
from __future__ import annotations
from datetime import datetime
from typing import List
from dataclasses import dataclass, field
import time
from .contracts import AuditLog
@dataclass
class LedgerEntry:
    """Single ledger entry representing a governance decision or delta."""

    # Sequential identifier assigned by the Ledger (e.g. "entry-1").
    id: str
    # Contract this entry is recorded under.
    contract_id: str
    # Arbitrary entry payload.
    payload: dict
    # Creation time; defaults to the current UNIX time.
    timestamp: float = field(default_factory=lambda: time.time())
    # Optional identity of the signer.
    signer: str | None = None
    # Optional ground-link anchor tag.
    anchor: str | None = None
class Ledger:
    """In-memory ledger for NovaPlan MVP.

    Provides append and query capabilities. In a production system this would be
    backed by a durable store and possibly anchored to ground links.
    """

    def __init__(self) -> None:
        # Append-only list; only append() mutates it.
        self._entries: List[LedgerEntry] = []

    def append(
        self,
        contract_id: str,
        payload: dict,
        signer: str | None = None,
        anchor: str | None = None,
    ) -> LedgerEntry:
        """Create, store, and return a new entry with a sequential id."""
        entry = LedgerEntry(
            id=f"entry-{len(self._entries) + 1}",
            contract_id=contract_id,
            payload=payload,
            signer=signer,
            anchor=anchor,
        )
        self._entries.append(entry)
        return entry

    def last(self) -> LedgerEntry | None:
        """Return the most recently appended entry, or None when empty."""
        return self._entries[-1] if self._entries else None

    def entries(self) -> List[LedgerEntry]:
        """Return a snapshot copy of all entries."""
        return list(self._entries)

    # Convenience: append an AuditLog entry (legacy API) or 2-arg style
    def log(self, audit_or_entry, contract_id: str | None = None, anchor: str | None = None) -> LedgerEntry:
        """Record an entry via either the legacy AuditLog path or the 2-arg path.

        Legacy path: ``log(audit_log)`` — contract_id and signer come from the
        AuditLog instance itself.  New path: ``log(entry, contract_id, anchor=...)``.

        Raises:
            TypeError: if a non-AuditLog entry is given without a contract_id.
        """
        # Legacy path: audit is an AuditLog instance and contract_id is provided via audit
        if isinstance(audit_or_entry, AuditLog) and contract_id is None:
            audit: AuditLog = audit_or_entry
            payload = {
                "entry": audit.entry,
                "signer": audit.signer,
                "timestamp": audit.timestamp,
                "contract_id": audit.contract_id,
            }
            return self.append(audit.contract_id, payload, signer=audit.signer)
        # New path: direct entry with contract_id and optional anchor
        if contract_id is None:
            raise TypeError("contract_id must be provided when calling log with (entry, contract_id, anchor=...) signature")
        payload = {
            "entry": audit_or_entry,
            "anchor": anchor,
            "timestamp": time.time(),
        }
        return self.append(contract_id, payload, signer=None, anchor=anchor)

    # New API: allow tagging an anchor to an entry for ground-link anchoring
    def log_with_anchor(self, entry: str, contract_id: str, anchor: str | None = None) -> LedgerEntry:
        """Record an entry and tag it with an optional ground-link anchor."""
        payload = {
            "entry": entry,
            "anchor": anchor,
            "timestamp": time.time(),
        }
        return self.append(contract_id, payload, signer=None, anchor=anchor)

    def last_anchor(self) -> str | None:
        """Return the anchor of the most recent entry, or None when empty/unset."""
        if not self._entries:
            return None
        # Return the anchor of the most recent entry if present
        return self._entries[-1].anchor