build(agent): new-agents-3#dd492b iteration
This commit is contained in:
parent
b2dd75c92d
commit
96db8ae157
|
|
@ -5,6 +5,7 @@ This MVP provides core contracts, a registry scaffold, two starter adapters, a M
|
||||||
|
|
||||||
What’s included
|
What’s included
|
||||||
- Core data contracts: Signal, SignalDelta, ProvenanceProof, PrivacyBudget, AuditLog
|
- Core data contracts: Signal, SignalDelta, ProvenanceProof, PrivacyBudget, AuditLog
|
||||||
|
- New primitives: AggregatedSignal and PlanDelta for cross-venue analytics and incremental plan updates
|
||||||
- Graph-of-Contracts registry scaffold
|
- Graph-of-Contracts registry scaffold
|
||||||
- Two starter adapters: ExchangeA and BrokerB
|
- Two starter adapters: ExchangeA and BrokerB
|
||||||
- Merkle provenance module for verifiable signal anchoring
|
- Merkle provenance module for verifiable signal anchoring
|
||||||
|
|
|
||||||
|
|
@ -6,7 +6,7 @@ It is intentionally small but feature-complete for MVP deployment and
|
||||||
testing.
|
testing.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from .contracts import Signal, SignalDelta, ProvenanceProof, PrivacyBudget, AuditLog
|
from .contracts import Signal, SignalDelta, ProvenanceProof, PrivacyBudget, AuditLog, AggregatedSignal, PlanDelta
|
||||||
from .registry import GraphOfContractsRegistry
|
from .registry import GraphOfContractsRegistry
|
||||||
from .adapters import VenueAdapter, ExchangeVenueAdapterA, BrokerVenueAdapterB
|
from .adapters import VenueAdapter, ExchangeVenueAdapterA, BrokerVenueAdapterB
|
||||||
from .provenance import MerkleProvenance
|
from .provenance import MerkleProvenance
|
||||||
|
|
|
||||||
|
|
@ -60,3 +60,29 @@ class AuditLog:
|
||||||
|
|
||||||
def to_json(self) -> str:
|
def to_json(self) -> str:
|
||||||
return to_json(asdict(self))
|
return to_json(asdict(self))
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class AggregatedSignal:
    """Cross-venue aggregated signal with privacy budgeting.

    Combines per-venue signals into one feature vector and records how much
    of the privacy budget producing the aggregate consumed, plus an optional
    Merkle proof anchoring it for verification.
    """

    # Names of the venues whose signals were combined into this aggregate
    # (e.g. ["ExchangeA", "BrokerB"]).
    venue_set: List[str]
    # Aggregated feature name -> value, e.g. liquidity/volatility proxies.
    feature_vector: Dict[str, float]
    # Amount of privacy budget spent producing this aggregate.
    # NOTE(review): presumably an epsilon-style fraction in [0, 1] — confirm
    # against the PrivacyBudget contract.
    privacy_budget_used: float
    # Replay/ordering counter; defaults to 0 for freshly built aggregates.
    nonce: int = 0
    # Merkle inclusion-proof hashes anchoring this aggregate; empty when the
    # aggregate has not (yet) been anchored. default_factory avoids a shared
    # mutable default across instances.
    merkle_proof: List[str] = field(default_factory=list)

    def to_json(self) -> str:
        """Serialize all fields to a JSON string via the module's to_json helper."""
        return to_json(asdict(self))
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class PlanDelta:
    """Incremental updates to analytics plans (e.g., reweighting, windows)."""

    # Unique identifier for this delta (e.g. "delta-001").
    delta_id: str
    # Venue the plan update applies to.
    venue: str
    # When the delta was produced.
    # NOTE(review): appears to be an integer epoch value — unit (s vs ms)
    # is not established here; confirm against producers.
    timestamp: int
    # The actual plan changes, as free-form key -> value pairs
    # (e.g. {"weight": 0.7}).
    delta: Dict[str, Any]
    # Schema/contract version of this delta; defaults to 1.
    version: int = 1

    def to_json(self) -> str:
        """Serialize all fields to a JSON string via the module's to_json helper."""
        return to_json(asdict(self))
|
||||||
|
|
|
||||||
|
|
@ -27,3 +27,29 @@ def test_merkle_provenance_root_basic():
|
||||||
s2 = Signal(venue="B", timestamp=2, metrics={"m": 2}, version=1)
|
s2 = Signal(venue="B", timestamp=2, metrics={"m": 2}, version=1)
|
||||||
root = MerkleProvenance.merkle_root([s1, s2])
|
root = MerkleProvenance.merkle_root([s1, s2])
|
||||||
assert isinstance(root, str) and len(root) > 0
|
assert isinstance(root, str) and len(root) > 0
|
||||||
|
|
||||||
|
|
||||||
|
def test_aggregated_signal_dataclass_json_roundtrip():
    """AggregatedSignal construction and an actual JSON round-trip.

    Fix: the test was named "json_roundtrip" but never serialized the
    object — it only checked field presence. It now calls to_json() and
    verifies the parsed payload matches the constructed fields.
    """
    import json

    from mercurymesh_privacy_preserving_market_da.contracts import AggregatedSignal

    a = AggregatedSignal(
        venue_set=["ExchangeA", "BrokerB"],
        feature_vector={"liquidity_proxy": 0.42, "volatility_proxy": 0.15},
        privacy_budget_used=0.05,
        nonce=7,
    )
    # Ensure fields exist with the expected types/values.
    assert isinstance(a.venue_set, list)
    assert isinstance(a.feature_vector, dict)
    assert a.privacy_budget_used == 0.05
    # Defaulted field: merkle_proof should be an independent empty list.
    assert a.merkle_proof == []
    # The round-trip the test name promises: serialize, parse, compare.
    payload = json.loads(a.to_json())
    assert payload["venue_set"] == ["ExchangeA", "BrokerB"]
    assert payload["feature_vector"] == {"liquidity_proxy": 0.42, "volatility_proxy": 0.15}
    assert payload["privacy_budget_used"] == 0.05
    assert payload["nonce"] == 7
    assert payload["merkle_proof"] == []
|
||||||
|
|
||||||
|
|
||||||
|
def test_plan_delta_dataclass_json_roundtrip():
    """PlanDelta construction and an actual JSON round-trip.

    Fix: the test was named "json_roundtrip" but only asserted one field on
    the constructed object. It now calls to_json() and verifies the parsed
    payload matches every constructed field.
    """
    import json

    from mercurymesh_privacy_preserving_market_da.contracts import PlanDelta

    d = PlanDelta(
        delta_id="delta-001",
        venue="ExchangeA",
        timestamp=12345,
        delta={"weight": 0.7},
        version=1,
    )
    assert d.delta_id == "delta-001"
    # The round-trip the test name promises: serialize, parse, compare.
    payload = json.loads(d.to_json())
    assert payload["delta_id"] == "delta-001"
    assert payload["venue"] == "ExchangeA"
    assert payload["timestamp"] == 12345
    assert payload["delta"] == {"weight": 0.7}
    assert payload["version"] == 1
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue