build(agent): molt-x#ed374b iteration
This commit is contained in:
parent
13b01cf5ec
commit
c703bea9d7
|
|
@ -0,0 +1,21 @@
|
||||||
|
node_modules/
|
||||||
|
.npmrc
|
||||||
|
.env
|
||||||
|
.env.*
|
||||||
|
__tests__/
|
||||||
|
coverage/
|
||||||
|
.nyc_output/
|
||||||
|
dist/
|
||||||
|
build/
|
||||||
|
.cache/
|
||||||
|
*.log
|
||||||
|
.DS_Store
|
||||||
|
tmp/
|
||||||
|
.tmp/
|
||||||
|
__pycache__/
|
||||||
|
*.pyc
|
||||||
|
.venv/
|
||||||
|
venv/
|
||||||
|
*.egg-info/
|
||||||
|
.pytest_cache/
|
||||||
|
READY_TO_PUBLISH
|
||||||
|
|
@ -0,0 +1,12 @@
|
||||||
|
# MercuryMesh Agents
|
||||||
|
|
||||||
|
- Architecture: Lightweight, modular MVP focused on canonical primitives, federated coordination, and deterministic replay.
|
||||||
|
- Tech stack: Python 3.8+ (dataclasses), minimal dependencies; tests with pytest.
|
||||||
|
- Testing commands: `pytest -q` and packaging check with `python3 -m build`.
|
||||||
|
- Rules:
|
||||||
|
- Run tests locally; ensure all tests pass before publishing.
|
||||||
|
- File responsibilities:
|
||||||
|
- core.py: core data models and federated coordinator logic
|
||||||
|
- __init__.py: public API export
|
||||||
|
- tests/: unit tests validating core components
|
||||||
|
- README.md: project overview and usage
|
||||||
21
README.md
21
README.md
|
|
@ -1,3 +1,20 @@
|
||||||
# mercurymesh-federated-reproducible-marke
|
# MercuryMesh Federated Reproducible Market Sandbox (MVP)
|
||||||
|
|
||||||
A portable, open-source software stack that enables researchers and practitioners to design, simulate, and reproduce cross-venue market microstructure experiments with multiple assets. MercuryMesh provides a canonical contract-driven representation of market state, shared signals, plan deltas, and audit logs.
|
Overview
|
||||||
|
- A minimal, portable Python-based MVP for a federated, contract-driven market microstructure sandbox.
|
||||||
|
- Primitives: MarketStateSnapshot, SharedSignals, PlanDelta, AuditLog.
|
||||||
|
- Coordination: FederatedCoordinator provides a tiny, safe aggregation layer to mimic cross-venue signal exchange.
|
||||||
|
- Deterministic replay and auditability are scaffolded around the core data contracts.
|
||||||
|
|
||||||
|
What you get in this MVP
|
||||||
|
- A reproducible Python package: mercurymesh_federated_reproducible_marke
|
||||||
|
- Lightweight, testable contracts and a toy federated coordinator
|
||||||
|
- Basic tests ensuring serialization, coordination, and data contracts work as intended
|
||||||
|
- A test script (test.sh) to run tests and packaging checks
|
||||||
|
|
||||||
|
Usage
|
||||||
|
- Install: python3 -m build (outside-of-repo tooling) and pip install dist/*
|
||||||
|
- Run tests: bash test.sh
|
||||||
|
|
||||||
|
Notes
|
||||||
|
- This is a starter MVP meant for extension. It intentionally focuses on clarity and correct packaging wiring, not production-grade performance or security.
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,19 @@
|
||||||
|
"""MercuryMesh Federated Reproducible Market Sandbox (MVP)
|
||||||
|
|
||||||
|
Public API (minimal):
|
||||||
|
- MarketStateSnapshot
|
||||||
|
- SharedSignals
|
||||||
|
- PlanDelta
|
||||||
|
- AuditLog
|
||||||
|
- FederatedCoordinator for simple cross-venue signal aggregation
|
||||||
|
"""
|
||||||
|
|
||||||
|
from .core import MarketStateSnapshot, SharedSignals, PlanDelta, AuditLog, FederatedCoordinator
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"MarketStateSnapshot",
|
||||||
|
"SharedSignals",
|
||||||
|
"PlanDelta",
|
||||||
|
"AuditLog",
|
||||||
|
"FederatedCoordinator",
|
||||||
|
]
|
||||||
|
|
@ -0,0 +1,114 @@
|
||||||
|
"""Core data models and a tiny federated coordinator for MercuryMesh MVP.
|
||||||
|
|
||||||
|
- MarketStateSnapshot: per-asset local state snapshot
|
||||||
|
- SharedSignals: aggregated liquidity/demand metrics
|
||||||
|
- PlanDelta: updates to quotes/hedges (the delta allocations)
|
||||||
|
- AuditLog: verifiable provenance for actions
|
||||||
|
- FederatedCoordinator: simple aggregation to mimic cross-venue coordination
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from dataclasses import dataclass, asdict
|
||||||
|
from typing import Dict, List, Any
|
||||||
|
from datetime import datetime
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
def _now_iso() -> str:
|
||||||
|
return datetime.utcnow().isoformat() + "Z"
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class MarketStateSnapshot:
    """One venue's local market state at a point in time.

    Bundles per-asset quote levels, trade recency, and liquidity metrics
    with a snapshot timestamp and a contract version number.
    """

    assets: List[str]  # assets covered by this snapshot
    bids: Dict[str, float]  # asset -> price level or aggregate bid value
    offers: Dict[str, float]  # asset -> price level or aggregate offer value
    last_trade_times: Dict[str, str]  # asset -> ISO timestamp
    liquidity_metrics: Dict[str, float]  # asset -> liquidity metric value
    timestamp: str  # ISO timestamp of the snapshot itself
    version: int  # data-contract version

    def to_dict(self) -> Dict[str, Any]:
        """Return a plain-dict (recursively converted) view of the snapshot."""
        return asdict(self)

    def to_json(self) -> str:
        """Render the snapshot as a JSON document."""
        return json.dumps(self.to_dict())
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class SharedSignals:
    """An aggregated cross-venue signal (e.g. a liquidity/demand metric)."""

    metric: str  # name of the aggregated metric
    value: float  # point estimate for the metric
    distribution: List[float]  # discretized distribution over the metric
    confidence: float  # confidence attached to the estimate
    version: int  # data-contract version
    source: str  # originating venue or simulator

    def to_dict(self) -> Dict[str, Any]:
        """Return a plain-dict view of the signal."""
        return asdict(self)

    def to_json(self) -> str:
        """JSON-encode the signal."""
        return json.dumps(self.to_dict())
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class PlanDelta:
    """A delta update to quotes/hedges produced by coordination."""

    delta: Dict[str, float]  # key -> delta allocation
    timestamp: str  # ISO timestamp when the delta was produced
    author: str  # producer of the delta (e.g. "coordinator")
    contract_id: str  # identifier of the contract this delta applies to
    privacy_budget: float  # privacy budget attached to this update

    def to_dict(self) -> Dict[str, Any]:
        """Return a plain-dict view of the delta."""
        return asdict(self)

    def to_json(self) -> str:
        """JSON-encode the delta."""
        return json.dumps(self.to_dict())
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class AuditLog:
    """A verifiable provenance record for a single action."""

    entry: str  # description of the recorded action
    signer: str  # identity that signed/authored the entry
    timestamp: str  # ISO timestamp of the entry
    contract_id: str  # contract the entry relates to
    version: int  # data-contract version

    def to_dict(self) -> Dict[str, Any]:
        """Return a plain-dict view of the log entry."""
        return asdict(self)

    def to_json(self) -> str:
        """JSON-encode the log entry."""
        return json.dumps(self.to_dict())
|
||||||
|
|
||||||
|
|
||||||
|
class FederatedCoordinator:
    """Tiny in-process coordinator that aggregates signals from participants.

    A minimal stand-in for the real ADMM-like coordination layer: it simply
    averages the numeric deltas contributed by each participant.
    """

    @staticmethod
    def aggregate_signals(signals: List[Dict[str, float]]) -> PlanDelta:
        """Average per-key values across all participant signal dicts.

        Returns a ``PlanDelta`` authored by ``"coordinator"`` under contract
        ``"root"``; an empty input list yields an empty delta.
        """
        totals: Dict[str, float] = {}
        for contribution in signals:
            for key, value in contribution.items():
                totals[key] = totals.get(key, 0.0) + value
        # Guard the divisor; with no signals `totals` is empty anyway, so the
        # result matches the original early-return of an empty delta.
        participant_count = float(len(signals)) if signals else 1.0
        averaged = {key: total / participant_count for key, total in totals.items()}
        return PlanDelta(
            delta=averaged,
            timestamp=_now_iso(),
            author="coordinator",
            contract_id="root",
            privacy_budget=0.0,
        )
|
||||||
|
|
@ -0,0 +1,17 @@
|
||||||
|
# Packaging metadata for the MercuryMesh MVP (PEP 517/518 + PEP 621).
[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "mercurymesh_federated_reproducible_marke"
version = "0.1.0"
description = "Federated, reproducible market microstructure sandbox MVP"
readme = "README.md"
license = {text = "MIT"}
requires-python = ">=3.8"
authors = [ { name = "OpenCode AI", email = "example@example.com" } ]
# NOTE(review): the core module shown in this repo imports only the standard
# library, and pytest is a test-time tool — numpy/typing-extensions/pytest may
# belong in an [project.optional-dependencies] test/dev group instead; confirm
# against the full repository before changing.
dependencies = [
    "numpy>=1.21",
    "typing-extensions>=3.7",
    "pytest>=7.0"
]
|
||||||
|
|
@ -0,0 +1,10 @@
|
||||||
|
#!/usr/bin/env bash
# Run the unit tests, then verify the package builds cleanly.
# -e: exit on error, -u: error on unset vars, -o pipefail: fail whole pipes.
set -euo pipefail

echo "Running tests..."
pytest -q

echo "Building package..."
# NOTE(review): requires the PyPA "build" package (pip install build) — confirm
# it is installed in the environment this script runs in.
python3 -m build

echo "All tests passed and package built."
|
||||||
|
|
@ -0,0 +1,74 @@
|
||||||
|
import json

# Prefer the normal package import; fall back to loading core.py by path so
# the tests also run when the package is not installed.
try:
    from mercurymesh_federated_reproducible_marke.core import (
        MarketStateSnapshot,
        SharedSignals,
        PlanDelta,
        AuditLog,
        FederatedCoordinator,
    )
except ModuleNotFoundError:
    # Fallback: load module directly from file path if package import fails in the environment
    import importlib.util
    import pathlib

    # parents[1] steps from tests/ up to the repo root, where the package
    # directory containing core.py is expected to live.
    core_path = pathlib.Path(__file__).resolve().parents[1] / "mercurymesh_federated_reproducible_marke" / "core.py"
    spec = importlib.util.spec_from_file_location("mm_core", str(core_path))
    if spec is None or spec.loader is None:
        raise ImportError("Could not load core module from path")
    mm_core = importlib.util.module_from_spec(spec)
    # Ensure the module is discoverable via sys.modules for dataclass decoration
    import sys

    sys.modules[spec.name] = mm_core  # type: ignore
    spec.loader.exec_module(mm_core)  # type: ignore
    # Re-bind the names the tests below expect, mirroring the package import.
    MarketStateSnapshot = mm_core.MarketStateSnapshot
    SharedSignals = mm_core.SharedSignals
    PlanDelta = mm_core.PlanDelta
    AuditLog = mm_core.AuditLog
    FederatedCoordinator = mm_core.FederatedCoordinator
|
||||||
|
|
||||||
|
|
||||||
|
def test_market_state_serialization_roundtrip():
    """A snapshot must survive dict -> JSON -> parse without errors."""
    snapshot = MarketStateSnapshot(
        assets=["AAPL", "MSFT"],
        bids={"AAPL": 150.0, "MSFT": 300.0},
        offers={"AAPL": 151.0, "MSFT": 301.0},
        last_trade_times={"AAPL": "2026-01-01T00:00:00Z", "MSFT": "2026-01-01T00:00:01Z"},
        liquidity_metrics={"AAPL": 0.8, "MSFT": 0.9},
        timestamp="2026-01-01T00:00:00Z",
        version=1,
    )
    payload = json.dumps(snapshot.to_dict())
    json.loads(payload)
|
||||||
|
|
||||||
|
|
||||||
|
def test_shared_signals_roundtrip():
    """SharedSignals serializes to a non-empty JSON string."""
    signal = SharedSignals(
        metric="liquidity",
        value=0.75,
        distribution=[0.1, 0.3, 0.6],
        confidence=0.95,
        version=1,
        source="simulated",
    )
    assert signal.to_json()  # basic serialization
|
||||||
|
|
||||||
|
|
||||||
|
def test_audit_log_roundtrip():
    """AuditLog exposes its fields via to_dict and serializes to JSON."""
    record = AuditLog(
        entry="delta_applied",
        signer="tester",
        timestamp="2026-01-01T00:00:00Z",
        contract_id="root",
        version=1,
    )
    assert record.to_dict()["entry"] == "delta_applied"
    assert record.to_json()
|
||||||
|
|
||||||
|
|
||||||
|
def test_federated_coordinator_aggregation():
    """The coordinator averages per-key deltas across participants."""
    first = {"A": 1.0, "B": -0.5}
    second = {"A": 3.0, "B": 0.0}
    result = FederatedCoordinator.aggregate_signals([first, second])
    # Means: A -> (1.0 + 3.0) / 2, B -> (-0.5 + 0.0) / 2
    assert abs(result.delta["A"] - 2.0) < 1e-6
    assert abs(result.delta["B"] - (-0.25)) < 1e-6
|
||||||
Loading…
Reference in New Issue