build(agent): molt-z#db0ec5 iteration

This commit is contained in:
agent-db0ec53c058f1326 2026-04-15 21:38:43 +02:00
parent 13b6b62880
commit 24bc9e4bc1
19 changed files with 344 additions and 3 deletions

21
.gitignore vendored Normal file
View File

@ -0,0 +1,21 @@
node_modules/
.npmrc
.env
.env.*
__tests__/
coverage/
.nyc_output/
dist/
build/
.cache/
*.log
.DS_Store
tmp/
.tmp/
__pycache__/
*.pyc
.venv/
venv/
*.egg-info/
.pytest_cache/
READY_TO_PUBLISH

1
AGENTS.md Normal file
View File

@ -0,0 +1 @@
# SignalCanvas Agents

View File

@ -1,3 +1 @@
# signalcanvas-graph-based-market-signal-s
Problem: Traders and risk teams struggle to visually compose, replay, and audit cross-venue market signals (price, depth, order flow, volatility, liquidity) and their impact on multi-asset hedging without opaque, siloed tools. Current dashboards are
# SignalCanvas Graph-Based Market Signal Studio (MVP)

16
pyproject.toml Normal file
View File

@ -0,0 +1,16 @@
[build-system]
requires = ["setuptools>=42", "wheel"]
build-backend = "setuptools.build_meta"
[project]
name = "signalcanvas_graph_based_market_signal_s"
version = "0.1.0"
description = "Graph-based market signal studio MVP for visualizing, replaying, and auditing cross-venue signals"
readme = "README.md"
requires-python = ">=3.8"
license = {file = "LICENSE"}
authors = [ { name = "OpenCode AI" } ]
dependencies = []
[tool.setuptools.packages.find]
where = ["."]

View File

@ -0,0 +1,20 @@
from .graph import Graph
from .models import SignalNode, Link, Scenario, HedgePlan
from .registry import GraphRegistry
from .backtester import replay_deltas
from .privacy import aggregate_signals
from .nlp import generate_narrative
from .ledger import GovernanceLedger
__all__ = [
"Graph",
"SignalNode",
"Link",
"Scenario",
"HedgePlan",
"GraphRegistry",
"replay_deltas",
"aggregate_signals",
"generate_narrative",
"GovernanceLedger",
]

View File

@ -0,0 +1,41 @@
from typing import Dict, Any, Generator
import random
import time
class FixFeedAdapter:
"""Minimal FIX/WebSocket-like feed adapter (simulated)."""
def __init__(self, asset: str, venue: str = "FIX-Feed"):
self.asset = asset
self.venue = venue
def stream(self) -> Generator[Dict[str, Any], None, None]:
t = 0.0
while t < 0.5: # short finite run for testability
t += random.uniform(0.05, 0.15)
yield {
"type": "price",
"asset": self.asset,
"venue": self.venue,
"timestamp": time.time(),
"value": random.uniform(100, 200),
}
time.sleep(0.01)
class SimulatedVenueAdapter:
"""Tiny simulated venue feed."""
def __init__(self, assets: list[str]):
self.assets = assets
def stream(self) -> Generator[Dict[str, Any], None, None]:
for _ in range(5):
a = random.choice(self.assets)
yield {
"type": "depth",
"asset": a,
"venue": "SimVenue",
"timestamp": time.time(),
"value": random.uniform(-1.0, 1.0),
}
time.sleep(0.02)

View File

@ -0,0 +1,10 @@
from typing import Dict, List
def replay_deltas(state: Dict[str, float], deltas: List[Dict[str, float]]) -> Dict[str, float]:
"""Simple additive replay of delta dictionaries onto a state map."""
new_state = dict(state)
for d in deltas:
for k, v in d.items():
new_state[k] = new_state.get(k, 0.0) + v
return new_state

View File

@ -0,0 +1,62 @@
from typing import Dict, List, Optional
from dataclasses import dataclass, field
@dataclass
class SignalNode:
id: str
type: str # e.g., price, depth, volatility, liquidity
asset: str
venue: str
timestamp: float
value: float
uncertainty: float = 0.0
@dataclass
class Link:
from_id: str
to_id: str
relation: str # e.g., lead-lag, dependency
weight: float = 1.0
@dataclass
class Scenario:
name: str
node_ids: List[str] = field(default_factory=list)
link_ids: List[Link] = field(default_factory=list)
@dataclass
class HedgePlan:
id: str
delta: Dict[str, float] # asset->delta amount
scenario_name: str
timestamp: float
class Graph:
def __init__(self):
self.nodes: Dict[str, SignalNode] = {}
self.links: List[Link] = []
self.scenarios: Dict[str, Scenario] = {}
self.hedges: Dict[str, HedgePlan] = {}
# Node ops
def add_node(self, node: SignalNode) -> None:
self.nodes[node.id] = node
def get_node(self, node_id: str) -> Optional[SignalNode]:
return self.nodes.get(node_id)
# Link ops
def add_link(self, link: Link) -> None:
self.links.append(link)
# Scenario ops
def add_scenario(self, scenario: Scenario) -> None:
self.scenarios[scenario.name] = scenario
def add_hedge(self, hedge: HedgePlan) -> None:
self.hedges[hedge.id] = hedge

View File

@ -0,0 +1,29 @@
from dataclasses import dataclass
from datetime import datetime
import hashlib
@dataclass
class LedgerEntry:
timestamp: str
plan_id: str
delta_summary: dict
signature: str # simplified verifier
class GovernanceLedger:
def __init__(self, signer_key: str = "mock-signer"):
self.entries: list[LedgerEntry] = []
self.signer_key = signer_key
def _sign(self, data: str) -> str:
# Simple deterministic mock signature
return hashlib.sha256((data + self.signer_key).encode()).hexdigest()
def append(self, plan_id: str, delta_summary: dict) -> LedgerEntry:
ts = datetime.utcnow().isoformat()
data = f"{plan_id}:{ts}:{delta_summary}"
sig = self._sign(data)
entry = LedgerEntry(timestamp=ts, plan_id=plan_id, delta_summary=delta_summary, signature=sig)
self.entries.append(entry)
return entry

View File

@ -0,0 +1,36 @@
from dataclasses import dataclass, field
from typing import Dict, List
@dataclass
class SignalNode:
id: str
type: str # e.g., price, depth, volatility, liquidity
asset: str
venue: str
timestamp: float
value: float
uncertainty: float = 0.0
@dataclass
class Link:
from_id: str
to_id: str
relation: str
weight: float = 1.0
@dataclass
class Scenario:
name: str
nodes: List[str] = field(default_factory=list)
links: List[Link] = field(default_factory=list)
@dataclass
class HedgePlan:
id: str
delta: Dict[str, float]
scenario_name: str
timestamp: float

View File

@ -0,0 +1,13 @@
from datetime import datetime
def generate_narrative(plan_delta: dict, context: str = "") -> str:
"""Very lightweight narrative template anchored on plan deltas."""
if not plan_delta:
return "No changes in this run."
parts = [f"Delta changes at {datetime.utcnow().isoformat()}:"]
for asset, delta in plan_delta.items():
parts.append(f"- {asset}: delta {delta:+.4f}")
if context:
parts.append(f"Context: {context}")
return "\n".join(parts)

View File

@ -0,0 +1,19 @@
import math
import random
from typing import List, Dict
def aggregate_signals(signals: List[Dict[str, float]], budgets: float = 0.0) -> Dict[str, float]:
"""Simple privacy-preserving aggregation: sum signals, optionally add Gaussian DP noise."""
aggregate: Dict[str, float] = {}
for s in signals:
for k, v in s.items():
aggregate[k] = aggregate.get(k, 0.0) + v
if budgets and budgets > 0:
# apply simple DP noise to each key
noisy = {}
for k, v in aggregate.items():
noise = random.gauss(0, max(1.0, budgets))
noisy[k] = v + noise
aggregate = noisy
return aggregate

View File

@ -0,0 +1,12 @@
from typing import Type, Dict
class GraphRegistry:
def __init__(self):
self.adapters: Dict[str, Type] = {}
def register(self, name: str, adapter_cls: Type) -> None:
self.adapters[name] = adapter_cls
def get(self, name: str):
return self.adapters.get(name)

8
test.sh Normal file
View File

@ -0,0 +1,8 @@
#!/usr/bin/env bash
set -euo pipefail
export PYTHONPATH="${PYTHONPATH:+$PYTHONPATH:}/workspace/repo"
echo "Running unit tests..."
pytest -q
echo "Running Python build (PEP 517/520) to verify packaging..."
python3 -m build --wheel --no-isolation
echo "Tests and build completed."

9
tests/test_backtester.py Normal file
View File

@ -0,0 +1,9 @@
from signalcanvas_graph_based_market_signal_s.backtester import replay_deltas
def test_replay_deltas_basic():
state = {"AAPL": 100.0, "MSFT": 200.0}
deltas = [{"AAPL": -1.0}, {"MSFT": 5.0}, {"AAPL": 2.0}]
new_state = replay_deltas(state, deltas)
assert new_state["AAPL"] == 101.0
assert new_state["MSFT"] == 205.0

14
tests/test_delta_sync.py Normal file
View File

@ -0,0 +1,14 @@
from signalcanvas_graph_based_market_signal_s.backtester import replay_deltas
from signalcanvas_graph_based_market_signal_s.privacy import aggregate_signals
from signalcanvas_graph_based_market_signal_s.nlp import generate_narrative
def test_aggregate_and_narrative():
signals = [{"AAPL": 1.0}, {"AAPL": -0.2, "GOOG": 0.5}]
aggregated = aggregate_signals(signals, budgets=0.0)
# Without DP, we expect sums
assert abs(aggregated["AAPL"] - (1.0 - 0.2)) < 1e-9
assert aggregated["GOOG"] == 0.5
narrative = generate_narrative({"AAPL": 0.8})
assert isinstance(narrative, str)

17
tests/test_graph.py Normal file
View File

@ -0,0 +1,17 @@
import time
from signalcanvas_graph_based_market_signal_s import Graph, SignalNode, Link, HedgePlan
def test_graph_basic():
g = Graph()
n1 = SignalNode(id="n1", type="price", asset="AAPL", venue="X", timestamp=time.time(), value=150.0)
n2 = SignalNode(id="n2", type="depth", asset="AAPL", venue="X", timestamp=time.time(), value=1.5)
g.add_node(n1)
g.add_node(n2)
l = Link(from_id="n1", to_id="n2", relation="lead-lag", weight=1.0)
g.add_link(l)
s = HedgePlan(id="h1", delta={"AAPL": 2.0}, scenario_name="sc1", timestamp=time.time())
g.add_hedge(s)
assert g.get_node("n1").id == "n1"
assert g.links[0].from_id == "n1"
assert g.hedges["h1"].delta["AAPL"] == 2.0

8
tests/test_nlp.py Normal file
View File

@ -0,0 +1,8 @@
from signalcanvas_graph_based_market_signal_s.nlp import generate_narrative
def test_narrative_generation_basic():
plan = {"AAPL": 1.23}
narrative = generate_narrative(plan)
assert isinstance(narrative, str)
assert "AAPL" in narrative

7
tests/test_privacy.py Normal file
View File

@ -0,0 +1,7 @@
from signalcanvas_graph_based_market_signal_s.privacy import aggregate_signals
def test_privacy_aggregation_no_noise():
signals = [{"x": 1.0}, {"x": 2.0}]
agg = aggregate_signals(signals, budgets=0.0)
assert agg["x"] == 3.0