build(agent): new-agents-3#dd492b iteration
This commit is contained in:
parent
cac2eba47d
commit
707203954c
10
README.md
10
README.md
|
|
@ -39,3 +39,13 @@ Roadmap (high level)
|
||||||
- A small DSL sketch (LocalProblem/SharedVariables/PlanDelta) and a Graph-of-Contracts registry.
|
- A small DSL sketch (LocalProblem/SharedVariables/PlanDelta) and a Graph-of-Contracts registry.
|
||||||
|
|
||||||
This project aims for clean, production-grade code with strong test coverage and clear extension points.
|
This project aims for clean, production-grade code with strong test coverage and clear extension points.
|
||||||
|
|
||||||
|
Adapters (Starter)
|
||||||
|
- This repo now includes two starter adapters to bootstrap cross-framework optimization flows:
|
||||||
|
- PyTorchAdapter and TensorFlowAdapter
|
||||||
|
- Path: catopt_flow_category_theoretic_compositi/adapters
|
||||||
|
- Each adapter implements a minimal adapt(local_problems) -> GlobalProblem using the core Planner.
|
||||||
|
- Example usage:
|
||||||
|
- from catopt_flow_category_theoretic_compositi.adapters import PyTorchAdapter
|
||||||
|
- adapter = PyTorchAdapter(budget_gpu=2.0)
|
||||||
|
- global_plan = adapter.adapt(local_problems)
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,4 @@
|
||||||
|
from .pytorch_adapter import PyTorchAdapter
|
||||||
|
from .tensorflow_adapter import TensorFlowAdapter
|
||||||
|
|
||||||
|
__all__ = ["PyTorchAdapter", "TensorFlowAdapter"]
|
||||||
|
|
@ -0,0 +1,19 @@
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
from ..core import LocalProblem, GlobalProblem, Planner
|
||||||
|
|
||||||
|
|
||||||
|
class PyTorchAdapter:
    """Bridge from PyTorch-originated local problems into the shared planner.

    Hands a collection of ``LocalProblem`` instances to the core ``Planner``,
    which assembles a ``GlobalProblem`` under the configured GPU budget.
    """

    # Adapter identifier string for this framework.
    name = "pytorch"

    def __init__(self, budget_gpu: float = 1.0) -> None:
        """Store the GPU budget passed to the planner at adapt time.

        Args:
            budget_gpu: Total GPU budget forwarded to the core Planner.
        """
        self.budget_gpu = budget_gpu

    def adapt(self, local_problems: List[LocalProblem]) -> GlobalProblem:
        """Build a ``GlobalProblem`` from *local_problems* via the core Planner.

        The mapping is vendor-agnostic for now; PyTorch-specific resource
        modeling, hooks, and data-plane metadata could be layered in later.
        """
        budget = self.budget_gpu
        return Planner.build_global_plan(local_problems, budget)
|
||||||
|
|
@ -0,0 +1,17 @@
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
from ..core import LocalProblem, GlobalProblem, Planner
|
||||||
|
|
||||||
|
|
||||||
|
class TensorFlowAdapter:
    """Bridge from TensorFlow-originated local problems into the shared planner.

    Hands a collection of ``LocalProblem`` instances to the core ``Planner``,
    which assembles a ``GlobalProblem`` under the configured GPU budget.
    """

    # Adapter identifier string for this framework.
    name = "tensorflow"

    def __init__(self, budget_gpu: float = 1.0) -> None:
        """Store the GPU budget passed to the planner at adapt time.

        Args:
            budget_gpu: Total GPU budget forwarded to the core Planner.
        """
        self.budget_gpu = budget_gpu

    def adapt(self, local_problems: List[LocalProblem]) -> GlobalProblem:
        """Build a ``GlobalProblem`` from *local_problems* via the core Planner."""
        budget = self.budget_gpu
        return Planner.build_global_plan(local_problems, budget)
|
||||||
Loading…
Reference in New Issue