"""
Plugin Registry — Drop-in extension system for backends, tools, callbacks, and evaluators.
Makes the framework extensible without editing core code:
- Register new LLM backends
- Register new tools
- Register new callbacks
- Register new SLM models
- Register new evaluation metrics
Adding a new component = 1 file + 1 register() call.
Extension points:
BackendRegistry — LLM/SLM backends
ToolRegistry — Already exists in tools.py, re-exported here
CallbackRegistry — Observability callbacks
ModelRegistry — SLM model definitions (extends SLM_REGISTRY)
EmbeddingRegistry — Shared embedding backends (deduplicates embed logic)
Usage:
# In your extension file:
from purpose_agent.registry import backend_registry, model_registry
backend_registry.register("my_backend", MyCustomBackend)
model_registry.register("my-slm", ollama_name="my-model:latest", context_window=32768, description="My custom SLM")
# Then use it:
backend = backend_registry.create("my_backend", model="my-model")
slm = model_registry.create_backend("my-slm")
"""
from __future__ import annotations

import logging
import math
import zlib
from typing import Any, Callable, Type
logger = logging.getLogger(__name__)
# ---------------------------------------------------------------------------
# Generic Plugin Registry
# ---------------------------------------------------------------------------
class PluginRegistry:
"""
Generic registry for named plugins.
Supports:
- Register by name + class/factory
- Create instances by name
- List available plugins
- Discover plugins via entry points (future)
"""
def __init__(self, kind: str):
self.kind = kind
self._plugins: dict[str, Type | Callable] = {}
self._metadata: dict[str, dict[str, Any]] = {}
def register(
self, name: str, cls_or_factory: Type | Callable, **metadata
) -> "PluginRegistry":
"""Register a plugin by name."""
self._plugins[name] = cls_or_factory
self._metadata[name] = metadata
logger.debug(f"{self.kind} registry: registered '{name}'")
return self
def create(self, name: str, **kwargs) -> Any:
"""Create an instance of a registered plugin."""
if name not in self._plugins:
available = ", ".join(self._plugins.keys())
raise ValueError(
f"Unknown {self.kind} '{name}'. Available: {available}"
)
return self._plugins[name](**kwargs)
def get_class(self, name: str) -> Type | Callable | None:
"""Get the class/factory without instantiating."""
return self._plugins.get(name)
def list(self) -> list[dict[str, Any]]:
"""List all registered plugins with metadata."""
return [
{"name": name, **self._metadata.get(name, {})}
for name in self._plugins
]
def names(self) -> list[str]:
return list(self._plugins.keys())
def __contains__(self, name: str) -> bool:
return name in self._plugins
def __len__(self) -> int:
return len(self._plugins)
# ---------------------------------------------------------------------------
# Shared Embedding Utility — deduplicated from ExperienceReplay + ToolRegistry
# ---------------------------------------------------------------------------
class EmbeddingBackend:
    """
    Abstract embedding backend. Swap in sentence-transformers, OpenAI, etc.
    Default: lightweight trigram hashing (no dependencies, fast, approximate).
    """

    def __init__(self, dim: int = 128):
        # Dimensionality of the produced vectors.
        self.dim = dim

    def embed(self, text: str) -> list[float]:
        """Compute an L2-normalized trigram-hash embedding for ``text``.

        Override for real embeddings. Texts shorter than 3 characters
        produce the zero vector (no trigrams).
        """
        vec = [0.0] * self.dim
        text_lower = text.lower()
        for i in range(len(text_lower) - 2):
            trigram = text_lower[i:i + 3]
            # BUGFIX: builtin hash() is salted per process (PYTHONHASHSEED),
            # so embeddings computed in one run were incomparable with
            # embeddings persisted from another. crc32 is stable across
            # runs, platforms, and Python versions.
            h = zlib.crc32(trigram.encode("utf-8")) % self.dim
            vec[h] += 1.0
        magnitude = math.sqrt(sum(x * x for x in vec))
        if magnitude > 0:
            vec = [x / magnitude for x in vec]
        return vec

    @staticmethod
    def cosine_similarity(a: list[float], b: list[float]) -> float:
        """Cosine similarity; 0.0 for empty, mismatched, or zero vectors."""
        if not a or not b or len(a) != len(b):
            return 0.0
        dot = sum(x * y for x, y in zip(a, b))
        mag_a = math.sqrt(sum(x * x for x in a))
        mag_b = math.sqrt(sum(x * x for x in b))
        if mag_a == 0 or mag_b == 0:
            return 0.0
        return dot / (mag_a * mag_b)
# Shared singleton instance. Override by rebinding this module attribute,
# e.g. `registry.default_embedding = SentenceTransformerBackend(...)`.
default_embedding = EmbeddingBackend(dim=128)
# ---------------------------------------------------------------------------
# Pre-built Registries
# ---------------------------------------------------------------------------
# Backend registry — for LLM/SLM backend classes/factories.
backend_registry = PluginRegistry("Backend")
# Callback registry — for observability callbacks.
callback_registry = PluginRegistry("Callback")
# Model registry — extensible SLM model definitions.
model_registry = PluginRegistry("Model")
def _register_defaults() -> None:
    """Register the built-in backends, SLM models, and callbacks.

    Runs exactly once, at import time. Extensions add their own entries
    afterwards via the module-level registries.
    """
    # --- LLM backends (imports kept local to avoid import cycles) ---
    from purpose_agent.llm_backend import (
        MockLLMBackend, HFInferenceBackend, OpenAICompatibleBackend,
    )

    for backend_name, backend_cls, blurb in (
        ("mock", MockLLMBackend, "Deterministic mock for testing"),
        ("hf_inference", HFInferenceBackend, "HuggingFace Inference Providers"),
        ("openai", OpenAICompatibleBackend, "OpenAI-compatible API"),
    ):
        backend_registry.register(backend_name, backend_cls, description=blurb)

    # --- SLM backends ---
    from purpose_agent.slm_backends import OllamaBackend, LlamaCppBackend, SLM_REGISTRY

    for backend_name, backend_cls, blurb in (
        ("ollama", OllamaBackend, "Local Ollama serving"),
        ("llama_cpp", LlamaCppBackend, "Direct llama-cpp-python"),
    ):
        backend_registry.register(backend_name, backend_cls, description=blurb)

    def _make_factory(model_name: str, ctx_window: int):
        # Closure binds each model's name/context now, so every factory
        # keeps its own values (avoids the late-binding loop pitfall).
        def _factory(host: str = "http://localhost:11434"):
            return OllamaBackend(
                model=model_name, host=host, context_window=ctx_window,
                compress_prompts=True,
            )
        return _factory

    # Mirror the built-in SLM table into the extensible model registry.
    for model_key, (ollama_name, ctx_window, blurb) in SLM_REGISTRY.items():
        model_registry.register(
            model_key,
            _make_factory(ollama_name, ctx_window),
            ollama_name=ollama_name,
            context_window=ctx_window,
            description=blurb,
        )

    # --- Observability callbacks ---
    from purpose_agent.observability import LoggingCallback, MetricsCollector, JSONFileCallback

    for cb_name, cb_cls, blurb in (
        ("logging", LoggingCallback, "Log all events"),
        ("metrics", MetricsCollector, "Collect aggregate metrics"),
        ("jsonfile", JSONFileCallback, "Write events to JSONL file"),
    ):
        callback_registry.register(cb_name, cb_cls, description=blurb)


# Auto-register defaults on import
_register_defaults()