[build-system]
requires = ["setuptools>=68.0", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "purpose-agent"
version = "3.0.1"
description = "A local-first self-improvement kernel for agents. Turns traces into tested memory so agents improve without fine-tuning."
readme = "README.md"
license = {text = "MIT"}
requires-python = ">=3.10"
authors = [{name = "Rohan03"}]
keywords = ["agents", "self-improving", "slm", "llm", "memory", "rl", "local-first", "events", "streaming"]
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
]
# Zero core dependencies — stdlib only
dependencies = []

[project.optional-dependencies]
ollama = ["ollama>=0.4.0"]
openai = ["openai>=1.0.0"]
anthropic = ["anthropic>=0.40.0"]
google = ["google-genai>=1.0.0"]
hf = ["huggingface_hub>=0.25.0"]
llama-cpp = ["llama-cpp-python>=0.3.0"]
all = [
    "ollama>=0.4.0",
    "openai>=1.0.0",
    "anthropic>=0.40.0",
    "google-genai>=1.0.0",
    "huggingface_hub>=0.25.0",
    "llama-cpp-python>=0.3.0",
]
dev = [
    "pytest>=8.0",
    "ruff>=0.8.0",
]

[project.scripts]
purpose-agent = "purpose_agent.easy:quickstart"

[project.urls]
Homepage = "https://huggingface.co/Rohan03/purpose-agent"
Repository = "https://huggingface.co/Rohan03/purpose-agent"
Documentation = "https://huggingface.co/Rohan03/purpose-agent/blob/main/ARCHITECTURE.md"

[tool.setuptools.packages.find]
include = ["purpose_agent*"]