feat: Phase 1 — Spore Agent Commons foundation
koi-net FullNode integration with custom Spore RID types (Holon, Claim, Evidence, Attestation, Intent, Commitment). FastAPI app with health, holons, and governance endpoints. PostgreSQL schema with pgvector for entities, governance DAG, holons, events, and federation peers. Governance memory layer with YAML frontmatter parser, DAG construction, and cycle detection. Docker Compose with pgvector, Redis, Traefik labels for commons.jeffemmett.com. Infisical entrypoint for secret injection. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
commit
82ac55624d
|
|
@ -0,0 +1,4 @@
|
|||
# Infisical (only these two go in .env — everything else fetched at runtime)
|
||||
INFISICAL_CLIENT_ID=
|
||||
INFISICAL_CLIENT_SECRET=
|
||||
INFISICAL_PROJECT_SLUG=spore-commons
|
||||
|
|
@ -0,0 +1,20 @@
|
|||
__pycache__/
|
||||
*.py[cod]
|
||||
*.egg-info/
|
||||
dist/
|
||||
build/
|
||||
.eggs/
|
||||
*.egg
|
||||
.env
|
||||
.env.*
|
||||
!.env.example
|
||||
.rid_cache/
|
||||
priv_key.pem
|
||||
*.pem
|
||||
.venv/
|
||||
venv/
|
||||
node_modules/
|
||||
.DS_Store
|
||||
*.log
|
||||
postgres_data/
|
||||
redis_data/
|
||||
|
|
@ -0,0 +1,24 @@
|
|||
Peer Production License (PPL)
|
||||
Version 1.0
|
||||
|
||||
Copyright (c) 2026 Jeff Emmett
|
||||
|
||||
This work is free to use, modify, and distribute under the following conditions:
|
||||
|
||||
1. NON-COMMERCIAL USE: Any individual or organization may use, copy, modify, and
|
||||
distribute this work and derivative works for non-commercial purposes.
|
||||
|
||||
2. COOPERATIVE/COMMONS USE: Worker-owned cooperatives, collectives, and commons-based
|
||||
peer production communities may use this work for any purpose, including commercial.
|
||||
|
||||
3. COMMERCIAL USE RESTRICTION: Commercial entities that are not worker-owned
|
||||
cooperatives must obtain a separate license. All gains from commercial use must
|
||||
be distributed among worker-owners of the entity.
|
||||
|
||||
4. SHARE-ALIKE: Derivative works must be licensed under this same license or a
|
||||
compatible copyfair license.
|
||||
|
||||
5. ATTRIBUTION: You must give appropriate credit, provide a link to the license,
|
||||
and indicate if changes were made.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED.
|
||||
|
|
@ -0,0 +1,97 @@
|
|||
services:
  # ============================================================
  # Spore Node (FastAPI + koi-net)
  # ============================================================
  spore-node:
    build:
      context: ./node
      dockerfile: Dockerfile
    container_name: spore-node
    restart: unless-stopped
    environment:
      # Infisical
      INFISICAL_CLIENT_ID: ${INFISICAL_CLIENT_ID:?INFISICAL_CLIENT_ID must be set}
      INFISICAL_CLIENT_SECRET: ${INFISICAL_CLIENT_SECRET:?INFISICAL_CLIENT_SECRET must be set}
      INFISICAL_PROJECT_SLUG: spore-commons
      # Database (injected by Infisical, fallback for local dev)
      POSTGRES_HOST: spore-db
      # Number-looking env values are quoted so YAML keeps them as strings.
      POSTGRES_PORT: "5432"
      POSTGRES_USER: spore
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-spore_dev_password}
      POSTGRES_DB: spore_commons
      # Redis
      REDIS_URL: redis://spore-redis:6379
      # Node config
      NODE_NAME: spore-commons
      NODE_PORT: "8351"
      # AI (LiteLLM on ai-internal network)
      EMBEDDING_API_URL: http://litellm:4000
    volumes:
      - node_data:/data
    depends_on:
      spore-db:
        condition: service_healthy
      spore-redis:
        condition: service_healthy
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.spore-commons.rule=Host(`commons.jeffemmett.com`)"
      - "traefik.http.routers.spore-commons.entrypoints=web"
      - "traefik.http.services.spore-commons.loadbalancer.server.port=8351"
      - "traefik.docker.network=traefik-public"
    networks:
      - spore-internal
      - traefik-public
      - ai-internal

  # ============================================================
  # PostgreSQL with pgvector
  # ============================================================
  spore-db:
    image: pgvector/pgvector:pg16
    container_name: spore-db
    restart: unless-stopped
    environment:
      POSTGRES_USER: spore
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-spore_dev_password}
      POSTGRES_DB: spore_commons
    volumes:
      - postgres_data:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U spore -d spore_commons"]
      interval: 10s
      timeout: 5s
      retries: 5
    networks:
      - spore-internal

  # ============================================================
  # Redis (job queue + federation relay)
  # ============================================================
  spore-redis:
    image: redis:7-alpine
    container_name: spore-redis
    restart: unless-stopped
    command: redis-server --appendonly yes
    volumes:
      - redis_data:/data
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 10s
      timeout: 5s
      retries: 5
    networks:
      - spore-internal

volumes:
  node_data:
  postgres_data:
  redis_data:

networks:
  spore-internal:
    driver: bridge
  traefik-public:
    external: true
  ai-internal:
    external: true
|
||||
|
|
@ -0,0 +1,24 @@
|
|||
FROM python:3.12-slim

WORKDIR /app

# Unbuffered stdout/stderr so logs reach `docker logs` immediately;
# skip writing .pyc files into the image layers.
ENV PYTHONUNBUFFERED=1 \
    PYTHONDONTWRITEBYTECODE=1

# System deps for asyncpg and cryptography
RUN apt-get update && apt-get install -y --no-install-recommends \
    gcc libpq-dev && \
    rm -rf /var/lib/apt/lists/*

# Install dependencies before copying sources so code changes do not
# invalidate the pip layer cache.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh

COPY spore_node/ ./spore_node/

# Data directory for koi-net cache + keys
RUN mkdir -p /data

EXPOSE 8351

# Entrypoint injects Infisical secrets, then execs the CMD.
ENTRYPOINT ["/entrypoint.sh"]
CMD ["python", "-m", "spore_node", "--standalone"]
|
||||
|
|
@ -0,0 +1,67 @@
|
|||
#!/bin/sh
# Infisical secret injection entrypoint (Python)
# Fetches secrets from Infisical API and injects them as env vars before starting the app.
# Required env vars: INFISICAL_CLIENT_ID, INFISICAL_CLIENT_SECRET
# Optional: INFISICAL_PROJECT_SLUG, INFISICAL_ENV (default: prod),
#           INFISICAL_URL (default: http://infisical:8080)

set -e

export INFISICAL_URL="${INFISICAL_URL:-http://infisical:8080}"
export INFISICAL_ENV="${INFISICAL_ENV:-prod}"
# IMPORTANT: Set INFISICAL_PROJECT_SLUG in your docker-compose.yml
export INFISICAL_PROJECT_SLUG="${INFISICAL_PROJECT_SLUG:?INFISICAL_PROJECT_SLUG must be set}"

# Without credentials we degrade gracefully and start the app as-is.
if [ -z "$INFISICAL_CLIENT_ID" ] || [ -z "$INFISICAL_CLIENT_SECRET" ]; then
    echo "[infisical] No credentials set, starting without secret injection"
    exec "$@"
fi

echo "[infisical] Fetching secrets from ${INFISICAL_PROJECT_SLUG}/${INFISICAL_ENV}..."

EXPORTS=$(python3 -c "
import urllib.request, json, os, sys

base = os.environ['INFISICAL_URL']
slug = os.environ['INFISICAL_PROJECT_SLUG']
env = os.environ['INFISICAL_ENV']

try:
    data = json.dumps({'clientId': os.environ['INFISICAL_CLIENT_ID'], 'clientSecret': os.environ['INFISICAL_CLIENT_SECRET']}).encode()
    req = urllib.request.Request(f'{base}/api/v1/auth/universal-auth/login', data=data, headers={'Content-Type': 'application/json'})
    auth = json.loads(urllib.request.urlopen(req).read())
    token = auth.get('accessToken')
    if not token:
        print('[infisical] Auth failed', file=sys.stderr)
        sys.exit(1)

    req = urllib.request.Request(f'{base}/api/v3/secrets/raw?workspaceSlug={slug}&environment={env}&secretPath=/&recursive=true')
    req.add_header('Authorization', f'Bearer {token}')
    secrets = json.loads(urllib.request.urlopen(req).read())

    if 'secrets' not in secrets:
        print('[infisical] No secrets returned', file=sys.stderr)
        sys.exit(1)

    for s in secrets['secrets']:
        key = s['secretKey']
        # POSIX single-quote escaping: each ' becomes '\\'' (close quote,
        # escaped literal quote, reopen quote). The previous replacement
        # emitted '\\' which left the shell quote unterminated on eval.
        val = s['secretValue'].replace(\"'\", \"'\\\\''\")
        print(f\"export {key}='{val}'\")
except Exception as e:
    print(f'[infisical] Error: {e}', file=sys.stderr)
    sys.exit(1)
" 2>&1) || {
    echo "[infisical] WARNING: Failed to fetch secrets, starting with existing env vars"
    exec "$@"
}

# Only eval if the helper actually produced export lines; otherwise the
# captured output is an error message (stderr was merged above).
if echo "$EXPORTS" | grep -q "^export "; then
    COUNT=$(echo "$EXPORTS" | grep -c "^export ")
    eval "$EXPORTS"
    echo "[infisical] Injected ${COUNT} secrets"
else
    echo "[infisical] WARNING: $EXPORTS"
    echo "[infisical] Starting with existing env vars"
fi

exec "$@"
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
koi-net>=2.0.4
|
||||
asyncpg>=0.29.0
|
||||
arq>=0.26.0
|
||||
python-frontmatter>=1.1.0
|
||||
pydantic>=2.0.0
|
||||
pydantic-settings>=2.0.0
|
||||
httpx>=0.27.0
|
||||
pyyaml>=6.0
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
"""Spore Agent Commons — koi-net node implementation."""
|
||||
|
||||
__version__ = "0.1.0"
|
||||
|
|
@ -0,0 +1,65 @@
|
|||
"""Entry point for Spore Agent Commons node.
|
||||
|
||||
Supports two modes:
|
||||
- Full mode (default): koi-net FullNode + Spore API
|
||||
- Standalone mode (--standalone): FastAPI only, no koi-net
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import sys
|
||||
|
||||
import uvicorn
|
||||
|
||||
from spore_node.config import SporeConfig, build_node_config
|
||||
from spore_node.db.connection import init_pool, close_pool
|
||||
from spore_node.api.main import mount_routers, create_standalone_app
|
||||
|
||||
# Configure root logging once at module import; all spore_node loggers
# inherit this format and INFO level.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s [%(name)s] %(levelname)s: %(message)s",
)
log = logging.getLogger("spore_node")
|
||||
|
||||
|
||||
def run_standalone(cfg: SporeConfig) -> None:
    """Serve the Spore API alone, without a koi-net node.

    The standalone app manages the DB pool through its lifespan hook.
    """
    standalone_app = create_standalone_app(cfg)
    uvicorn.run(
        standalone_app,
        host="0.0.0.0",
        port=cfg.node_port,
        log_level="info",
    )
|
||||
|
||||
|
||||
def run_full(cfg: SporeConfig) -> None:
    """Run in full mode — koi-net node + Spore API.

    Creates the koi-net node, mounts the Spore routers on its FastAPI app,
    initializes the DB pool before the node starts, and closes the pool
    when the node stops.
    """
    from spore_node.node import create_node

    node = create_node(cfg)

    # Mount Spore routers on the koi-net FastAPI app
    mount_routers(node.server.app)

    # asyncio.get_event_loop() is deprecated when no loop is running
    # (Python 3.12, the image's interpreter). Create and install a loop
    # explicitly so pool init and close run on the same loop.
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)

    # Initialize DB before node starts
    loop.run_until_complete(init_pool(cfg.database_url))

    log.info(f"Starting Spore Commons node on port {cfg.node_port}")
    try:
        # NOTE(review): node.run() is assumed to block until shutdown —
        # confirm against the koi-net FullNode API.
        node.run()
    finally:
        loop.run_until_complete(close_pool())
        loop.close()
|
||||
|
||||
|
||||
def main() -> None:
    """CLI entry point: choose standalone vs. full mode from argv."""
    cfg = SporeConfig()

    if "--standalone" in sys.argv:
        log.info("Running in standalone mode (no koi-net)")
        run_standalone(cfg)
        return

    log.info("Running in full mode (koi-net + Spore API)")
    run_full(cfg)


if __name__ == "__main__":
    main()
|
||||
|
|
@ -0,0 +1,48 @@
|
|||
"""FastAPI application factory for Spore Agent Commons.
|
||||
|
||||
This mounts our custom routers onto the koi-net node's FastAPI app.
|
||||
If running standalone (without koi-net), creates its own app.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from contextlib import asynccontextmanager
|
||||
from fastapi import FastAPI
|
||||
|
||||
from spore_node.config import SporeConfig
|
||||
from spore_node.db.connection import init_pool, close_pool
|
||||
from spore_node.api.routers import health, holons, governance
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def mount_routers(app: FastAPI) -> None:
    """Attach the Spore routers (health, holons, governance) to `app`."""
    for module in (health, holons, governance):
        app.include_router(module.router)
|
||||
|
||||
|
||||
def create_standalone_app(cfg: SporeConfig | None = None) -> FastAPI:
    """Build a self-contained FastAPI app (no koi-net node attached).

    Used for development/testing or when koi-net is optional. The DB pool
    is opened before serving and closed after shutdown via the lifespan
    context.
    """
    config = cfg if cfg is not None else SporeConfig()

    @asynccontextmanager
    async def _lifespan(app: FastAPI):
        log.info("Starting Spore Commons API...")
        await init_pool(config.database_url)
        yield
        await close_pool()
        log.info("Spore Commons API stopped.")

    application = FastAPI(
        title="Spore Agent Commons",
        version="0.1.0",
        description="Agent Commons — governance memory, knowledge graph, and federation layer",
        lifespan=_lifespan,
    )
    mount_routers(application)
    return application
|
||||
|
|
@ -0,0 +1,170 @@
|
|||
"""Governance document and DAG endpoints."""
|
||||
|
||||
import json
|
||||
from fastapi import APIRouter, HTTPException
|
||||
from pydantic import BaseModel
|
||||
|
||||
from spore_node.db.connection import get_pool
|
||||
from spore_node.governance.parser import parse_governance_doc, GovernanceDoc
|
||||
from spore_node.governance.dag import build_dag
|
||||
|
||||
router = APIRouter(prefix="/governance", tags=["governance"])
|
||||
|
||||
|
||||
class DocIngest(BaseModel):
    """Request body for POST /governance/docs."""

    content: str  # Raw markdown with YAML frontmatter
|
||||
|
||||
|
||||
class DocResponse(BaseModel):
    """Serialized governance document plus its outgoing dependency edges."""

    doc_id: str
    doc_kind: str
    title: str
    status: str
    depends_on: list[str]  # doc_ids this document depends on
    body: str  # markdown body (frontmatter stripped by the parser)
|
||||
|
||||
|
||||
@router.post("/docs", response_model=DocResponse, status_code=201)
|
||||
async def ingest_doc(data: DocIngest):
|
||||
"""Ingest a governance document (markdown with YAML frontmatter)."""
|
||||
try:
|
||||
doc = parse_governance_doc(data.content)
|
||||
except ValueError as e:
|
||||
raise HTTPException(422, str(e))
|
||||
|
||||
pool = get_pool()
|
||||
|
||||
# Upsert doc
|
||||
await pool.execute(
|
||||
"""
|
||||
INSERT INTO governance_docs (doc_id, doc_kind, title, status, body, frontmatter)
|
||||
VALUES ($1, $2, $3, $4, $5, $6::jsonb)
|
||||
ON CONFLICT (doc_id) DO UPDATE SET
|
||||
doc_kind = EXCLUDED.doc_kind,
|
||||
title = EXCLUDED.title,
|
||||
status = EXCLUDED.status,
|
||||
body = EXCLUDED.body,
|
||||
frontmatter = EXCLUDED.frontmatter,
|
||||
updated_at = now()
|
||||
""",
|
||||
doc.doc_id, doc.doc_kind, doc.title, doc.status, doc.body,
|
||||
json.dumps(doc.frontmatter),
|
||||
)
|
||||
|
||||
# Replace dependency edges
|
||||
await pool.execute(
|
||||
"DELETE FROM governance_deps WHERE from_doc = $1", doc.doc_id
|
||||
)
|
||||
for dep in doc.depends_on:
|
||||
# Ensure target exists (placeholder if needed)
|
||||
await pool.execute(
|
||||
"""
|
||||
INSERT INTO governance_docs (doc_id, doc_kind, title, status)
|
||||
VALUES ($1, 'unknown', '', 'placeholder')
|
||||
ON CONFLICT (doc_id) DO NOTHING
|
||||
""",
|
||||
dep,
|
||||
)
|
||||
await pool.execute(
|
||||
"INSERT INTO governance_deps (from_doc, to_doc) VALUES ($1, $2) ON CONFLICT DO NOTHING",
|
||||
doc.doc_id, dep,
|
||||
)
|
||||
|
||||
# Log event
|
||||
await pool.execute(
|
||||
"""
|
||||
INSERT INTO events (entity_rid, event_kind, payload)
|
||||
VALUES ($1, 'governance.doc.ingested', $2::jsonb)
|
||||
""",
|
||||
f"governance:{doc.doc_id}",
|
||||
json.dumps({"doc_id": doc.doc_id, "doc_kind": doc.doc_kind}),
|
||||
)
|
||||
|
||||
return doc.model_dump()
|
||||
|
||||
|
||||
@router.get("/docs", response_model=list[DocResponse])
|
||||
async def list_docs(doc_kind: str | None = None, status: str | None = None):
|
||||
"""List governance documents with optional filters."""
|
||||
pool = get_pool()
|
||||
query = "SELECT * FROM governance_docs WHERE doc_kind != 'unknown'"
|
||||
params = []
|
||||
idx = 1
|
||||
|
||||
if doc_kind:
|
||||
query += f" AND doc_kind = ${idx}"
|
||||
params.append(doc_kind)
|
||||
idx += 1
|
||||
if status:
|
||||
query += f" AND status = ${idx}"
|
||||
params.append(status)
|
||||
idx += 1
|
||||
|
||||
query += " ORDER BY created_at"
|
||||
rows = await pool.fetch(query, *params)
|
||||
|
||||
result = []
|
||||
for row in rows:
|
||||
deps = await pool.fetch(
|
||||
"SELECT to_doc FROM governance_deps WHERE from_doc = $1", row["doc_id"]
|
||||
)
|
||||
result.append(DocResponse(
|
||||
doc_id=row["doc_id"],
|
||||
doc_kind=row["doc_kind"],
|
||||
title=row["title"],
|
||||
status=row["status"],
|
||||
depends_on=[d["to_doc"] for d in deps],
|
||||
body=row["body"],
|
||||
))
|
||||
return result
|
||||
|
||||
|
||||
@router.get("/docs/{doc_id}", response_model=DocResponse)
|
||||
async def get_doc(doc_id: str):
|
||||
"""Get a single governance document."""
|
||||
pool = get_pool()
|
||||
row = await pool.fetchrow(
|
||||
"SELECT * FROM governance_docs WHERE doc_id = $1", doc_id
|
||||
)
|
||||
if not row:
|
||||
raise HTTPException(404, f"Document '{doc_id}' not found")
|
||||
|
||||
deps = await pool.fetch(
|
||||
"SELECT to_doc FROM governance_deps WHERE from_doc = $1", doc_id
|
||||
)
|
||||
return DocResponse(
|
||||
doc_id=row["doc_id"],
|
||||
doc_kind=row["doc_kind"],
|
||||
title=row["title"],
|
||||
status=row["status"],
|
||||
depends_on=[d["to_doc"] for d in deps],
|
||||
body=row["body"],
|
||||
)
|
||||
|
||||
|
||||
@router.get("/dag")
|
||||
async def get_dag():
|
||||
"""Get the full governance DAG."""
|
||||
pool = get_pool()
|
||||
rows = await pool.fetch(
|
||||
"SELECT * FROM governance_docs WHERE doc_kind != 'unknown' ORDER BY created_at"
|
||||
)
|
||||
|
||||
docs = []
|
||||
for row in rows:
|
||||
deps = await pool.fetch(
|
||||
"SELECT to_doc FROM governance_deps WHERE from_doc = $1", row["doc_id"]
|
||||
)
|
||||
docs.append(GovernanceDoc(
|
||||
doc_id=row["doc_id"],
|
||||
doc_kind=row["doc_kind"],
|
||||
title=row["title"],
|
||||
status=row["status"],
|
||||
depends_on=[d["to_doc"] for d in deps],
|
||||
body=row["body"],
|
||||
))
|
||||
|
||||
dag = build_dag(docs)
|
||||
result = dag.to_dict()
|
||||
result["errors"] = dag.validate()
|
||||
return result
|
||||
|
|
@ -0,0 +1,24 @@
|
|||
"""Health check endpoint."""
|
||||
|
||||
from fastapi import APIRouter
|
||||
from spore_node.db.connection import get_pool
|
||||
|
||||
router = APIRouter(tags=["health"])
|
||||
|
||||
|
||||
@router.get("/health")
|
||||
async def health():
|
||||
"""Node health check with DB connectivity."""
|
||||
db_ok = False
|
||||
try:
|
||||
pool = get_pool()
|
||||
async with pool.acquire() as conn:
|
||||
await conn.fetchval("SELECT 1")
|
||||
db_ok = True
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return {
|
||||
"status": "ok" if db_ok else "degraded",
|
||||
"db": "connected" if db_ok else "disconnected",
|
||||
}
|
||||
|
|
@ -0,0 +1,102 @@
|
|||
"""Holon management endpoints."""
|
||||
|
||||
import uuid
|
||||
from fastapi import APIRouter, HTTPException
|
||||
from pydantic import BaseModel
|
||||
|
||||
from spore_node.db.connection import get_pool
|
||||
from spore_node.rid_types import SporeHolon
|
||||
|
||||
router = APIRouter(prefix="/holons", tags=["holons"])
|
||||
|
||||
|
||||
class HolonCreate(BaseModel):
    """Request body for POST /holons."""

    slug: str  # unique URL-safe identifier; also seeds the holon's RID
    name: str
    holon_type: str = "agent"
    description: str = ""
    membrane_config: dict = {}  # stored as JSONB; Pydantic copies mutable defaults
    metadata: dict = {}
|
||||
|
||||
|
||||
class HolonResponse(BaseModel):
    """Holon row as returned by the API (UUID id rendered as a string)."""

    id: str
    rid: str
    slug: str
    name: str
    holon_type: str
    description: str
    membrane_config: dict
    metadata: dict
|
||||
|
||||
|
||||
@router.post("", response_model=HolonResponse, status_code=201)
|
||||
async def create_holon(data: HolonCreate):
|
||||
"""Register a new holon in the commons."""
|
||||
pool = get_pool()
|
||||
rid = str(SporeHolon(data.slug))
|
||||
|
||||
try:
|
||||
row = await pool.fetchrow(
|
||||
"""
|
||||
INSERT INTO holons (rid, slug, name, holon_type, description, membrane_config, metadata)
|
||||
VALUES ($1, $2, $3, $4, $5, $6::jsonb, $7::jsonb)
|
||||
RETURNING id, rid, slug, name, holon_type, description, membrane_config, metadata
|
||||
""",
|
||||
rid, data.slug, data.name, data.holon_type, data.description,
|
||||
_json(data.membrane_config), _json(data.metadata),
|
||||
)
|
||||
except Exception as e:
|
||||
if "unique" in str(e).lower():
|
||||
raise HTTPException(409, f"Holon '{data.slug}' already exists")
|
||||
raise
|
||||
|
||||
# Log event
|
||||
await pool.execute(
|
||||
"""
|
||||
INSERT INTO events (entity_rid, event_kind, payload)
|
||||
VALUES ($1, 'holon.created', $2::jsonb)
|
||||
""",
|
||||
rid, _json({"slug": data.slug, "name": data.name}),
|
||||
)
|
||||
|
||||
return _row_to_dict(row)
|
||||
|
||||
|
||||
@router.get("", response_model=list[HolonResponse])
|
||||
async def list_holons(holon_type: str | None = None):
|
||||
"""List all holons, optionally filtered by type."""
|
||||
pool = get_pool()
|
||||
if holon_type:
|
||||
rows = await pool.fetch(
|
||||
"SELECT * FROM holons WHERE holon_type = $1 ORDER BY created_at",
|
||||
holon_type,
|
||||
)
|
||||
else:
|
||||
rows = await pool.fetch("SELECT * FROM holons ORDER BY created_at")
|
||||
return [_row_to_dict(r) for r in rows]
|
||||
|
||||
|
||||
@router.get("/{slug}", response_model=HolonResponse)
|
||||
async def get_holon(slug: str):
|
||||
"""Get a single holon by slug."""
|
||||
pool = get_pool()
|
||||
row = await pool.fetchrow("SELECT * FROM holons WHERE slug = $1", slug)
|
||||
if not row:
|
||||
raise HTTPException(404, f"Holon '{slug}' not found")
|
||||
return _row_to_dict(row)
|
||||
|
||||
|
||||
def _row_to_dict(row) -> dict:
|
||||
import json
|
||||
d = dict(row)
|
||||
d["id"] = str(d["id"])
|
||||
for k in ("membrane_config", "metadata"):
|
||||
if isinstance(d[k], str):
|
||||
d[k] = json.loads(d[k])
|
||||
return d
|
||||
|
||||
|
||||
def _json(obj: dict) -> str:
|
||||
import json
|
||||
return json.dumps(obj)
|
||||
|
|
@ -0,0 +1,79 @@
|
|||
"""Configuration for Spore Agent Commons node."""
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
from pydantic_settings import BaseSettings
|
||||
from koi_net.config.full_node import FullNodeConfig
|
||||
from koi_net.config.koi_net_config import KoiNetConfig, NodeContact
|
||||
from koi_net.config.server_config import ServerConfig
|
||||
from koi_net.protocol.node import NodeProfile, NodeType, NodeProvides
|
||||
from rid_lib.core import RID
|
||||
|
||||
from .rid_types import SPORE_RID_TYPES, SporeHolon
|
||||
|
||||
|
||||
class SporeConfig(BaseSettings):
    """App-level config loaded from env vars.

    Field names map to environment variables case-insensitively with no
    prefix (see model_config); values below are local-dev defaults.
    """

    node_name: str = "spore-commons"
    node_port: int = 8351
    # Root for koi-net cache and key material inside the container.
    root_dir: Path = Path("/data")

    # Database
    postgres_host: str = "spore-db"
    postgres_port: int = 5432
    postgres_user: str = "spore"
    postgres_password: str = ""
    postgres_db: str = "spore_commons"

    # Redis
    redis_url: str = "redis://spore-redis:6379"

    # Federation
    # URL of the BKC first-contact node; empty string disables federation.
    bkc_node_url: str = ""

    # AI
    embedding_api_url: str = "http://litellm:4000"

    @property
    def database_url(self) -> str:
        """Assemble a postgresql:// DSN from the individual settings."""
        return (
            f"postgresql://{self.postgres_user}:{self.postgres_password}"
            f"@{self.postgres_host}:{self.postgres_port}/{self.postgres_db}"
        )

    model_config = {"env_prefix": "", "case_sensitive": False}
|
||||
|
||||
|
||||
def build_node_config(cfg: SporeConfig) -> FullNodeConfig:
    """Build koi-net FullNodeConfig from our SporeConfig.

    The node advertises every Spore RID type for both events and state,
    persists its RID cache and private key under cfg.root_dir, and serves
    the koi-net protocol under the /koi-net path prefix.
    """
    koi_net = KoiNetConfig(
        node_name=cfg.node_name,
        node_profile=NodeProfile(
            node_type=NodeType.FULL,
            provides=NodeProvides(
                event=list(SPORE_RID_TYPES),
                state=list(SPORE_RID_TYPES),
            ),
        ),
        rid_types_of_interest=list(SPORE_RID_TYPES),
        cache_directory_path=cfg.root_dir / ".rid_cache",
        private_key_pem_path=cfg.root_dir / "priv_key.pem",
    )

    # Add BKC as first contact if configured
    # NOTE(review): rid=None assumes NodeContact accepts an unknown peer
    # RID — confirm against the pinned koi-net version.
    if cfg.bkc_node_url:
        koi_net.first_contact = NodeContact(
            rid=None,
            url=cfg.bkc_node_url,
        )

    return FullNodeConfig(
        koi_net=koi_net,
        server=ServerConfig(
            host="0.0.0.0",
            port=cfg.node_port,
            path="/koi-net",
        ),
    )
|
||||
|
|
@ -0,0 +1,72 @@
|
|||
"""asyncpg connection pool for Spore Commons."""
|
||||
|
||||
import asyncpg
|
||||
import logging
|
||||
from pathlib import Path
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
_pool: asyncpg.Pool | None = None
|
||||
|
||||
|
||||
async def init_pool(database_url: str) -> asyncpg.Pool:
    """Initialize the global connection pool and run migrations.

    Idempotent: if the pool was already created, the existing pool is
    returned instead of opening (and leaking) a second one — the original
    overwrote the module global on every call.

    Args:
        database_url: postgresql:// DSN.

    Returns:
        The process-wide asyncpg pool.
    """
    global _pool
    if _pool is not None:
        return _pool

    _pool = await asyncpg.create_pool(
        database_url,
        min_size=2,
        max_size=10,
        command_timeout=30,
    )
    await _run_migrations(_pool)
    log.info("Database pool initialized")
    return _pool
|
||||
|
||||
|
||||
async def _run_migrations(pool: asyncpg.Pool) -> None:
    """Run SQL migration files in order.

    Applies each *.sql file under migrations/ (lexicographic order) exactly
    once, recording applied filenames in the _migrations table. Each file
    runs inside one transaction together with its tracking row, so a failed
    migration rolls back fully and is retried on next startup.
    """
    migrations_dir = Path(__file__).parent / "migrations"
    if not migrations_dir.exists():
        # No bundled migrations — nothing to do.
        return

    async with pool.acquire() as conn:
        # Create migrations tracking table
        await conn.execute("""
            CREATE TABLE IF NOT EXISTS _migrations (
                filename TEXT PRIMARY KEY,
                applied_at TIMESTAMPTZ DEFAULT now()
            )
        """)

        applied = {
            row["filename"]
            for row in await conn.fetch("SELECT filename FROM _migrations")
        }

        # Lexicographic sort — migration files must be named so this matches
        # the intended order (e.g. 001_..., 002_...).
        sql_files = sorted(migrations_dir.glob("*.sql"))
        for sql_file in sql_files:
            if sql_file.name in applied:
                continue
            log.info(f"Applying migration: {sql_file.name}")
            sql = sql_file.read_text()
            async with conn.transaction():
                await conn.execute(sql)
                await conn.execute(
                    "INSERT INTO _migrations (filename) VALUES ($1)",
                    sql_file.name,
                )
|
||||
|
||||
|
||||
def get_pool() -> asyncpg.Pool:
    """Return the process-wide connection pool.

    Raises:
        RuntimeError: if init_pool() has not completed yet.
    """
    pool = _pool
    if pool is None:
        raise RuntimeError("Database pool not initialized")
    return pool
|
||||
|
||||
|
||||
async def close_pool() -> None:
    """Dispose of the global pool if one exists; safe to call repeatedly."""
    global _pool
    if _pool is None:
        return
    await _pool.close()
    _pool = None
|
||||
|
|
@ -0,0 +1,93 @@
|
|||
-- Spore Agent Commons — Initial Schema
-- Requires: pgvector extension
--
-- Note: columns declared UNIQUE already get a btree index from the
-- constraint; the duplicate CREATE INDEX statements on rid/slug/
-- sequence_num were removed to avoid paying for two identical indexes
-- on every write.

CREATE EXTENSION IF NOT EXISTS vector;
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";

-- Universal entity table
CREATE TABLE IF NOT EXISTS entities (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    rid TEXT UNIQUE NOT NULL,
    kind TEXT NOT NULL,
    status TEXT NOT NULL DEFAULT 'active',
    content JSONB NOT NULL DEFAULT '{}',
    embedding vector(1024),
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);

-- rid is indexed by its UNIQUE constraint — no separate index needed.
CREATE INDEX IF NOT EXISTS idx_entities_kind ON entities (kind);
CREATE INDEX IF NOT EXISTS idx_entities_content ON entities USING GIN (content);

-- Governance specification documents (DAG)
CREATE TABLE IF NOT EXISTS governance_docs (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    doc_id TEXT UNIQUE NOT NULL,
    doc_kind TEXT NOT NULL,
    title TEXT NOT NULL DEFAULT '',
    status TEXT NOT NULL DEFAULT 'draft',
    body TEXT NOT NULL DEFAULT '',
    frontmatter JSONB NOT NULL DEFAULT '{}',
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);

CREATE INDEX IF NOT EXISTS idx_governance_docs_kind ON governance_docs (doc_kind);
CREATE INDEX IF NOT EXISTS idx_governance_docs_status ON governance_docs (status);

-- Governance DAG edges (depends_on relationships)
CREATE TABLE IF NOT EXISTS governance_deps (
    from_doc TEXT NOT NULL REFERENCES governance_docs(doc_id) ON DELETE CASCADE,
    to_doc TEXT NOT NULL REFERENCES governance_docs(doc_id) ON DELETE CASCADE,
    PRIMARY KEY (from_doc, to_doc)
);

-- The primary key covers (from_doc, to_doc); this serves reverse lookups.
CREATE INDEX IF NOT EXISTS idx_governance_deps_to ON governance_deps (to_doc);

-- Holons (agents, teams, organizations)
CREATE TABLE IF NOT EXISTS holons (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    rid TEXT UNIQUE NOT NULL,
    slug TEXT UNIQUE NOT NULL,
    name TEXT NOT NULL,
    holon_type TEXT NOT NULL DEFAULT 'agent',
    description TEXT NOT NULL DEFAULT '',
    membrane_config JSONB NOT NULL DEFAULT '{}',
    metadata JSONB NOT NULL DEFAULT '{}',
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);

-- slug is indexed by its UNIQUE constraint — no separate index needed.
CREATE INDEX IF NOT EXISTS idx_holons_type ON holons (holon_type);

-- Immutable append-only event log
CREATE TABLE IF NOT EXISTS events (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    sequence_num BIGSERIAL UNIQUE,
    entity_rid TEXT NOT NULL,
    event_kind TEXT NOT NULL,
    actor_rid TEXT,
    payload JSONB NOT NULL DEFAULT '{}',
    created_at TIMESTAMPTZ NOT NULL DEFAULT now()
);

CREATE INDEX IF NOT EXISTS idx_events_entity ON events (entity_rid);
-- sequence_num is indexed by its UNIQUE constraint — no separate index needed.
CREATE INDEX IF NOT EXISTS idx_events_kind ON events (event_kind);

-- Federation peer nodes
CREATE TABLE IF NOT EXISTS koi_peers (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    node_rid TEXT UNIQUE NOT NULL,
    node_url TEXT NOT NULL,
    trust_tier TEXT NOT NULL DEFAULT 'monitored',
    handshake_status TEXT NOT NULL DEFAULT 'pending',
    config JSONB NOT NULL DEFAULT '{}',
    last_seen_at TIMESTAMPTZ,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);

CREATE INDEX IF NOT EXISTS idx_koi_peers_trust ON koi_peers (trust_tier);
|
||||
|
|
@ -0,0 +1,106 @@
|
|||
"""Governance DAG construction and querying."""
|
||||
|
||||
from __future__ import annotations
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
from .parser import GovernanceDoc
|
||||
from .validator import detect_cycles
|
||||
|
||||
|
||||
@dataclass
class DAGNode:
    """One vertex in the governance DAG."""

    doc: GovernanceDoc
    children: list[str] = field(default_factory=list)  # doc_ids that depend on this doc
    parents: list[str] = field(default_factory=list)  # doc_ids this doc depends on
|
||||
|
||||
|
||||
@dataclass
class GovernanceDAG:
    """In-memory dependency graph of governance documents.

    `nodes` maps doc_id -> DAGNode. Edges are stored on both endpoints:
    node.parents are the docs it depends_on, node.children are the docs
    that depend on it.
    """

    nodes: dict[str, DAGNode] = field(default_factory=dict)

    def add_doc(self, doc: GovernanceDoc) -> None:
        """Add a document to the DAG."""
        # NOTE(review): re-adding the same doc appends its edges again —
        # fine as long as each doc is added once per build (see build_dag).
        if doc.doc_id not in self.nodes:
            self.nodes[doc.doc_id] = DAGNode(doc=doc)
        else:
            # Node may already exist as a placeholder created by an earlier
            # dependent — resolve it with the real document.
            self.nodes[doc.doc_id].doc = doc

        # Wire edges
        for dep_id in doc.depends_on:
            if dep_id not in self.nodes:
                # Placeholder — will be resolved when dep is added
                self.nodes[dep_id] = DAGNode(
                    doc=GovernanceDoc(doc_id=dep_id, doc_kind="unknown")
                )
            self.nodes[dep_id].children.append(doc.doc_id)
            self.nodes[doc.doc_id].parents.append(dep_id)

    def validate(self) -> list[str]:
        """Validate the DAG. Returns list of errors (empty = valid)."""
        errors = []

        # Check for cycles
        # Adjacency follows dependency direction: node -> its parents.
        adj = {
            node_id: node.parents for node_id, node in self.nodes.items()
        }
        cycles = detect_cycles(adj)
        for cycle in cycles:
            errors.append(f"Cycle detected: {' → '.join(cycle)}")

        # Check for unresolved placeholders
        for node_id, node in self.nodes.items():
            if node.doc.doc_kind == "unknown":
                errors.append(f"Unresolved dependency: {node_id}")

        return errors

    def roots(self) -> list[str]:
        """Return doc_ids with no dependencies (DAG roots)."""
        return [
            nid for nid, node in self.nodes.items()
            if not node.parents
        ]

    def topological_order(self) -> list[str]:
        """Return doc_ids in topological order (dependencies first).

        NOTE(review): recursive DFS — a cyclic graph still terminates (the
        visited guard) but the result is then not a true topological sort;
        callers should check validate() first.
        """
        visited: set[str] = set()
        order: list[str] = []

        def visit(node_id: str) -> None:
            if node_id in visited:
                return
            visited.add(node_id)
            # Emit all dependencies before the node itself.
            for parent in self.nodes[node_id].parents:
                visit(parent)
            order.append(node_id)

        for nid in self.nodes:
            visit(nid)

        return order

    def to_dict(self) -> dict:
        """Serialize DAG to a JSON-friendly dict."""
        return {
            "nodes": [
                {
                    "doc_id": nid,
                    "doc_kind": node.doc.doc_kind,
                    "title": node.doc.title,
                    "status": node.doc.status,
                    "parents": node.parents,
                    "children": node.children,
                }
                for nid, node in self.nodes.items()
            ],
            "roots": self.roots(),
            "topological_order": self.topological_order(),
        }
|
||||
|
||||
|
||||
def build_dag(docs: list[GovernanceDoc]) -> GovernanceDAG:
    """Construct a GovernanceDAG by adding each parsed document in order."""
    dag = GovernanceDAG()
    for parsed in docs:
        dag.add_doc(parsed)
    return dag
|
||||
|
|
@ -0,0 +1,67 @@
|
|||
"""Parse governance documents with YAML frontmatter.
|
||||
|
||||
Expected format:
|
||||
---
|
||||
doc_id: spore.governance.consent
|
||||
doc_kind: protocol
|
||||
status: active
|
||||
depends_on:
|
||||
- spore.governance.membrane
|
||||
- spore.governance.holons
|
||||
---
|
||||
|
||||
# Document Title
|
||||
|
||||
Body content here...
|
||||
"""
|
||||
|
||||
import frontmatter
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class GovernanceDoc(BaseModel):
    """A governance document parsed from markdown with YAML frontmatter."""

    # Globally unique identifier, e.g. "spore.governance.consent" (required).
    doc_id: str
    # Document category from frontmatter; parser defaults it to "document".
    doc_kind: str
    # Human-readable title; parser falls back to the first "# " heading.
    title: str = ""
    # Lifecycle state; frontmatter default is "draft".
    status: str = "draft"
    # doc_ids this document depends on (DAG parents).
    depends_on: list[str] = []
    # Markdown body with frontmatter stripped.
    body: str = ""
    # Remaining frontmatter keys not mapped to a dedicated field.
    frontmatter: dict = {}
|
||||
|
||||
|
||||
def parse_governance_doc(content: str) -> GovernanceDoc:
    """Parse a markdown document with YAML frontmatter into a GovernanceDoc.

    Args:
        content: Full document text, frontmatter block included.

    Returns:
        A GovernanceDoc; frontmatter keys without a dedicated field are kept
        in its `frontmatter` dict.

    Raises:
        ValueError: If the required `doc_id` frontmatter field is missing.
    """
    post = frontmatter.loads(content)
    meta = dict(post.metadata)

    doc_id = meta.pop("doc_id", "")
    if not doc_id:
        raise ValueError("Missing required frontmatter field: doc_id")

    doc_kind = meta.pop("doc_kind", "document")
    status = meta.pop("status", "draft")
    depends_on = meta.pop("depends_on", [])
    title = meta.pop("title", "")

    # A bare `depends_on:` key parses as None in YAML — treat it as "no
    # dependencies" rather than wrapping None into a one-element list.
    if depends_on is None:
        depends_on = []
    elif not isinstance(depends_on, list):
        # Allow a single scalar dependency in frontmatter.
        depends_on = [depends_on]

    # Extract title from first heading if not in frontmatter
    if not title:
        for line in post.content.split("\n"):
            stripped = line.strip()
            if stripped.startswith("# "):
                title = stripped[2:].strip()
                break

    return GovernanceDoc(
        doc_id=doc_id,
        doc_kind=doc_kind,
        title=title,
        status=status,
        depends_on=depends_on,
        body=post.content,
        frontmatter=meta,
    )
|
||||
|
||||
|
||||
def parse_many(contents: list[str]) -> list[GovernanceDoc]:
    """Parse multiple governance documents, one GovernanceDoc per string."""
    return list(map(parse_governance_doc, contents))
|
||||
|
|
@ -0,0 +1,47 @@
|
|||
"""Cycle detection for governance DAG using DFS."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
# DFS vertex states: unvisited / on the current DFS stack / fully explored.
WHITE, GRAY, BLACK = 0, 1, 2


def detect_cycles(adj: dict[str, list[str]]) -> list[list[str]]:
    """Find cycles in a directed graph using a coloring depth-first search.

    Args:
        adj: Adjacency list mapping node → list of neighbors (dependencies).
            Neighbors that are not keys of `adj` are ignored.

    Returns:
        One list per back edge found; each is the closed walk of node IDs
        forming the loop (first and last element are the same node).
    """
    state = dict.fromkeys(adj, WHITE)
    came_from: dict[str, str | None] = dict.fromkeys(adj, None)
    found: list[list[str]] = []

    def walk(current: str) -> None:
        state[current] = GRAY
        for nxt in adj.get(current, []):
            if nxt not in state:
                # Node referenced but not declared in the graph — skip.
                continue
            if state[nxt] == GRAY:
                # Back edge to an ancestor still on the DFS stack: rebuild the
                # loop by following tree-parent links from `current` upward.
                loop = [nxt, current]
                step = current
                while step != nxt:
                    step = came_from[step]
                    if step is None:
                        break
                    loop.append(step)
                loop.reverse()
                found.append(loop)
            elif state[nxt] == WHITE:
                came_from[nxt] = current
                walk(nxt)
        state[current] = BLACK

    for start in adj:
        if state[start] == WHITE:
            walk(start)

    return found
|
||||
|
|
@ -0,0 +1,51 @@
|
|||
"""SporeNode — extends koi-net FullNode with Spore primitives."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
|
||||
from koi_net.core import FullNode
|
||||
from koi_net.components.interfaces import KnowledgeHandler, HandlerType
|
||||
from koi_net.protocol.event import EventType
|
||||
from koi_net.protocol.knowledge_object import KnowledgeObject
|
||||
from rid_lib.ext import Bundle
|
||||
|
||||
from .config import SporeConfig, build_node_config
|
||||
from .rid_types import SporeHolon, SPORE_RID_TYPES
|
||||
|
||||
|
||||
@dataclass
class SporeDerefHandler:
    """Dereference Spore RID types from PostgreSQL."""

    # koi-net handler registration metadata: a Bundle-stage handler covering
    # every Spore RID type, with no event-type filter.
    # NOTE(review): these are plain class attributes (no annotations), so
    # @dataclass generates no fields from them — confirm koi-net expects
    # class attributes rather than dataclass fields here.
    handler_type = HandlerType.Bundle
    rid_types = SPORE_RID_TYPES
    event_types = ()

    def handle(self, kobj: KnowledgeObject) -> KnowledgeObject | None:
        # Phase 1: pass-through. Phase 2 will add DB lookups.
        return kobj
|
||||
|
||||
|
||||
class SporeNode(FullNode):
    """Spore Agent Commons koi-net node."""

    # NOTE(review): the annotation declares a SporeDerefHandler instance, but
    # the assigned value is the SporeDerefHandler *class* (no call). Confirm
    # whether koi-net instantiates handler classes itself; if not, this
    # should likely be SporeDerefHandler().
    spore_deref: SporeDerefHandler = SporeDerefHandler
|
||||
|
||||
|
||||
def create_node(cfg: SporeConfig) -> object:
    """Create and configure the SporeNode.

    Returns a NodeContainer (koi-net assembler pattern).
    """
    node_config = build_node_config(cfg)

    # koi-net reads its configuration from config.yaml under root_dir, so
    # materialize the generated config there before constructing the node.
    root = cfg.root_dir
    root.mkdir(parents=True, exist_ok=True)

    import yaml
    serialized = yaml.dump(node_config.model_dump(mode="json"))
    with open(root / "config.yaml", "w") as fh:
        fh.write(serialized)

    return SporeNode(root_dir=root)
|
||||
|
|
@ -0,0 +1,114 @@
|
|||
"""Custom RID types for Spore Agent Commons primitives.
|
||||
|
||||
Each type is an ORN with namespace "spore.<type>".
|
||||
Reference format: <slug> (URL-safe identifier).
|
||||
"""
|
||||
|
||||
from rid_lib.core import ORN
|
||||
|
||||
|
||||
class SporeHolon(ORN):
    """Agent, team, or organization in the commons."""

    namespace = "spore.holon"

    def __init__(self, slug: str):
        # URL-safe identifier; used verbatim as the ORN reference.
        self.slug = slug

    @property
    def reference(self) -> str:
        return self.slug

    @classmethod
    def from_reference(cls, reference: str) -> "SporeHolon":
        # Inverse of `reference`: rebuild the RID from its reference string.
        return cls(slug=reference)
|
||||
|
||||
|
||||
class SporeClaim(ORN):
    """Knowledge claim in the commons."""

    namespace = "spore.claim"

    def __init__(self, claim_id: str):
        # URL-safe identifier; used verbatim as the ORN reference.
        self.claim_id = claim_id

    @property
    def reference(self) -> str:
        return self.claim_id

    @classmethod
    def from_reference(cls, reference: str) -> "SporeClaim":
        # Inverse of `reference`: rebuild the RID from its reference string.
        return cls(claim_id=reference)
|
||||
|
||||
|
||||
class SporeEvidence(ORN):
    """Evidence supporting or challenging a claim."""

    namespace = "spore.evidence"

    def __init__(self, evidence_id: str):
        # URL-safe identifier; used verbatim as the ORN reference.
        self.evidence_id = evidence_id

    @property
    def reference(self) -> str:
        return self.evidence_id

    @classmethod
    def from_reference(cls, reference: str) -> "SporeEvidence":
        # Inverse of `reference`: rebuild the RID from its reference string.
        return cls(evidence_id=reference)
|
||||
|
||||
|
||||
class SporeAttestation(ORN):
    """Attestation (endorse/dispute/abstain) on a claim."""

    namespace = "spore.attestation"

    def __init__(self, attestation_id: str):
        # URL-safe identifier; used verbatim as the ORN reference.
        self.attestation_id = attestation_id

    @property
    def reference(self) -> str:
        return self.attestation_id

    @classmethod
    def from_reference(cls, reference: str) -> "SporeAttestation":
        # Inverse of `reference`: rebuild the RID from its reference string.
        return cls(attestation_id=reference)
|
||||
|
||||
|
||||
class SporeIntent(ORN):
    """Intent (need/offer/possibility) published by a holon."""

    namespace = "spore.intent"

    def __init__(self, intent_id: str):
        # URL-safe identifier; used verbatim as the ORN reference.
        self.intent_id = intent_id

    @property
    def reference(self) -> str:
        return self.intent_id

    @classmethod
    def from_reference(cls, reference: str) -> "SporeIntent":
        # Inverse of `reference`: rebuild the RID from its reference string.
        return cls(intent_id=reference)
|
||||
|
||||
|
||||
class SporeCommitment(ORN):
    """Commitment between holons with lifecycle state machine."""

    namespace = "spore.commitment"

    def __init__(self, commitment_id: str):
        # URL-safe identifier; used verbatim as the ORN reference.
        self.commitment_id = commitment_id

    @property
    def reference(self) -> str:
        return self.commitment_id

    @classmethod
    def from_reference(cls, reference: str) -> "SporeCommitment":
        # Inverse of `reference`: rebuild the RID from its reference string.
        return cls(commitment_id=reference)
|
||||
|
||||
|
||||
# All Spore RID types for convenience
# (e.g. consumed as a handler's rid_types for koi-net registration).
SPORE_RID_TYPES = (
    SporeHolon,
    SporeClaim,
    SporeEvidence,
    SporeAttestation,
    SporeIntent,
    SporeCommitment,
)
|
||||
Loading…
Reference in New Issue