mirror of
https://github.com/obra/superpowers.git
synced 2026-05-08 18:19:04 +08:00
Compare commits
20 Commits
pi-extensi
...
f/evals-li
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
bad4708a7b | ||
|
|
ec9b96a7bf | ||
|
|
2d4cdea2bb | ||
|
|
af465f9687 | ||
|
|
e4191c3609 | ||
|
|
d545612825 | ||
|
|
b43d14f87f | ||
|
|
11d5db1b22 | ||
|
|
051bff661b | ||
|
|
dc6255291b | ||
|
|
d337f4a18a | ||
|
|
6fe9cf7515 | ||
|
|
3177c87aa8 | ||
|
|
a94d2cc414 | ||
|
|
dcffaa087a | ||
|
|
b3817bba4f | ||
|
|
3c046f579e | ||
|
|
895bb732d5 | ||
|
|
cf5914a31f | ||
|
|
cf34cef01e |
@@ -1,121 +0,0 @@
|
||||
import { readFileSync } from "node:fs";
|
||||
import { dirname, resolve } from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import type { ExtensionAPI } from "@earendil-works/pi-coding-agent";
|
||||
|
||||
const EXTREMELY_IMPORTANT_MARKER = "<EXTREMELY_IMPORTANT>";
|
||||
const BOOTSTRAP_MARKER = "superpowers:using-superpowers bootstrap for pi";
|
||||
|
||||
const extensionDir = dirname(fileURLToPath(import.meta.url));
|
||||
const packageRoot = resolve(extensionDir, "../..");
|
||||
const skillsDir = resolve(packageRoot, "skills");
|
||||
const bootstrapSkillPath = resolve(skillsDir, "using-superpowers", "SKILL.md");
|
||||
|
||||
let cachedBootstrap: string | null | undefined;
|
||||
|
||||
export default function superpowersPiExtension(pi: ExtensionAPI) {
|
||||
let injectBootstrap = true;
|
||||
|
||||
pi.on("resources_discover", async () => ({
|
||||
skillPaths: [skillsDir],
|
||||
}));
|
||||
|
||||
pi.on("session_start", async () => {
|
||||
injectBootstrap = true;
|
||||
});
|
||||
|
||||
pi.on("session_compact", async () => {
|
||||
injectBootstrap = true;
|
||||
});
|
||||
|
||||
pi.on("agent_end", async () => {
|
||||
injectBootstrap = false;
|
||||
});
|
||||
|
||||
pi.on("context", async (event) => {
|
||||
if (!injectBootstrap) return;
|
||||
if (event.messages.some(messageContainsBootstrap)) return;
|
||||
|
||||
const bootstrap = getBootstrapContent();
|
||||
if (!bootstrap) return;
|
||||
|
||||
const bootstrapMessage = {
|
||||
role: "user" as const,
|
||||
content: [{ type: "text" as const, text: bootstrap }],
|
||||
timestamp: Date.now(),
|
||||
};
|
||||
|
||||
const insertAt = firstNonCompactionSummaryIndex(event.messages);
|
||||
return {
|
||||
messages: [
|
||||
...event.messages.slice(0, insertAt),
|
||||
bootstrapMessage,
|
||||
...event.messages.slice(insertAt),
|
||||
],
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
function getBootstrapContent(): string | null {
|
||||
if (cachedBootstrap !== undefined) return cachedBootstrap;
|
||||
|
||||
try {
|
||||
const skillContent = readFileSync(bootstrapSkillPath, "utf8");
|
||||
const body = stripFrontmatter(skillContent);
|
||||
cachedBootstrap = `${EXTREMELY_IMPORTANT_MARKER}
|
||||
${BOOTSTRAP_MARKER}
|
||||
|
||||
You have superpowers.
|
||||
|
||||
The using-superpowers skill content is included below and is already loaded for this Pi session. Follow it now. Do not try to load using-superpowers again.
|
||||
|
||||
${body}
|
||||
|
||||
${piToolMapping()}
|
||||
</EXTREMELY_IMPORTANT>`;
|
||||
return cachedBootstrap;
|
||||
} catch {
|
||||
cachedBootstrap = null;
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function stripFrontmatter(content: string): string {
|
||||
const match = content.match(/^---\n[\s\S]*?\n---\n([\s\S]*)$/);
|
||||
return (match ? match[1] : content).trim();
|
||||
}
|
||||
|
||||
function piToolMapping(): string {
|
||||
return `## Pi tool mapping
|
||||
|
||||
Pi has native skills but does not expose Claude Code's \`Skill\` tool. When a Superpowers instruction says to use the \`Skill\` tool, use Pi's native skill system instead: load the relevant \`SKILL.md\` with \`read\` when the skill applies, or let a human invoke \`/skill:name\` explicitly.
|
||||
|
||||
Pi's built-in coding tools are lowercase: \`read\`, \`write\`, \`edit\`, \`bash\`, plus optional \`grep\`, \`find\`, and \`ls\`. Map Claude-style tool names \`Read\`, \`Write\`, \`Edit\`, and \`Bash\` to those Pi tools.
|
||||
|
||||
Pi does not ship a standard \`Task\` subagent tool. If a subagent tool such as \`subagent\` from \`pi-subagents\` is available, use it for Superpowers subagent workflows. If no subagent tool is available, do the work in this session or explain the missing capability instead of inventing tool calls.
|
||||
|
||||
Pi does not ship a standard \`TodoWrite\` task-list tool. If an installed todo/task tool is available, use it. Otherwise track work in plan files or a repo-local \`TODO.md\` when task tracking is needed.`;
|
||||
}
|
||||
|
||||
function messageContainsBootstrap(message: unknown): boolean {
|
||||
const content = (message as { content?: unknown }).content;
|
||||
if (typeof content === "string") return content.includes(BOOTSTRAP_MARKER);
|
||||
if (!Array.isArray(content)) return false;
|
||||
return content.some((part) => {
|
||||
return (
|
||||
part &&
|
||||
typeof part === "object" &&
|
||||
(part as { type?: unknown }).type === "text" &&
|
||||
typeof (part as { text?: unknown }).text === "string" &&
|
||||
(part as { text: string }).text.includes(BOOTSTRAP_MARKER)
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
function firstNonCompactionSummaryIndex(messages: unknown[]): number {
|
||||
let index = 0;
|
||||
while ((messages[index] as { role?: unknown } | undefined)?.role === "compactionSummary") {
|
||||
index += 1;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
18
README.md
18
README.md
@@ -4,7 +4,7 @@ Superpowers is a complete software development methodology for your coding agent
|
||||
|
||||
## Quickstart
|
||||
|
||||
Give your agent Superpowers: [Claude Code](#claude-code), [Codex CLI](#codex-cli), [Codex App](#codex-app), [Factory Droid](#factory-droid), [Gemini CLI](#gemini-cli), [Pi](#pi), [OpenCode](#opencode), [Cursor](#cursor), [GitHub Copilot CLI](#github-copilot-cli).
|
||||
Give your agent Superpowers: [Claude Code](#claude-code), [Codex CLI](#codex-cli), [Codex App](#codex-app), [Factory Droid](#factory-droid), [Gemini CLI](#gemini-cli), [OpenCode](#opencode), [Cursor](#cursor), [GitHub Copilot CLI](#github-copilot-cli).
|
||||
|
||||
## How it works
|
||||
|
||||
@@ -114,22 +114,6 @@ Superpowers is available via the [official Codex plugin marketplace](https://git
|
||||
gemini extensions update superpowers
|
||||
```
|
||||
|
||||
### Pi
|
||||
|
||||
Install Superpowers as a Pi package from this repository:
|
||||
|
||||
```bash
|
||||
pi install git:github.com/obra/superpowers
|
||||
```
|
||||
|
||||
For local development, run Pi with this checkout loaded as a temporary package:
|
||||
|
||||
```bash
|
||||
pi -e /path/to/superpowers
|
||||
```
|
||||
|
||||
The Pi package loads the Superpowers skills and a small extension that injects the `using-superpowers` bootstrap at session startup and again after compaction. Pi has native skills, so no compatibility `Skill` tool is required. Subagent and task-list tools remain optional Pi companion packages.
|
||||
|
||||
### OpenCode
|
||||
|
||||
OpenCode uses its own plugin install; install Superpowers separately even if you
|
||||
|
||||
@@ -1,143 +0,0 @@
|
||||
# Pi Extension and Evals Implementation Plan
|
||||
|
||||
> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking.
|
||||
|
||||
**Goal:** Add first-class Pi package support for Superpowers and add Pi as a Drill eval backend.
|
||||
|
||||
**Architecture:** The Pi package is declared in the root `package.json` and loads existing `skills/` plus a small Pi extension. The extension injects the `using-superpowers` bootstrap into provider context as a user-role message on session startup and after compaction, with Pi-specific tool mapping. Drill gains a `pi` backend, Pi session-log normalization, and tests.
|
||||
|
||||
**Tech Stack:** Pi TypeScript extension API, Node built-in test runner, Drill Python eval harness, pytest.
|
||||
|
||||
---
|
||||
|
||||
### Task 1: Pi package manifest and extension tests
|
||||
|
||||
**Files:**
|
||||
- Modify: `package.json`
|
||||
- Create: `tests/pi/test-pi-extension.mjs`
|
||||
|
||||
- [ ] **Step 1: Write failing package/extension tests**
|
||||
|
||||
Create `tests/pi/test-pi-extension.mjs` with tests that import `extensions/superpowers.ts`, register fake Pi handlers, and assert:
|
||||
- root `package.json` has `keywords` containing `pi-package`
|
||||
- root `package.json` has `pi.skills: ["./skills"]`
|
||||
- root `package.json` has `pi.extensions: ["./extensions/superpowers.ts"]`
|
||||
- the extension registers `resources_discover`, `session_start`, `session_compact`, `context`, and `agent_end`
|
||||
- startup `context` injects exactly one user-role bootstrap message
|
||||
- `agent_end` clears startup injection
|
||||
- `session_compact` re-enables injection
|
||||
- the extension does not register `session_before_compact`
|
||||
|
||||
- [ ] **Step 2: Run tests and verify RED**
|
||||
|
||||
Run: `node --experimental-strip-types --test tests/pi/test-pi-extension.mjs`
|
||||
|
||||
Expected: FAIL because `extensions/superpowers.ts` does not exist and `package.json` lacks the `pi` manifest.
|
||||
|
||||
- [ ] **Step 3: Implement manifest fields**
|
||||
|
||||
Update `package.json` with `description`, `keywords`, `pi.extensions`, and `pi.skills` while preserving existing `name`, `version`, `type`, and `main`.
|
||||
|
||||
- [ ] **Step 4: Implement `extensions/superpowers.ts`**
|
||||
|
||||
Create a zero-runtime-dependency extension that:
|
||||
- locates the package root from `import.meta.url`
|
||||
- reads `skills/using-superpowers/SKILL.md`
|
||||
- strips YAML frontmatter
|
||||
- appends Pi-specific tool mapping
|
||||
- exposes `resources_discover` with the skills path
|
||||
- marks bootstrap pending on `session_start` and `session_compact`
|
||||
- injects a user-role bootstrap message in `context`
|
||||
- inserts post-compact bootstrap after leading `compactionSummary` messages
|
||||
- clears pending bootstrap on `agent_end`
|
||||
|
||||
- [ ] **Step 5: Run tests and verify GREEN**
|
||||
|
||||
Run: `node --experimental-strip-types --test tests/pi/test-pi-extension.mjs`
|
||||
|
||||
Expected: PASS.
|
||||
|
||||
### Task 2: Pi tool mapping reference
|
||||
|
||||
**Files:**
|
||||
- Create: `skills/using-superpowers/references/pi-tools.md`
|
||||
- Modify: `tests/pi/test-pi-extension.mjs`
|
||||
|
||||
- [ ] **Step 1: Write failing test for Pi reference doc**
|
||||
|
||||
Add assertions that `skills/using-superpowers/references/pi-tools.md` exists and documents mappings for `Skill`, `Task`, `TodoWrite`, and built-in tool names.
|
||||
|
||||
- [ ] **Step 2: Run tests and verify RED**
|
||||
|
||||
Run: `node --experimental-strip-types --test tests/pi/test-pi-extension.mjs`
|
||||
|
||||
Expected: FAIL because `pi-tools.md` does not exist.
|
||||
|
||||
- [ ] **Step 3: Add Pi reference doc**
|
||||
|
||||
Create `skills/using-superpowers/references/pi-tools.md` explaining Pi-native skills, optional `pi-subagents`, no canonical todo/tasklist plugin, and built-in lowercase tools.
|
||||
|
||||
- [ ] **Step 4: Run tests and verify GREEN**
|
||||
|
||||
Run: `node --experimental-strip-types --test tests/pi/test-pi-extension.mjs`
|
||||
|
||||
Expected: PASS.
|
||||
|
||||
### Task 3: Drill Pi backend and session log normalization
|
||||
|
||||
**Files:**
|
||||
- Create: `evals/backends/pi.yaml`
|
||||
- Modify: `evals/drill/backend.py`
|
||||
- Modify: `evals/drill/engine.py`
|
||||
- Modify: `evals/drill/normalizer.py`
|
||||
- Modify: `evals/tests/test_backend.py`
|
||||
- Modify: `evals/tests/test_normalizer.py`
|
||||
|
||||
- [ ] **Step 1: Write failing backend/normalizer tests**
|
||||
|
||||
Add pytest coverage for:
|
||||
- `load_backend("pi")` returns `family == "pi"`
|
||||
- Pi backend command starts with `pi` and includes `-e ${SUPERPOWERS_ROOT}`
|
||||
- `_resolve_log_dir()` for Pi points under `~/.pi/agent/sessions`
|
||||
- `filter_pi_logs_by_cwd()` keeps only session files whose header `cwd` matches the scenario workdir
|
||||
- `normalize_pi_logs()` extracts `toolCall` blocks from Pi assistant session entries and maps built-in lowercase tools to canonical names
|
||||
|
||||
- [ ] **Step 2: Run tests and verify RED**
|
||||
|
||||
Run: `uv run pytest evals/tests/test_backend.py evals/tests/test_normalizer.py -q`
|
||||
|
||||
Expected: FAIL because the Pi backend and normalizer do not exist.
|
||||
|
||||
- [ ] **Step 3: Add `evals/backends/pi.yaml`**
|
||||
|
||||
Configure the backend to run `pi -e ${SUPERPOWERS_ROOT}`, use permissive TUI readiness, `/quit` shutdown, and Pi session log location.
|
||||
|
||||
- [ ] **Step 4: Implement Pi family support**
|
||||
|
||||
Update `Backend.family`, `Engine._resolve_log_dir`, `Engine._collect_tool_calls`, and `normalizer.py` with Pi log filtering and normalizing.
|
||||
|
||||
- [ ] **Step 5: Run tests and verify GREEN**
|
||||
|
||||
Run: `uv run pytest evals/tests/test_backend.py evals/tests/test_normalizer.py -q`
|
||||
|
||||
Expected: PASS.
|
||||
|
||||
### Task 4: Documentation and full verification
|
||||
|
||||
**Files:**
|
||||
- Modify: `README.md`
|
||||
- Modify: `evals/README.md`
|
||||
|
||||
- [ ] **Step 1: Document Pi install and eval backend**
|
||||
|
||||
Add Pi to README quickstart/install list and add backend entry/usage to `evals/README.md`.
|
||||
|
||||
- [ ] **Step 2: Run verification**
|
||||
|
||||
Run:
|
||||
```bash
|
||||
node --experimental-strip-types --test tests/pi/test-pi-extension.mjs
|
||||
uv run pytest evals/tests/test_backend.py evals/tests/test_setup.py evals/tests/test_normalizer.py -q
|
||||
```
|
||||
|
||||
Expected: all tests pass.
|
||||
@@ -43,9 +43,6 @@ uv run drill run spec-writing-blind-spot -b claude-opus-4-6 --n 5
|
||||
# Sweep across multiple backends
|
||||
uv run drill run spec-writing-blind-spot --models claude-opus-4-6,claude-opus-4-7 --n 10
|
||||
|
||||
# Run against Pi, loading the local Superpowers package via -e ${SUPERPOWERS_ROOT}
|
||||
uv run drill run triggering-writing-plans -b pi
|
||||
|
||||
# Compare results
|
||||
uv run drill compare spec-writing-blind-spot
|
||||
|
||||
@@ -75,7 +72,6 @@ uv run drill list
|
||||
| `codex` | Codex CLI | — |
|
||||
| `gemini` | Gemini CLI | auto-gemini-3 |
|
||||
| `gemini-2-5-flash` | Gemini CLI | gemini-2.5-flash |
|
||||
| `pi` | Pi coding agent | configured Pi default |
|
||||
|
||||
## Project structure
|
||||
|
||||
|
||||
@@ -1,23 +0,0 @@
|
||||
name: pi
|
||||
cli: pi
|
||||
args:
|
||||
- "-e"
|
||||
- "${SUPERPOWERS_ROOT}"
|
||||
required_env:
|
||||
- SUPERPOWERS_ROOT
|
||||
hooks:
|
||||
pre_run: []
|
||||
post_run: []
|
||||
shutdown: "/quit"
|
||||
idle:
|
||||
quiescence_seconds: 5
|
||||
ready_pattern: "."
|
||||
busy_pattern: "esc to cancel|Thinking\\.\\.\\.|\\(esc to cancel[^)]*\\)|[⠇⠏⠋⠙⠹⠸⠼⠴⠦⠧⠶⠾⠽⠻⠿]"
|
||||
max_busy_seconds: 1800
|
||||
startup_timeout: 60
|
||||
turn_timeout: 300
|
||||
terminal:
|
||||
cols: 200
|
||||
rows: 50
|
||||
session_logs:
|
||||
pattern: "~/.pi/agent/sessions/**/*.jsonl"
|
||||
@@ -71,7 +71,7 @@ class Backend:
|
||||
@property
|
||||
def family(self) -> str:
|
||||
"""Normalize backend name to a family for log-dir / normalizer dispatch."""
|
||||
for fam in ("claude", "codex", "gemini", "pi"):
|
||||
for fam in ("claude", "codex", "gemini"):
|
||||
if self.name == fam or self.name.startswith(f"{fam}-"):
|
||||
return fam
|
||||
return "other"
|
||||
|
||||
@@ -21,7 +21,6 @@ from drill.normalizer import (
|
||||
NORMALIZERS,
|
||||
collect_new_logs,
|
||||
filter_codex_logs_by_cwd,
|
||||
filter_pi_logs_by_cwd,
|
||||
snapshot_log_dir,
|
||||
)
|
||||
from drill.session import TmuxSession
|
||||
@@ -349,11 +348,6 @@ class Engine:
|
||||
# Project name is the workdir basename, lowercased
|
||||
project = workdir.resolve().name.lower()
|
||||
return Path.home() / ".gemini" / "tmp" / project
|
||||
elif self.backend.family == "pi":
|
||||
# Pi stores sessions under ~/.pi/agent/sessions/<encoded-cwd>/.
|
||||
# Return the root and filter by the session header cwd because
|
||||
# multiple evals may run concurrently under the same tree.
|
||||
return Path.home() / ".pi" / "agent" / "sessions"
|
||||
pattern = self.backend.session_logs.get("pattern", "")
|
||||
if not pattern:
|
||||
return None
|
||||
@@ -369,8 +363,6 @@ class Engine:
|
||||
new_files = collect_new_logs(log_dir, snapshot)
|
||||
if self.backend.family == "codex":
|
||||
new_files = filter_codex_logs_by_cwd(new_files, str(workdir.resolve()))
|
||||
elif self.backend.family == "pi":
|
||||
new_files = filter_pi_logs_by_cwd(new_files, str(workdir.resolve()))
|
||||
normalizer = NORMALIZERS.get(self.backend.family)
|
||||
if not normalizer:
|
||||
return []
|
||||
|
||||
@@ -74,23 +74,6 @@ def filter_codex_logs_by_cwd(paths: list[Path], target_cwd: str) -> list[Path]:
|
||||
return matched
|
||||
|
||||
|
||||
def filter_pi_logs_by_cwd(paths: list[Path], target_cwd: str) -> list[Path]:
    """Keep only Pi session logs whose JSONL header cwd equals ``target_cwd``.

    Each Pi session file starts with a header entry like
    ``{"type": "session", "cwd": "..."}``. Files that cannot be opened or
    whose first line is not valid JSON are silently skipped, as are files
    whose header is not a session entry or points at a different cwd.
    """
    kept: list[Path] = []
    for candidate in paths:
        try:
            with candidate.open() as handle:
                header = json.loads(handle.readline())
        except (OSError, json.JSONDecodeError):
            # Unreadable or malformed file: not a usable session log.
            continue
        if header.get("type") != "session":
            continue
        if header.get("cwd") == target_cwd:
            kept.append(candidate)
    return kept
|
||||
|
||||
|
||||
def normalize_claude_logs(raw_content: str) -> list[dict[str, Any]]:
|
||||
"""Normalize Claude Code session logs.
|
||||
|
||||
@@ -172,52 +155,6 @@ def normalize_codex_logs(raw_content: str) -> list[dict[str, Any]]:
|
||||
return results
|
||||
|
||||
|
||||
# Reverse mapping: Pi tool names → Claude Code canonical names
PI_TOOL_MAP: dict[str, str] = {
    "read": "Read",
    "write": "Write",
    "edit": "Edit",
    "bash": "Bash",
    "grep": "Grep",
    "find": "Glob",
    "ls": "Glob",
}


# Tools treated as "native" for source attribution; Bash is excluded because
# shell execution is reported with source="shell".
PI_NATIVE_TOOLS = (set(PI_TOOL_MAP.values()) - {"Bash"}) | {"subagent", "todo", "manage_todo_list"}


def normalize_pi_logs(raw_content: str) -> list[dict[str, Any]]:
    """Normalize Pi JSONL session logs.

    Pi session files are JSONL entries. Assistant messages contain tool calls
    as content blocks: {"type": "toolCall", "name": "read", "arguments": {...}}.
    Each tool call is mapped to a dict with the canonical tool name, its
    arguments, and a "native"/"shell" source tag.
    """
    normalized: list[dict[str, Any]] = []
    for line in raw_content.strip().split("\n"):
        if not line.strip():
            continue
        try:
            entry = json.loads(line)
        except json.JSONDecodeError:
            # Skip lines that are not valid JSON.
            continue
        if entry.get("type") != "message":
            continue
        message = entry.get("message", {})
        if message.get("role") != "assistant":
            continue
        for block in message.get("content", []):
            if block.get("type") != "toolCall":
                continue
            raw_name = block.get("name", "")
            canonical = PI_TOOL_MAP.get(raw_name, raw_name)
            source = "native" if canonical in PI_NATIVE_TOOLS else "shell"
            normalized.append(
                {"tool": canonical, "args": block.get("arguments", {}), "source": source}
            )
    return normalized
|
||||
|
||||
|
||||
# Reverse mapping: Gemini tool names → Claude Code canonical names
|
||||
GEMINI_TOOL_MAP: dict[str, str] = {
|
||||
"run_shell_command": "Bash",
|
||||
@@ -288,5 +225,4 @@ NORMALIZERS: dict[str, Callable[[str], list[dict[str, Any]]]] = {
|
||||
"claude": normalize_claude_logs,
|
||||
"codex": normalize_codex_logs,
|
||||
"gemini": normalize_gemini_logs,
|
||||
"pi": normalize_pi_logs,
|
||||
}
|
||||
|
||||
@@ -44,12 +44,6 @@ class TestLoadBackend:
|
||||
assert flash_backend.family == "gemini"
|
||||
assert flash_backend.model == "gemini-2.5-flash"
|
||||
|
||||
def test_loads_pi_backend(self, backends_dir):
|
||||
backend = load_backend("pi", backends_dir)
|
||||
assert backend.name == "pi"
|
||||
assert backend.cli == "pi"
|
||||
assert backend.family == "pi"
|
||||
|
||||
|
||||
class TestBackendBuildCommand:
|
||||
def test_claude_build_command(self, backends_dir, monkeypatch):
|
||||
@@ -66,12 +60,6 @@ class TestBackendBuildCommand:
|
||||
cmd = backend.build_command("/tmp/workdir")
|
||||
assert cmd[0] == "codex"
|
||||
|
||||
def test_pi_build_command_loads_local_superpowers_package(self, backends_dir, monkeypatch):
|
||||
monkeypatch.setenv("SUPERPOWERS_ROOT", "/tmp/superpowers")
|
||||
backend = load_backend("pi", backends_dir)
|
||||
cmd = backend.build_command("/tmp/workdir")
|
||||
assert cmd == ["pi", "-e", "/tmp/superpowers"]
|
||||
|
||||
|
||||
class TestBackendEnvValidation:
|
||||
def test_missing_env_raises(self, backends_dir, monkeypatch):
|
||||
@@ -137,21 +125,6 @@ class TestBackendFamily:
|
||||
backend = load_backend("codex", backends_dir)
|
||||
assert backend.family == "codex"
|
||||
|
||||
def test_pi_backend_family(self):
|
||||
backend = Backend(
|
||||
name="pi",
|
||||
cli="pi",
|
||||
args=[],
|
||||
required_env=[],
|
||||
hooks={"pre_run": [], "post_run": []},
|
||||
shutdown="/quit",
|
||||
idle={},
|
||||
startup_timeout=30,
|
||||
terminal={},
|
||||
session_logs={},
|
||||
)
|
||||
assert backend.family == "pi"
|
||||
|
||||
def test_variant_name_preserves_family(self):
|
||||
backend = Backend(
|
||||
name="claude-opus-4-6",
|
||||
|
||||
@@ -4,7 +4,7 @@ import json
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
|
||||
from drill.engine import Engine, RunResult, ScenarioConfig, VerifyConfig, snapshot_filesystem
|
||||
from drill.engine import RunResult, ScenarioConfig, VerifyConfig, snapshot_filesystem
|
||||
|
||||
|
||||
class TestVerifyConfig:
|
||||
@@ -138,40 +138,6 @@ class TestEngineAssertionIntegration:
|
||||
assert (tmp_path / "meta.json").exists()
|
||||
|
||||
|
||||
class TestEnginePiBackend:
|
||||
def test_resolves_pi_session_log_root(self, tmp_path: Path) -> None:
|
||||
scenario = tmp_path / "scenario.yaml"
|
||||
scenario.write_text("scenario: test-pi\n")
|
||||
backends = tmp_path / "backends"
|
||||
backends.mkdir()
|
||||
(backends / "pi.yaml").write_text(
|
||||
"""
|
||||
name: pi
|
||||
cli: pi
|
||||
args: []
|
||||
required_env: []
|
||||
hooks:
|
||||
pre_run: []
|
||||
post_run: []
|
||||
shutdown: /quit
|
||||
idle: {}
|
||||
startup_timeout: 1
|
||||
terminal: {}
|
||||
session_logs:
|
||||
pattern: ~/.pi/agent/sessions/**/*.jsonl
|
||||
"""
|
||||
)
|
||||
engine = Engine(
|
||||
scenario_path=scenario,
|
||||
backend_name="pi",
|
||||
backends_dir=backends,
|
||||
fixtures_dir=tmp_path,
|
||||
results_dir=tmp_path,
|
||||
)
|
||||
|
||||
assert engine._resolve_log_dir(tmp_path) == Path.home() / ".pi" / "agent" / "sessions"
|
||||
|
||||
|
||||
class TestEngineRunParams:
|
||||
def test_run_result_uses_custom_output_dir(self, tmp_path: Path) -> None:
|
||||
custom_dir = tmp_path / "custom" / "run-00"
|
||||
|
||||
@@ -3,11 +3,9 @@ import json
|
||||
from drill.normalizer import (
|
||||
collect_new_logs,
|
||||
filter_codex_logs_by_cwd,
|
||||
filter_pi_logs_by_cwd,
|
||||
normalize_claude_logs,
|
||||
normalize_codex_logs,
|
||||
normalize_gemini_logs,
|
||||
normalize_pi_logs,
|
||||
snapshot_log_dir,
|
||||
)
|
||||
|
||||
@@ -139,56 +137,6 @@ class TestNormalizeCodexLogs:
|
||||
assert normalized[1]["source"] == "native"
|
||||
|
||||
|
||||
class TestNormalizePiLogs:
|
||||
def test_filter_by_cwd_keeps_matching_session_headers(self, tmp_path):
|
||||
target = "/tmp/drill-target"
|
||||
match = tmp_path / "match.jsonl"
|
||||
match.write_text(json.dumps({"type": "session", "cwd": target}) + "\n")
|
||||
other = tmp_path / "other.jsonl"
|
||||
other.write_text(json.dumps({"type": "session", "cwd": "/tmp/other"}) + "\n")
|
||||
malformed = tmp_path / "malformed.jsonl"
|
||||
malformed.write_text("not json\n")
|
||||
|
||||
assert filter_pi_logs_by_cwd([match, other, malformed], target) == [match]
|
||||
|
||||
def test_normalizes_assistant_tool_calls_from_session_entries(self):
|
||||
lines = [
|
||||
json.dumps({"type": "session", "cwd": "/tmp/project"}),
|
||||
json.dumps(
|
||||
{
|
||||
"type": "message",
|
||||
"message": {
|
||||
"role": "assistant",
|
||||
"content": [
|
||||
{"type": "text", "text": "I will inspect this."},
|
||||
{
|
||||
"type": "toolCall",
|
||||
"name": "read",
|
||||
"arguments": {"path": "README.md"},
|
||||
},
|
||||
{
|
||||
"type": "toolCall",
|
||||
"name": "bash",
|
||||
"arguments": {"command": "git status"},
|
||||
},
|
||||
{
|
||||
"type": "toolCall",
|
||||
"name": "subagent",
|
||||
"arguments": {"agent": "reviewer"},
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
),
|
||||
]
|
||||
|
||||
assert normalize_pi_logs("\n".join(lines)) == [
|
||||
{"tool": "Read", "args": {"path": "README.md"}, "source": "native"},
|
||||
{"tool": "Bash", "args": {"command": "git status"}, "source": "shell"},
|
||||
{"tool": "subagent", "args": {"agent": "reviewer"}, "source": "native"},
|
||||
]
|
||||
|
||||
|
||||
class TestNormalizeGeminiLogs:
|
||||
def test_normalizes_jsonl_tool_calls(self):
|
||||
lines = [
|
||||
|
||||
19
package.json
19
package.json
@@ -1,23 +1,6 @@
|
||||
{
|
||||
"name": "superpowers",
|
||||
"version": "5.1.0",
|
||||
"description": "Superpowers skills and runtime bootstrap for coding agents",
|
||||
"type": "module",
|
||||
"main": ".opencode/plugins/superpowers.js",
|
||||
"keywords": [
|
||||
"pi-package",
|
||||
"skills",
|
||||
"tdd",
|
||||
"debugging",
|
||||
"collaboration",
|
||||
"workflow"
|
||||
],
|
||||
"pi": {
|
||||
"extensions": [
|
||||
"./.pi/extensions/superpowers.ts"
|
||||
],
|
||||
"skills": [
|
||||
"./skills"
|
||||
]
|
||||
}
|
||||
"main": ".opencode/plugins/superpowers.js"
|
||||
}
|
||||
|
||||
@@ -1,30 +0,0 @@
|
||||
# Pi Tool Mapping
|
||||
|
||||
Pi supports Superpowers skills natively through skill discovery and `/skill:name` commands. It does not expose Claude Code's `Skill` tool.
|
||||
|
||||
When a Superpowers skill mentions Claude Code tool names, use these Pi equivalents:
|
||||
|
||||
| Superpowers / Claude Code name | Pi equivalent |
|
||||
| --- | --- |
|
||||
| `Skill` | Pi native skills: load the relevant `SKILL.md` with `read`, or let the human use `/skill:name` |
|
||||
| `Read` | `read` |
|
||||
| `Write` | `write` |
|
||||
| `Edit` | `edit` |
|
||||
| `Bash` | `bash` |
|
||||
| `Grep` | `grep` when active; otherwise `bash` with `rg`/`grep` |
|
||||
| `Glob` | `find` or `bash` with shell globs |
|
||||
| `LS` / `List` | `ls` when active; otherwise `bash` with `ls` |
|
||||
| `Task` | Use an installed subagent tool such as `subagent` from `pi-subagents` if available |
|
||||
| `TodoWrite` | Use an installed todo/task tool if available, otherwise track tasks in the plan or `TODO.md` |
|
||||
|
||||
## Skills
|
||||
|
||||
Pi discovers skills from configured skill directories and installed Pi packages. A Superpowers Pi package should expose `skills/` through its `pi.skills` manifest entry. The agent should still follow the Superpowers rule: when a skill applies, load and follow it before responding.
|
||||
|
||||
## Subagents
|
||||
|
||||
Pi core does not ship a standard subagent tool. The `pi-subagents` package is a strong optional companion and provides a `subagent` tool with single-agent, chain, parallel, async, forked-context, and resume/status workflows. If no subagent tool is available, do not fabricate `Task` calls; execute sequentially in the current session or explain that the optional subagent capability is not installed.
|
||||
|
||||
## Task lists
|
||||
|
||||
Pi core does not ship a standard task-list tool. If a todo/task extension is installed, use its documented tool. Otherwise use Superpowers plan files, checklists in Markdown, or a repo-local `TODO.md` for task tracking.
|
||||
@@ -1,128 +0,0 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import { readFile } from 'node:fs/promises';
|
||||
import { existsSync } from 'node:fs';
|
||||
import { dirname, resolve } from 'node:path';
|
||||
import { fileURLToPath, pathToFileURL } from 'node:url';
|
||||
import test from 'node:test';
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
const repoRoot = resolve(__dirname, '../..');
|
||||
const packageJsonPath = resolve(repoRoot, 'package.json');
|
||||
const extensionPath = resolve(repoRoot, '.pi/extensions/superpowers.ts');
|
||||
const piToolsPath = resolve(repoRoot, 'skills/using-superpowers/references/pi-tools.md');
|
||||
|
||||
// Load and parse the repository's package.json for manifest assertions.
async function readPackageJson() {
  const raw = await readFile(packageJsonPath, 'utf8');
  return JSON.parse(raw);
}
|
||||
|
||||
// Import the Pi extension with a fake `pi` API that records every handler
// registration. A cache-busting query string forces a fresh module instance
// per call so each test observes independent extension state.
async function loadExtension() {
  const handlers = new Map();
  const pi = {
    on(event, handler) {
      const registered = handlers.get(event);
      if (registered) {
        registered.push(handler);
      } else {
        handlers.set(event, [handler]);
      }
    },
  };
  const cacheBuster = `?cachebust=${Date.now()}-${Math.random()}`;
  const mod = await import(pathToFileURL(extensionPath).href + cacheBuster);
  mod.default(pi);
  return { handlers };
}
|
||||
|
||||
// Fetch the single registered handler for `event`, failing the test when the
// extension registered zero or multiple handlers for it.
function firstHandler(handlers, event) {
  const registered = handlers.get(event) ?? [];
  assert.equal(registered.length, 1, `expected one ${event} handler`);
  return registered[0];
}
|
||||
|
||||
// Flatten a message's text content: plain strings pass through, while
// content-part arrays yield their text parts joined with newlines.
function textOf(message) {
  const { content } = message;
  if (typeof content === 'string') return content;
  const pieces = [];
  for (const part of content) {
    if (part.type === 'text') pieces.push(part.text);
  }
  return pieces.join('\n');
}
|
||||
|
||||
test('package.json declares a pi package with skills and extension resources', async () => {
|
||||
const pkg = await readPackageJson();
|
||||
|
||||
assert.equal(pkg.name, 'superpowers');
|
||||
assert.ok(pkg.keywords.includes('pi-package'));
|
||||
assert.deepEqual(pkg.pi.skills, ['./skills']);
|
||||
assert.deepEqual(pkg.pi.extensions, ['./.pi/extensions/superpowers.ts']);
|
||||
});
|
||||
|
||||
test('extension registers lifecycle hooks without pre-compaction injection', async () => {
|
||||
const { handlers } = await loadExtension();
|
||||
|
||||
for (const event of ['resources_discover', 'session_start', 'session_compact', 'context', 'agent_end']) {
|
||||
assert.equal((handlers.get(event) ?? []).length, 1, `missing ${event} handler`);
|
||||
}
|
||||
assert.equal((handlers.get('session_before_compact') ?? []).length, 0);
|
||||
});
|
||||
|
||||
test('resources_discover contributes the bundled skills directory', async () => {
|
||||
const { handlers } = await loadExtension();
|
||||
const discover = firstHandler(handlers, 'resources_discover');
|
||||
|
||||
const result = await discover({ type: 'resources_discover', cwd: repoRoot, reason: 'startup' }, {});
|
||||
|
||||
assert.deepEqual(result.skillPaths, [resolve(repoRoot, 'skills')]);
|
||||
});
|
||||
|
||||
test('startup context injects the bootstrap as one user message until agent_end', async () => {
|
||||
const { handlers } = await loadExtension();
|
||||
const sessionStart = firstHandler(handlers, 'session_start');
|
||||
const context = firstHandler(handlers, 'context');
|
||||
const agentEnd = firstHandler(handlers, 'agent_end');
|
||||
|
||||
await sessionStart({ type: 'session_start', reason: 'startup' }, {});
|
||||
|
||||
const originalMessages = [
|
||||
{ role: 'user', content: [{ type: 'text', text: 'Let us make a react todo list' }], timestamp: 1 },
|
||||
];
|
||||
const result = await context({ type: 'context', messages: originalMessages }, {});
|
||||
|
||||
assert.equal(result.messages.length, 2);
|
||||
assert.equal(result.messages[0].role, 'user');
|
||||
assert.match(textOf(result.messages[0]), /You have superpowers/);
|
||||
assert.match(textOf(result.messages[0]), /Pi tool mapping/);
|
||||
assert.equal(result.messages[1], originalMessages[0]);
|
||||
|
||||
const repeatedProviderRequest = await context({ type: 'context', messages: originalMessages }, {});
|
||||
assert.equal(repeatedProviderRequest.messages.length, 2);
|
||||
assert.match(textOf(repeatedProviderRequest.messages[0]), /You have superpowers/);
|
||||
|
||||
const alreadyInjected = await context({ type: 'context', messages: result.messages }, {});
|
||||
assert.equal(alreadyInjected, undefined, 'bootstrap should not duplicate when already present');
|
||||
|
||||
await agentEnd({ type: 'agent_end', messages: [] }, {});
|
||||
const afterEnd = await context({ type: 'context', messages: originalMessages }, {});
|
||||
assert.equal(afterEnd, undefined, 'startup bootstrap should clear after agent_end');
|
||||
});
|
||||
|
||||
test('session_compact injects bootstrap after compaction summaries, not before compaction', async () => {
|
||||
const { handlers } = await loadExtension();
|
||||
const sessionCompact = firstHandler(handlers, 'session_compact');
|
||||
const context = firstHandler(handlers, 'context');
|
||||
|
||||
await sessionCompact({ type: 'session_compact', compactionEntry: {}, fromExtension: false }, {});
|
||||
|
||||
const summary = { role: 'compactionSummary', summary: 'Prior work summary', tokensBefore: 123, timestamp: 1 };
|
||||
const user = { role: 'user', content: [{ type: 'text', text: 'Continue' }], timestamp: 2 };
|
||||
const result = await context({ type: 'context', messages: [summary, user] }, {});
|
||||
|
||||
assert.equal(result.messages.length, 3);
|
||||
assert.equal(result.messages[0], summary);
|
||||
assert.equal(result.messages[1].role, 'user');
|
||||
assert.match(textOf(result.messages[1]), /You have superpowers/);
|
||||
assert.equal(result.messages[2], user);
|
||||
});
|
||||
|
||||
test('pi tools reference documents pi-specific mappings', async () => {
|
||||
assert.equal(existsSync(piToolsPath), true, 'pi-tools.md should exist');
|
||||
const text = await readFile(piToolsPath, 'utf8');
|
||||
|
||||
for (const expected of ['Skill', 'Task', 'TodoWrite', 'read', 'write', 'edit', 'bash']) {
|
||||
assert.match(text, new RegExp(expected));
|
||||
}
|
||||
});
|
||||
Reference in New Issue
Block a user