mirror of
https://gitflic.ru/project/openide/openide.git
synced 2026-03-22 06:50:54 +07:00
IJPL-233350 remove task skill (outdated)
GitOrigin-RevId: e6b3651135d2a9ff7c63b9f214b1ccbfdbefff86
This commit is contained in:
committed by
intellij-monorepo-bot
parent
6d00dfbfe8
commit
8f89c2d040
@@ -6,7 +6,7 @@ This directory (`community/.ai`) contains the templates and documentation source
|
||||
node community/.ai/render-guides.mjs
|
||||
```
|
||||
|
||||
The renderer produces guide files (`AGENTS.md`, `CLAUDE.md`), skill stubs, Beads rules, and OpenCode config/skills.
|
||||
The renderer produces guide files (`AGENTS.md`, `CLAUDE.md`), skill stubs, and OpenCode config/skills.
|
||||
|
||||
## Quick run
|
||||
|
||||
@@ -26,7 +26,6 @@ AI_GUIDE_EDITION=ULTIMATE node community/.ai/render-guides.mjs
|
||||
- `AGENTS.md`
|
||||
- `community/AGENTS.md` (generated in ultimate workspace)
|
||||
- `CLAUDE.md` (ultimate only)
|
||||
- `.claude/rules/beads.md` (from `community/build/mcp-servers/task/beads-semantics.md`)
|
||||
- `opencode.json` (from `.mcp.json`)
|
||||
- `.opencode/skill/*` (mirrored from `.codex/skills/*`)
|
||||
- Skill stubs in `.agents/skills/*`, `.claude/skills/*`, `community/.claude/skills/*`
|
||||
@@ -50,7 +49,6 @@ AI_GUIDE_EDITION=ULTIMATE node community/.ai/render-guides.mjs
|
||||
+--> AGENTS.md
|
||||
+--> community/AGENTS.md (ultimate workspace)
|
||||
+--> CLAUDE.md (ultimate only)
|
||||
+--> .claude/rules/beads.md (separate source)
|
||||
+--> opencode.json
|
||||
+--> .opencode/skill/*
|
||||
+--> skill stubs (see next section)
|
||||
|
||||
@@ -6,9 +6,6 @@ Special handling applies to the directories below. If a file you touch lives und
|
||||
- **Product DSL** (`community/platform/build-scripts/product-dsl/`): read `./.claude/rules/product-dsl.md` before changing anything in this tree.
|
||||
<!-- /IF_TOOL:CODEX -->
|
||||
|
||||
- **Task MCP server** (`community/build/mcp-servers/task/`):
|
||||
- Tests: `community/build/mcp-servers/task/task-mcp.test.mjs`.
|
||||
- Bazel: do not run Bazel build and tests here.
|
||||
- **IJ Proxy MCP server** (`community/build/mcp-servers/ij-proxy/`):
|
||||
- Tests: run `bun run build` and `bun test`.
|
||||
- Bazel: do not run Bazel build and tests here.
|
||||
|
||||
@@ -13,8 +13,6 @@ const sharedPartialsDir = join(repoRoot, ".ai", "partials");
|
||||
|
||||
const generatedGuideHeader =
|
||||
"<!-- Generated by community/.ai/render-guides.mjs; edit community/.ai/* -->\n\n";
|
||||
const generatedBeadsHeader =
|
||||
"<!-- Generated by community/.ai/render-guides.mjs; edit community/build/mcp-servers/task/beads-semantics.md -->\n\n";
|
||||
|
||||
const validEditions = new Set(["ULTIMATE", "COMMUNITY"]);
|
||||
|
||||
@@ -73,15 +71,6 @@ const outputs = [
|
||||
edition: "ULTIMATE",
|
||||
onlyWhenEdition: "ULTIMATE",
|
||||
},
|
||||
{
|
||||
name: "Beads rules (CLAUDE)",
|
||||
tool: "CLAUDE",
|
||||
templatePath: "community/build/mcp-servers/task/beads-semantics.md",
|
||||
output: ".claude/rules/beads.md",
|
||||
forbiddenToolsSuffix: "",
|
||||
usesCompilationRule: false,
|
||||
generatedHeader: generatedBeadsHeader,
|
||||
},
|
||||
];
|
||||
|
||||
function normalize(text) {
|
||||
|
||||
@@ -23,9 +23,6 @@ Special handling applies to the directories below. If a file you touch lives und
|
||||
|
||||
- **Product DSL** (`community/platform/build-scripts/product-dsl/`): read `./.claude/rules/product-dsl.md` before changing anything in this tree.
|
||||
|
||||
- **Task MCP server** (`community/build/mcp-servers/task/`):
|
||||
- Tests: `community/build/mcp-servers/task/task-mcp.test.mjs`.
|
||||
- Bazel: do not run Bazel build and tests here.
|
||||
- **IJ Proxy MCP server** (`community/build/mcp-servers/ij-proxy/`):
|
||||
- Tests: run `bun run build` and `bun test`.
|
||||
- Bazel: do not run Bazel build and tests here.
|
||||
|
||||
@@ -772,168 +772,6 @@
|
||||
"required": ["operation"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "mcp__task__task_status",
|
||||
"description": "Get issue state or full details",
|
||||
"input_schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": { "type": "string", "description": "Issue ID for full details" },
|
||||
"memory_limit": { "type": "integer", "default": 0, "description": "Max entries per memory list in response (0 to omit memory)" },
|
||||
"meta_max_chars": { "type": "integer", "default": 400, "description": "Max chars for description/design/acceptance in meta view (default: 400)" },
|
||||
"user_request": { "type": "string", "description": "Not supported for task_status; use task_start" },
|
||||
"view": { "type": "string", "default": "summary", "description": "Issue view (default: summary)", "enum": ["summary", "meta"] }
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "mcp__task__task_start",
|
||||
"description": "Start task workflow (status + optional epic creation)",
|
||||
"input_schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"acceptance": { "type": "string" },
|
||||
"description": { "type": "string" },
|
||||
"design": { "type": "string" },
|
||||
"id": { "type": "string", "description": "Issue ID for full details" },
|
||||
"memory_limit": { "type": "integer", "default": 0, "description": "Max entries per memory list in response (0 to omit memory)" },
|
||||
"meta_max_chars": { "type": "integer", "default": 400, "description": "Max chars for description/design/acceptance in meta view (default: 400)" },
|
||||
"user_request": { "type": "string", "description": "User task description" },
|
||||
"view": { "type": "string", "default": "summary", "description": "Issue view (default: summary)", "enum": ["summary", "meta"] }
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "mcp__task__task_progress",
|
||||
"description": "Update findings/decisions/status",
|
||||
"input_schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"decisions": { "type": "array", "description": "Decisions made", "items": { "type": "string" } },
|
||||
"findings": { "type": "array", "description": "Discoveries", "items": { "type": "string" } },
|
||||
"id": { "type": "string", "description": "Issue ID" },
|
||||
"memory_limit": { "type": "integer", "default": 0, "description": "Max entries per memory list in response (0 to omit memory)" },
|
||||
"status": { "type": "string", "enum": ["in_progress", "blocked", "deferred"] }
|
||||
},
|
||||
"required": ["id"],
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "mcp__task__task_update_meta",
|
||||
"description": "Update description/design/acceptance",
|
||||
"input_schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"acceptance": { "type": "string" },
|
||||
"description": { "type": "string" },
|
||||
"design": { "type": "string" },
|
||||
"id": { "type": "string", "description": "Issue ID" },
|
||||
"memory_limit": { "type": "integer", "default": 0, "description": "Max entries per memory list in response (0 to omit memory)" },
|
||||
"meta_max_chars": { "type": "integer", "default": 400, "description": "Max chars for description/design/acceptance in meta view (default: 400)" },
|
||||
"view": { "type": "string", "default": "summary", "description": "Issue view (default: summary)", "enum": ["summary", "meta"] }
|
||||
},
|
||||
"required": ["id"],
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "mcp__task__task_decompose",
|
||||
"description": "Create sub-issues under epic (auto-starts single child)",
|
||||
"input_schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"epic_id": { "type": "string", "description": "Parent epic ID" },
|
||||
"sub_issues": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"acceptance": { "type": "string" },
|
||||
"dep_type": { "type": "string" },
|
||||
"depends_on": { "type": "array", "items": { "anyOf": [{ "type": "integer" }, { "type": "string" }] } },
|
||||
"description": { "type": "string" },
|
||||
"design": { "type": "string" },
|
||||
"title": { "type": "string" },
|
||||
"type": { "type": "string" }
|
||||
},
|
||||
"required": ["title", "description", "acceptance", "design"],
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
"update_epic_acceptance": { "type": "string" }
|
||||
},
|
||||
"required": ["epic_id", "sub_issues"],
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "mcp__task__task_create",
|
||||
"description": "Create issue",
|
||||
"input_schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"acceptance": { "type": "string" },
|
||||
"dep_type": { "type": "string" },
|
||||
"depends_on": { "anyOf": [{ "type": "string" }, { "type": "array", "items": { "type": "string" } }] },
|
||||
"description": { "type": "string" },
|
||||
"design": { "type": "string" },
|
||||
"parent": { "type": "string" },
|
||||
"priority": { "type": "string" },
|
||||
"title": { "type": "string" },
|
||||
"type": { "type": "string", "default": "task" }
|
||||
},
|
||||
"required": ["title", "description", "design", "acceptance"],
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "mcp__task__task_link",
|
||||
"description": "Add dependencies between existing issues",
|
||||
"input_schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"dep_type": { "type": "string" },
|
||||
"depends_on": { "anyOf": [{ "type": "string" }, { "type": "array", "items": { "type": "string" } }] },
|
||||
"id": { "type": "string" }
|
||||
},
|
||||
"required": ["id", "depends_on"],
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "mcp__task__task_done",
|
||||
"description": "Close issue",
|
||||
"input_schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"decisions": { "type": "array", "items": { "type": "string" } },
|
||||
"findings": { "type": "array", "items": { "type": "string" } },
|
||||
"id": { "type": "string" },
|
||||
"reason": { "type": "string" }
|
||||
},
|
||||
"required": ["id", "reason"],
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "mcp__task__task_reopen",
|
||||
"description": "Reopen closed issue",
|
||||
"input_schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": { "type": "string", "description": "Issue ID" },
|
||||
"memory_limit": { "type": "integer", "default": 0, "description": "Max entries per memory list in response (0 to omit memory)" },
|
||||
"meta_max_chars": { "type": "integer", "default": 400, "description": "Max chars for description/design/acceptance in meta view (default: 400)" },
|
||||
"reason": { "type": "string", "description": "Reason for reopening" },
|
||||
"view": { "type": "string", "default": "summary", "description": "Issue view (default: summary)", "enum": ["summary", "meta"] }
|
||||
},
|
||||
"required": ["id", "reason"],
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "mcp__ide__getDiagnostics",
|
||||
"description": "Gets diagnostic info.",
|
||||
@@ -1326,4 +1164,4 @@
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,46 +0,0 @@
|
||||
// Copyright 2000-2026 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
|
||||
import {spawn} from 'child_process'
|
||||
|
||||
// Runs bd command asynchronously using spawn + Promise
|
||||
// Execute the `bd` CLI with the given arguments and resolve with its
// trimmed stdout. Rejects when the binary cannot be found, when spawning
// fails, or when the process exits with a non-zero status.
export function bd(args) {
  return new Promise((resolve, reject) => {
    const child = spawn('bd', args, {stdio: ['ignore', 'pipe', 'pipe']})

    let out = ''
    let err = ''
    child.stdout.on('data', (chunk) => { out += chunk.toString() })
    child.stderr.on('data', (chunk) => { err += chunk.toString() })

    child.on('error', (spawnError) => {
      const message = spawnError.code === 'ENOENT'
        ? "bd CLI not found. Ensure 'bd' is installed and in PATH."
        : `bd error: ${spawnError.message}`
      reject(new Error(message))
    })

    child.on('close', (code) => {
      if (code === 0) {
        resolve(out.trim())
      } else {
        reject(new Error(`bd command failed: ${err || `exit code ${code}`}`))
      }
    })
  })
}
|
||||
|
||||
// Attach a free-form comment to the given issue via `bd comments add`.
export async function bdAddComment(id, text) {
  const commandArgs = ['comments', 'add', id, text]
  await bd(commandArgs)
}
|
||||
|
||||
// Run a bd command with `--json` appended and parse its JSON output.
export async function bdJson(args) {
  const raw = await bd(args.concat('--json'))
  return JSON.parse(raw)
}
|
||||
|
||||
// bd show returns array, this extracts single issue (null if not found)
|
||||
// `bd show` returns an array; extract the single issue, or null when the
// issue is absent or the command fails (best-effort lookup by design).
export async function bdShowOne(id) {
  let issues
  try {
    issues = await bdJson(['show', id])
  } catch (e) {
    return null
  }
  return issues[0] || null
}
|
||||
@@ -1,114 +0,0 @@
|
||||
# Beads Task MCP (Agent Spec)
|
||||
|
||||
## Response envelope
|
||||
- Every response includes `kind` and `next`.
|
||||
- Response kinds: `issue`, `summary`, `empty`, `progress`, `created`, `updated`, `closed`, `error`.
|
||||
- Issue/progress responses may include top-level `memory`.
|
||||
- Summary responses use `issues` (array of summary issues).
|
||||
- Summary/issue objects include core fields only.
|
||||
|
||||
## Views and memory
|
||||
Inputs:
|
||||
- `view="summary" | "meta"` (default `summary`) and `meta_max_chars` (default 400; <=0 disables truncation).
|
||||
- Applies to: `task_status`, `task_start`, `task_reopen`.
|
||||
|
||||
Outputs:
|
||||
- Summary issue fields: `id`, `title`, `status`, `priority`, `type`, `assignee`, `parent`, `ready_children`, `children`, `is_new` (omit empty).
|
||||
- `children` is returned for epic issue views (`task_status(id)`) as a list of summary issues.
|
||||
- Meta fields (only in `view="meta"`): `description`, `design`, `acceptance`.
|
||||
- `meta_truncated` lists meta fields that were truncated.
|
||||
- Memory payload is returned when `memory_limit > 0`.
|
||||
- Shape: `{findings?, decisions?, truncated?, more?}` (omit empty fields).
|
||||
- `memory_limit` caps each list; `0` omits memory.
|
||||
- When applying `memory_limit`, return the most recent entries (latest), not the earliest.
|
||||
- If `truncated` is true, `more` may include counts of omitted items: `{findings, decisions}`.
|
||||
|
||||
## Field semantics (WHAT / HOW / WHY)
|
||||
- `description` = WHAT (scope + user-visible outcome).
|
||||
- `design` = HOW (approach, architecture, steps, constraints, risks).
|
||||
- `acceptance` = DONE (verifiable criteria/tests; no implementation steps).
|
||||
- `findings` = FACTS (observations, repros, measurements, logs).
|
||||
- `decisions` = WHY (choice + rationale + tradeoffs).
|
||||
|
||||
## Input validation
|
||||
- Tool inputs are strict; unknown fields are rejected.
|
||||
- Defaults: `view=summary`, `meta_max_chars=400`, `memory_limit=0`.
|
||||
- `task_create` and `task_decompose` require non-empty `description`, `design`, and `acceptance`.
|
||||
- `task_link` requires `id` and non-empty `depends_on` (string or string array).
|
||||
- `task_start` requires `id` or `user_request`.
|
||||
|
||||
## Errors (common)
|
||||
- `task_create`: missing meta -> `Missing required fields: ...`.
|
||||
- `task_decompose`: sub-issue missing meta -> `sub_issues[i] missing required fields: ...`.
|
||||
- `task_update_meta`: no fields -> `At least one of description, design, acceptance is required`.
|
||||
- `task_start`: missing id/user_request -> `task_start requires id or user_request`.
|
||||
|
||||
## Tool behaviors (canonical)
|
||||
- `task_status()` -> `kind: "summary" | "empty" | "error"`
|
||||
- When in_progress tasks exist, `summary.issues` lists them all.
|
||||
- `empty` means no in_progress tasks.
|
||||
- `task_status(id, memory_limit?, view?, meta_max_chars?)` -> `kind: "issue" | "error"` (optional `memory`)
|
||||
- `task_start(user_request, description?, design?, acceptance?, memory_limit?, view?, meta_max_chars?)` -> `kind: "issue" (is_new=true) | "error"`
|
||||
- If description/design/acceptance are provided, they are used.
|
||||
- Otherwise: description = "USER REQUEST: ...", design/acceptance = "PENDING".
|
||||
- Always creates a new epic when `user_request` is provided, even if in_progress tasks exist.
|
||||
- `task_start(id, memory_limit?, view?, meta_max_chars?)` -> `kind: "issue"` (is_new=false, status `in_progress`)
|
||||
- `task_progress(..., memory_limit?)` -> `kind: "progress" | "error"` (optional `memory`)
|
||||
- `task_update_meta(id, description?, design?, acceptance?, view?, meta_max_chars?, memory_limit?)` -> `kind: "issue" | "error"` (optional `memory`)
|
||||
- `task_decompose(epic_id, sub_issues)` -> `kind: "created" (ids, epic_id, started_child_id) | "error"`
|
||||
- Auto-starts when a single child is created.
|
||||
- `task_create(title, description, design, acceptance, type?, parent?, ...)` -> `kind: "created" (id) | "error"`
|
||||
- `type` can be any issue type, including `epic`.
|
||||
- `task_link(id, depends_on, dep_type?)` -> `kind: "updated" (id, added_depends_on, dep_type?) | "error"`
|
||||
- `task_done(id, reason)` -> `kind: "closed" (closed, next_ready, epic_status, parent_id)`
|
||||
- If the closed issue was the last open child of an epic, the parent epic is auto-closed (>=1 child, not pinned or hooked).
|
||||
- Auto-close reason: "Auto-closed: all child issues closed".
|
||||
- `task_reopen(id, reason, memory_limit?, view?, meta_max_chars?)` -> `kind: "issue" | "error"` (optional `memory`)
|
||||
|
||||
## Status and semantics
|
||||
- Statuses: `open`, `in_progress`, `blocked`, `deferred`, `closed`, `tombstone`, `pinned`, `hooked`.
|
||||
- Ready queue = open issues with no `blocks` deps; only `blocks` affects readiness.
|
||||
- Use `blocked` when waiting on a dependency; use `deferred` when intentionally paused.
|
||||
- `pinned` and `hooked` are protected statuses; do not auto-close.
|
||||
|
||||
## Structure
|
||||
- Priority set on create: `task_create(priority="P2")` (P0..P4 / 0..4).
|
||||
- Parent/child: `task_decompose(epic_id, ...)` or `task_create(parent=epic_id)`.
|
||||
- Dependencies: `task_decompose(depends_on=[...])` accepts indices (0..i-1) or issue IDs; `dep_type` on a sub-issue applies the dependency type to all of its `depends_on` entries.
|
||||
- `task_create(depends_on=..., dep_type=...)` accepts a string or string array and sets a type; add dependencies later with `task_link(id, depends_on, dep_type?)`.
|
||||
- Epics own child tasks via `parent-child` links.
|
||||
- Child IDs are dotted (example: `bd-xxxx.1`); up to 3 nesting levels.
|
||||
- Work happens on child issues; epic is for roll-up only.
|
||||
|
||||
## Usage hints
|
||||
- Start/claim: `task_start(id)` or `task_progress(id, status="in_progress")`.
|
||||
- Resume without an id: call `task_status()`; if `kind` is `empty`, ask whether to start a new epic; otherwise ask the user which issue to resume.
|
||||
- After multi-child decomposition, call `task_start(id)` on the chosen child to set `in_progress`.
|
||||
- Block/defer: `task_progress(id, status="blocked"|"deferred")`.
|
||||
- Close: `task_done(id, reason)`. Reopen: `task_reopen(id, reason)`.
|
||||
- `task_status(id=epic)` returns child statuses.
|
||||
- `task_done` returns `next_ready` and `epic_status`.
|
||||
- `task_start(user_request)` always creates a new epic; call `task_status()` first if you need to review in_progress work.
|
||||
- Use `task_update_meta` for description/design/acceptance changes; use `task_progress` for findings/decisions/status updates.
|
||||
|
||||
## Examples (one-liners)
|
||||
- `task_status()`
|
||||
- `task_status(id="bd-123", view="meta")`
|
||||
- `task_start(user_request="Add caching")`
|
||||
- `task_start(user_request="Add caching", description="Add cache layer", design="LRU in front of DB", acceptance="Cache hits/misses tracked")`
|
||||
- `task_start(id="bd-123")`
|
||||
- `task_progress(id="bd-123", decisions=["Use LRU cache"])`
|
||||
- `task_update_meta(id="bd-123", design="LRU in front of DB")`
|
||||
- `task_decompose(epic_id="bd-123", sub_issues=[{title:"Add cache", description:"Add cache layer", design:"LRU map", acceptance:"Cache hit/miss verified"}])`
|
||||
- `task_decompose(epic_id="bd-123", sub_issues=[{title:"Wire cache", description:"Hook cache", design:"Call cache API", acceptance:"Cache used", depends_on:["bd-122"], dep_type:"blocks"}])`
|
||||
- `task_create(title="Add cache", description="Add cache layer", design="LRU map", acceptance="Cache hit/miss verified")`
|
||||
- `task_create(title="Wire cache", description="Hook cache", design="Call cache API", acceptance="Cache used", depends_on=["bd-101","bd-102"])`
|
||||
- `task_link(id="bd-103", depends_on=["bd-101","bd-102"])`
|
||||
- `task_done(id="bd-123", reason="Completed")`
|
||||
- `task_reopen(id="bd-123", reason="Regression found")`
|
||||
|
||||
## Decomposition guidelines
|
||||
- Prefer 2-7 children per epic; avoid single-child decompositions unless justified.
|
||||
- Each child should be doable in ~0.5-2 hours; split if larger.
|
||||
- Do not split by phase (design/impl/test); split by deliverable behavior.
|
||||
- Use dependencies only when truly blocking; avoid chains that serialize work.
|
||||
@@ -1,224 +0,0 @@
|
||||
// Copyright 2000-2026 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
|
||||
|
||||
import {bdAddComment} from './bd-client.mjs'
|
||||
|
||||
// Markers used to tag memory entries stored as bd comments / legacy notes.
export const FINDING_PREFIX = 'FINDING:'
export const DECISION_PREFIX = 'KEY DECISION:'
// Default cap on memory entries per list; 0 means "omit memory".
export const DEFAULT_MEMORY_LIMIT = 0

// Trim an entry and return it, or null when it is not a non-empty string.
function normalizeEntry(entry) {
  if (typeof entry !== 'string') return null
  const trimmed = entry.trim()
  if (trimmed.length === 0) return null
  return trimmed
}
|
||||
|
||||
// Normalize an entry and, when it carries the given prefix, strip that
// prefix (returning null if nothing meaningful remains after stripping).
function normalizePrefixedEntry(entry, prefix) {
  const value = normalizeEntry(entry)
  if (!value) return null
  if (!prefix || !value.startsWith(prefix)) return value
  const remainder = value.slice(prefix.length).trim()
  return remainder || null
}
|
||||
|
||||
// Normalize a list of entries, dropping blanks and stripping `prefix`
// from each; non-array input yields an empty list.
function normalizeEntries(entries, prefix) {
  if (!Array.isArray(entries)) return []
  return entries
    .map((raw) => normalizePrefixedEntry(raw, prefix))
    .filter((value) => value !== null)
}
|
||||
|
||||
// Concatenate two entry lists, dropping blanks and later duplicates
// while preserving first-seen order.
function mergeEntries(primary, secondary) {
  const seen = new Set()
  const merged = []
  for (const raw of [...primary, ...secondary]) {
    const value = normalizeEntry(raw)
    if (!value || seen.has(value)) continue
    seen.add(value)
    merged.push(value)
  }
  return merged
}
|
||||
|
||||
// Parse notes into structured sections (legacy JSON + text) for backward compatibility.
function parseNotesOnly(notes) {
  const sections = {findings: [], decisions: []}
  if (!notes) return sections

  // Legacy JSON notes: an object with findings/decisions/pending_close.
  const body = notes.trim()
  if (body.startsWith('{')) {
    let parsed = null
    try {
      parsed = JSON.parse(body)
    } catch (e) {
      // Not valid JSON after all -- fall through to plain-text parsing.
    }
    if (parsed !== null) {
      sections.findings = normalizeEntries(parsed.findings, FINDING_PREFIX)
      sections.decisions = normalizeEntries(parsed.decisions, DECISION_PREFIX)
      if (parsed.pending_close) {
        sections.pending_close = parsed.pending_close
      }
      return sections
    }
  }

  // Plain-text notes: one prefixed entry per line; unprefixed lines ignored.
  for (const rawLine of notes.split('\n')) {
    const line = rawLine.trim()
    if (line.startsWith(FINDING_PREFIX)) {
      const value = normalizePrefixedEntry(line, FINDING_PREFIX)
      if (value) sections.findings.push(value)
    } else if (line.startsWith(DECISION_PREFIX)) {
      const value = normalizePrefixedEntry(line, DECISION_PREFIX)
      if (value) sections.decisions.push(value)
    }
  }
  return sections
}
|
||||
|
||||
// Extract FINDING/DECISION prefixed comment texts into structured
// sections; comments without a recognized prefix are ignored.
function parseComments(comments) {
  const sections = {findings: [], decisions: []}
  if (!Array.isArray(comments)) return sections

  for (const comment of comments) {
    const raw = comment?.text
    const text = typeof raw === 'string' ? raw.trim() : ''
    if (!text) continue
    if (text.startsWith(FINDING_PREFIX)) {
      const value = normalizePrefixedEntry(text, FINDING_PREFIX)
      if (value) sections.findings.push(value)
    } else if (text.startsWith(DECISION_PREFIX)) {
      const value = normalizePrefixedEntry(text, DECISION_PREFIX)
      if (value) sections.decisions.push(value)
    }
  }
  return sections
}
|
||||
|
||||
// Build combined sections from comments + legacy notes for API compatibility.
// Comment-sourced entries come first; returns null when nothing was found.
function parseNotes(notes, comments) {
  const fromNotes = parseNotesOnly(notes)
  const fromComments = parseComments(comments)
  const findings = mergeEntries(fromComments.findings, fromNotes.findings)
  const decisions = mergeEntries(fromComments.decisions, fromNotes.decisions)
  if (findings.length === 0 && decisions.length === 0) return null
  return {findings, decisions}
}
|
||||
|
||||
// Coerce a memory limit to a non-negative integer; null/undefined or
// unparseable input falls back to the module default.
function normalizeLimit(limit) {
  if (limit == null) return DEFAULT_MEMORY_LIMIT
  const value = Number.parseInt(limit, 10)
  if (!Number.isFinite(value)) return DEFAULT_MEMORY_LIMIT
  return value > 0 ? value : 0
}
|
||||
|
||||
// Return at most `limit` entries, keeping the most recent (trailing)
// ones; non-array input or a non-positive limit yields an empty list.
function sliceToLimit(entries, limit) {
  if (!Array.isArray(entries) || limit <= 0) return []
  return entries.length > limit ? entries.slice(-limit) : entries
}
|
||||
|
||||
// Assemble the memory payload from parsed sections, capped at `limit`
// entries per list. Returns null when sections are absent, memory is
// disabled (cap <= 0), or there is nothing to report.
function buildMemoryFromSections(sections, limit) {
  if (!sections) return null
  const cap = normalizeLimit(limit)
  if (cap <= 0) return null

  const totals = {
    findings: sections.findings.length,
    decisions: sections.decisions.length
  }
  if (totals.findings === 0 && totals.decisions === 0) return null

  const findings = sliceToLimit(sections.findings, cap)
  const decisions = sliceToLimit(sections.decisions, cap)
  // Flag when either list was cut down to the cap.
  const truncated = findings.length < totals.findings
    || decisions.length < totals.decisions

  return {findings, decisions, totals, truncated, limit: cap}
}
|
||||
|
||||
// Build the memory payload from raw notes and comments (null when empty).
export function buildMemory(notes, comments, limit) {
  const sections = parseNotes(notes, comments)
  return buildMemoryFromSections(sections, limit)
}
|
||||
|
||||
// Build the memory payload directly from raw findings/decisions lists.
export function buildMemoryFromEntries(findings, decisions, limit) {
  return buildMemoryFromSections(
    {
      findings: normalizeEntries(findings),
      decisions: normalizeEntries(decisions)
    },
    limit
  )
}
|
||||
|
||||
// Build memory from an issue's notes/comments, then strip those raw
// fields from the issue object (mutates `issue` in place).
export function extractMemoryFromIssue(issue, limit) {
  const {notes, comments} = issue
  delete issue.notes
  delete issue.comments
  return buildMemory(notes, comments, limit)
}
|
||||
|
||||
// Keep entries not already present in `existingSet`, adding each kept
// entry to the set so later duplicates are dropped as well.
function filterNewEntries(entries, existingSet) {
  if (!Array.isArray(entries)) return []
  const fresh = []
  for (const raw of entries) {
    const value = normalizeEntry(raw)
    if (!value || existingSet.has(value)) continue
    existingSet.add(value)
    fresh.push(value)
  }
  return fresh
}
|
||||
|
||||
// Persist findings and decisions as prefixed bd comments, one comment
// per entry, sequentially so comment order matches entry order.
export async function addSectionComments(issueId, findings, decisions) {
  for (const entry of findings) {
    await bdAddComment(issueId, `${FINDING_PREFIX} ${entry}`)
  }
  for (const entry of decisions) {
    await bdAddComment(issueId, `${DECISION_PREFIX} ${entry}`)
  }
}
|
||||
|
||||
// Compute which findings/decisions must be appended as comments, the
// final merged lists after the update, and whether legacy notes should
// be stripped once their entries are migrated to comments.
export function prepareSectionUpdates(issue, incomingFindings, incomingDecisions) {
  const notesSections = parseNotesOnly(issue.notes)
  const commentSections = parseComments(issue.comments)

  // Current state: comment entries first, then any legacy note entries.
  const existingFindings = mergeEntries(commentSections.findings, notesSections.findings)
  const existingDecisions = mergeEntries(commentSections.decisions, notesSections.decisions)

  // Incoming entries with their markers stripped.
  const normalizedFindings = normalizeEntries(incomingFindings, FINDING_PREFIX)
  const normalizedDecisions = normalizeEntries(incomingDecisions, DECISION_PREFIX)

  // Entries that are genuinely new relative to the current state.
  const newFindings = filterNewEntries(normalizedFindings, new Set(existingFindings))
  const newDecisions = filterNewEntries(normalizedDecisions, new Set(existingDecisions))

  // Legacy note entries not yet mirrored as comments must be migrated.
  const migrateFindings = filterNewEntries(notesSections.findings, new Set(commentSections.findings))
  const migrateDecisions = filterNewEntries(notesSections.decisions, new Set(commentSections.decisions))

  return {
    notesSections,
    finalFindings: mergeEntries(existingFindings, newFindings),
    finalDecisions: mergeEntries(existingDecisions, newDecisions),
    findingsToAdd: mergeEntries(migrateFindings, newFindings),
    decisionsToAdd: mergeEntries(migrateDecisions, newDecisions),
    // Notes carried structured data -> safe to strip after migration.
    shouldStripNotes: notesSections.findings.length > 0
      || notesSections.decisions.length > 0
      || notesSections.pending_close
  }
}
|
||||
@@ -1,83 +0,0 @@
|
||||
// Copyright 2000-2026 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
|
||||
|
||||
#!/usr/bin/env
|
||||
node
|
||||
|
||||
/* global process */
|
||||
import {toolHandlers, tools} from './task-core.mjs'
|
||||
|
||||
// Print CLI usage, examples, and the sorted tool names to stderr.
function printUsage() {
  const toolList = tools.map(tool => tool.name).sort().join(', ')
  const usageLines = [
    'Usage:',
    ' node community/build/mcp-servers/task/task-cli.mjs tools',
    ' node community/build/mcp-servers/task/task-cli.mjs call <tool> <json>',
    '',
    'Examples:',
    ' node community/build/mcp-servers/task/task-cli.mjs tools',
    ' node community/build/mcp-servers/task/task-cli.mjs call task_status "{}"',
    ' node community/build/mcp-servers/task/task-cli.mjs call task_status "{\"id\":\"idea-2-xyz\"}"',
    '',
    `Available tools: ${toolList}`
  ]
  for (const line of usageLines) {
    console.error(line)
  }
}
|
||||
|
||||
// Join the remaining CLI words and parse them as a single JSON object.
// Empty input yields {}; malformed JSON or a non-object value throws an
// `Invalid JSON arguments: ...` error.
function parseArgs(rawArgs) {
  if (!rawArgs || rawArgs.length === 0) return {}
  const text = rawArgs.join(' ').trim()
  if (!text) return {}
  let value
  try {
    value = JSON.parse(text)
    const isPlainObject = value !== null
      && typeof value === 'object'
      && !Array.isArray(value)
    if (!isPlainObject) {
      throw new Error('arguments must be a JSON object')
    }
  } catch (error) {
    throw new Error(`Invalid JSON arguments: ${error.message}`)
  }
  return value
}
|
||||
|
||||
// CLI entry point: `tools` prints the tool schemas; `call <tool> <json>`
// invokes a tool handler with parsed JSON arguments. Usage errors print
// help to stderr and exit with status 1.
async function main() {
  const [command, toolName, ...rest] = process.argv.slice(2)

  if (!command) {
    printUsage()
    process.exit(1)
  }

  switch (command) {
    case 'tools':
      console.log(JSON.stringify(tools, null, 2))
      return

    case 'call': {
      if (!toolName) {
        console.error('Missing tool name.')
        printUsage()
        process.exit(1)
      }
      const handler = toolHandlers[toolName]
      if (!handler) {
        console.error(`Unknown tool: ${toolName}`)
        printUsage()
        process.exit(1)
      }
      const args = parseArgs(rest)
      const result = await handler(args, {})
      console.log(JSON.stringify(result, null, 2))
      return
    }

    default:
      console.error(`Unknown command: ${command}`)
      printUsage()
      process.exit(1)
  }
}
|
||||
|
||||
// Surface any unhandled error on stderr and exit non-zero.
main().catch(error => {
  console.error(error.message || String(error))
  process.exit(1)
})
|
||||
@@ -1,417 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
// Copyright 2000-2026 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
|
||||
// Manual Codex CLI smoke checks for task workflow.
|
||||
import assert from 'node:assert/strict'
|
||||
import {execSync, spawn} from 'node:child_process'
|
||||
import {cpSync, existsSync, mkdirSync, mkdtempSync, readdirSync, readFileSync, rmSync, statSync, writeFileSync} from 'node:fs'
|
||||
import {homedir, tmpdir} from 'node:os'
|
||||
import {dirname, join, resolve} from 'node:path'
|
||||
import {fileURLToPath} from 'node:url'
|
||||
|
||||
// Resolve this script's directory, then walk four levels up
// (community/build/mcp-servers/task -> monorepo root).
const __dirname = dirname(fileURLToPath(import.meta.url))
const repoRoot = resolve(__dirname, '..', '..', '..', '..')

// CLI flags: --keep preserves temp work dirs after a passing scenario,
// --verbose enables extra diagnostic logging.
const rawArgs = process.argv.slice(2)
const args = new Set(rawArgs)
const keep = args.has('--keep')
const verbose = args.has('--verbose')
|
||||
|
||||
/**
 * Read a CLI option value, supporting both `--flag=value` and `--flag value`.
 *
 * @param {string} flag - option name including leading dashes
 * @returns {string | null} the value, or null when absent / valueless
 */
function getArgValue(flag) {
  const inlinePrefix = `${flag}=`
  for (const arg of rawArgs) {
    if (arg.startsWith(inlinePrefix)) {
      return arg.slice(inlinePrefix.length)
    }
  }
  const position = rawArgs.indexOf(flag)
  if (position >= 0 && rawArgs[position + 1]) {
    return rawArgs[position + 1]
  }
  return null
}
|
||||
|
||||
// External Codex CLI configuration, overridable via environment variables.
const codexBin = process.env.CODEX_BIN || 'codex'
const model = process.env.CODEX_MODEL || 'gpt-5.2'
const reasoningEffort = process.env.CODEX_REASONING_EFFORT || 'high'
// Timeouts are given in seconds and converted to milliseconds here.
const timeoutMs = Number.parseInt(process.env.CODEX_TIMEOUT || '600', 10) * 1000
const idleTimeoutMs = Number.parseInt(getArgValue('--idle-timeout') || process.env.CODEX_IDLE_TIMEOUT || '60', 10) * 1000
// Codex home to copy auth material from: CLI flag wins, then env, then ~/.codex.
const cliCodexHome = getArgValue('--codex-home')
const sourceCodexHome = cliCodexHome || process.env.CODEX_HOME_SOURCE || process.env.CODEX_HOME || join(homedir(), '.codex')
|
||||
|
||||
/**
 * Execute a shell command synchronously and return its stdout as UTF-8 text.
 * Throws when the command exits non-zero.
 *
 * @param {string} cmd - command line to execute
 * @param {string} [cwd] - working directory for the command
 * @returns {string} captured standard output
 */
function run(cmd, cwd) {
  const options = {cwd, encoding: 'utf-8', stdio: 'pipe'}
  return execSync(cmd, options)
}
|
||||
|
||||
/**
 * Copy Codex authentication material from `sourceCodexHome` into the sandbox
 * Codex home so the spawned CLI can authenticate.
 *
 * @param {string} targetCodexHome - destination .codex directory
 * @returns {string[]} names of copied entries (directories suffixed with '/')
 */
function copyCodexAuth(targetCodexHome) {
  if (!existsSync(sourceCodexHome)) return []
  // Known credential file names, probed individually.
  const candidates = [
    'auth.json',
    'auth.yaml',
    'auth.yml',
    'credentials.json',
    'credentials.yaml',
    'credentials.yml'
  ]
  // Credential directories, copied recursively when present.
  const dirCandidates = ['auth', 'credentials']
  const copied = []
  for (const filename of candidates) {
    const sourcePath = join(sourceCodexHome, filename)
    if (!existsSync(sourcePath)) continue
    cpSync(sourcePath, join(targetCodexHome, filename))
    copied.push(filename)
  }
  // Fix: the original loop variable was named `dirname`, shadowing the
  // `dirname` import from node:path within this function body.
  for (const credentialDir of dirCandidates) {
    const sourcePath = join(sourceCodexHome, credentialDir)
    if (!existsSync(sourcePath)) continue
    try {
      if (statSync(sourcePath).isDirectory()) {
        cpSync(sourcePath, join(targetCodexHome, credentialDir), {recursive: true})
        copied.push(`${credentialDir}/`)
      }
    } catch {
      // Ignore non-directory entries (e.g. broken symlinks, races).
    }
  }
  return copied
}
|
||||
|
||||
/**
 * Write a minimal Codex config.toml into the sandbox Codex home, registering
 * the task MCP server as a stdio server.
 *
 * @param {string} targetCodexHome - sandbox .codex directory
 * @returns {string} path of the written config file
 */
function writeCodexConfig(targetCodexHome) {
  const configPath = join(targetCodexHome, 'config.toml')
  const mcpPath = resolve(repoRoot, 'community/build/mcp-servers/task/task-mcp.mjs')
  // Escape backslashes (Windows paths) for the TOML string literal.
  const tomlPath = mcpPath.replace(/\\/g, '\\\\')
  const configText = [
    `model = "${model}"`,
    'approval_policy = "never"',
    'cli_auth_credentials_store = "file"',
    '',
    '[mcp_servers.task]',
    'type = "stdio"',
    'command = "node"',
    `args = ["${tomlPath}"]`,
    ''
  ].join('\n')
  writeFileSync(configPath, configText)
  return configPath
}
|
||||
|
||||
/**
 * Collect every regular file under `root`, recursing into subdirectories.
 * Unreadable directories are silently skipped; a missing root yields [].
 *
 * @param {string} root - directory to walk
 * @returns {string[]} absolute paths of all files found (unspecified order)
 */
function listFilesRecursive(root) {
  if (!existsSync(root)) return []
  const collected = []
  const pending = [root]
  while (pending.length > 0) {
    const dir = pending.pop()
    let entries = []
    try {
      entries = readdirSync(dir, {withFileTypes: true})
    } catch {
      // Directory vanished or is unreadable; skip it.
      continue
    }
    for (const entry of entries) {
      const fullPath = join(dir, entry.name)
      if (entry.isDirectory()) {
        pending.push(fullPath)
        continue
      }
      collected.push(fullPath)
    }
  }
  return collected
}
|
||||
|
||||
/**
 * Locate the most recently modified `.jsonl` session log under
 * `<codexHome>/sessions`.
 *
 * @param {string} codexHome - Codex home directory
 * @returns {string | null} path of the newest log, or null when none exists
 */
function findLatestSessionLog(codexHome) {
  const sessionsDir = join(codexHome, 'sessions')
  const logs = listFilesRecursive(sessionsDir).filter(file => file.endsWith('.jsonl'))
  let newestPath = null
  let newestMtime = 0
  for (const logFile of logs) {
    let stats
    try {
      stats = statSync(logFile)
    } catch {
      // File vanished between listing and stat; ignore it.
      continue
    }
    if (stats.mtimeMs > newestMtime) {
      newestMtime = stats.mtimeMs
      newestPath = logFile
    }
  }
  return newestPath
}
|
||||
|
||||
/**
 * Scan the newest Codex session log and summarize tool usage:
 * how many `mcp__task__*` function calls were made, and which shell
 * commands mentioning the standalone word `bd` were executed.
 * Returns zeroed counters when no log exists or it cannot be read.
 */
function analyzeSessionLog(codexHome) {
  const summary = {taskMcpCalls: 0, bdShellCommands: []}
  const logPath = findLatestSessionLog(codexHome)
  if (!logPath) return summary
  let text
  try {
    text = readFileSync(logPath, 'utf-8')
  } catch {
    // Log unreadable; report empty summary rather than failing the run.
    return summary
  }
  // The log is JSONL: one event per line; malformed lines are skipped.
  for (const line of text.split('\n')) {
    if (!line.trim()) continue
    let event
    try {
      event = JSON.parse(line)
    } catch {
      continue
    }
    if (event.type === 'response_item' && event.payload?.type === 'function_call') {
      const name = event.payload.name
      // Task MCP tools are exposed to Codex with the mcp__task__ prefix.
      if (typeof name === 'string' && name.startsWith('mcp__task__')) {
        summary.taskMcpCalls += 1
      }
      if (name === 'shell_command') {
        // The command text is nested inside a JSON-encoded arguments string.
        let commandText = null
        if (typeof event.payload.arguments === 'string') {
          try {
            const parsed = JSON.parse(event.payload.arguments)
            if (parsed && typeof parsed.command === 'string') {
              commandText = parsed.command
            }
          } catch {
            // ignore malformed args
          }
        }
        // \bbd\b matches 'bd' as a whole word anywhere in the command line.
        if (commandText && /\bbd\b/.test(commandText)) {
          summary.bdShellCommands.push(commandText)
        }
      }
    }
  }
  return summary
}
|
||||
|
||||
/**
 * Prepare an isolated smoke-test workspace: a fresh temp directory with a
 * git repo, a stealth Beads database, the task skill copied in, and a
 * sandbox Codex home (auth material + config.toml).
 * Returns the workspace path; caller is responsible for cleanup.
 */
function createWorkDir() {
  const workDir = mkdtempSync(join(tmpdir(), 'codex-task-smoke-'))
  run('git init', workDir)
  run('bd init --stealth', workDir)

  // Sandbox Codex home lives inside the work dir so CODEX_HOME can point at it.
  const codexDir = join(workDir, '.codex')
  const skillsDir = join(codexDir, 'skills')
  mkdirSync(skillsDir, {recursive: true})
  cpSync(join(repoRoot, '.codex', 'skills', 'task'), join(skillsDir, 'task'), {recursive: true})
  const copied = copyCodexAuth(codexDir)
  const configPath = writeCodexConfig(codexDir)
  if (verbose) {
    console.log(`codex config source: ${sourceCodexHome}`)
    console.log(`codex config copied: ${copied.length > 0 ? copied.join(', ') : '(none)'}`)
    console.log(`codex config created: ${configPath}`)
  }
  // Missing auth is a warning, not an error: the run may still work if the
  // CLI has another way to authenticate.
  if (copied.length === 0) {
    console.log(`warn - no Codex auth found in ${sourceCodexHome}`)
  }

  return workDir
}
|
||||
|
||||
/**
 * Spawn `codex exec` with the given prompt inside the prepared work dir and
 * collect its streamed JSON output.
 *
 * Resolves (never rejects) with:
 *   {success, content, error?, taskMcpCalls, bdShellCommands}
 * where content is the concatenated assistant message text and the last two
 * fields come from analyzeSessionLog on the sandbox Codex home.
 *
 * Enforces three independent limits: a hard wall-clock timeout, an idle
 * timeout on any output, and an idle timeout on parseable JSON output.
 */
function runCodex(prompt, workDir, {verboseLogs} = {}) {
  return new Promise((resolve) => {
    const cmdArgs = [
      'exec',
      '--json',
      '--model', model,
      '-c', `model_reasoning_effort="${reasoningEffort}"`,
      '--sandbox', 'workspace-write',
      '--disable', 'shell_snapshot',
      '--skip-git-repo-check',
      '--color', 'never',
      '--cd', workDir,
      prompt
    ]

    // CODEX_HOME points at the sandbox .codex created by createWorkDir.
    const env = {
      ...process.env,
      BD_NO_DAEMON: '1',
      CODEX_CI: '0',
      CODEX_SANDBOX_NETWORK_DISABLED: '0',
      CODEX_HOME: join(workDir, '.codex')
    }

    if (verboseLogs) {
      console.log(`codex cmd: ${codexBin} ${cmdArgs.join(' ')}`)
      console.log(`codex env CODEX_HOME=${env.CODEX_HOME}`)
      console.log(`codex env CODEX_CI=${env.CODEX_CI} CODEX_SANDBOX_NETWORK_DISABLED=${env.CODEX_SANDBOX_NETWORK_DISABLED}`)
    }

    // Streaming state: assistant text fragments, accumulated stderr (which
    // also receives unparseable stdout lines), and activity timestamps used
    // by the idle watchdog.
    const contentParts = []
    let stderr = ''
    let nonJsonLines = 0
    let timedOut = false
    let idleTimedOut = null
    let buffer = ''
    let lastOutputActivity = Date.now()
    let lastJsonActivity = Date.now()

    const proc = spawn(codexBin, cmdArgs, {cwd: workDir, env, stdio: ['ignore', 'pipe', 'pipe']})

    // Watchdog: kill the process when it goes quiet — either no output at
    // all, or output that never parses as JSON — for longer than idleTimeoutMs.
    const idleInterval = setInterval(() => {
      const now = Date.now()
      if (!idleTimedOut && now - lastOutputActivity > idleTimeoutMs) {
        idleTimedOut = 'output'
        proc.kill('SIGKILL')
        return
      }
      if (!idleTimedOut && now - lastJsonActivity > idleTimeoutMs) {
        idleTimedOut = 'json'
        proc.kill('SIGKILL')
      }
    }, 1000)

    // Hard wall-clock limit independent of activity.
    const timeoutId = setTimeout(() => {
      timedOut = true
      proc.kill('SIGKILL')
    }, timeoutMs)

    proc.stdout.on('data', (data) => {
      lastOutputActivity = Date.now()
      // Normalize CR to LF, then split into complete lines; the trailing
      // partial line stays in `buffer` until more data arrives.
      buffer += data.toString().replace(/\r/g, '\n')
      const lines = buffer.split('\n')
      buffer = lines.pop() || ''
      for (const line of lines) {
        if (!line.trim()) continue
        try {
          const event = JSON.parse(line)
          lastJsonActivity = Date.now()
          // Two event shapes carry assistant text, depending on CLI version.
          if (event.type === 'item.completed' && event.item?.type === 'agent_message' && event.item.text) {
            contentParts.push(event.item.text)
          } else if (event.type === 'message' && event.message?.role === 'assistant') {
            for (const block of (event.message.content || [])) {
              if (block.type === 'text' && block.text) contentParts.push(block.text)
            }
          }
        } catch {
          // Non-JSON stdout is treated as diagnostics: echoed when verbose
          // (capped at 50 lines) and folded into the stderr transcript.
          nonJsonLines += 1
          if (verboseLogs && nonJsonLines <= 50) {
            console.log(`codex stdout: ${line}`)
          }
          stderr += `${line}\n`
        }
      }
    })

    proc.stderr.on('data', (data) => {
      lastOutputActivity = Date.now()
      const text = data.toString()
      stderr += text
      if (verboseLogs && text.trim()) {
        console.log(text.trimEnd())
      }
    })

    // Spawn failure (e.g. binary missing): resolve with a failure result
    // instead of rejecting so callers handle one shape.
    proc.on('error', (err) => {
      clearTimeout(timeoutId)
      clearInterval(idleInterval)
      const summary = analyzeSessionLog(env.CODEX_HOME)
      resolve({
        success: false,
        content: '',
        error: err.code === 'ENOENT'
          ? "Codex CLI not found. Ensure 'codex' is installed and in PATH."
          : `Error: ${err.message}`,
        ...summary
      })
    })

    proc.on('close', (code) => {
      clearTimeout(timeoutId)
      clearInterval(idleInterval)
      // Flush any final partial line left in the buffer.
      if (buffer.trim()) {
        const line = buffer.trim()
        try {
          const event = JSON.parse(line)
          lastJsonActivity = Date.now()
          if (event.type === 'item.completed' && event.item?.type === 'agent_message' && event.item.text) {
            contentParts.push(event.item.text)
          }
        } catch {
          nonJsonLines += 1
          if (verboseLogs && nonJsonLines <= 50) {
            console.log(`codex stdout: ${line}`)
          }
          stderr += `${line}\n`
        }
      }

      const summary = analyzeSessionLog(env.CODEX_HOME)

      // Failure precedence: hard timeout, then idle timeouts, then exit code.
      if (timedOut) {
        resolve({success: false, content: '', error: `Codex timed out after ${timeoutMs / 1000}s`, ...summary})
        return
      }
      if (idleTimedOut === 'output') {
        resolve({success: false, content: '', error: `Codex idle timeout after ${idleTimeoutMs / 1000}s without output`, ...summary})
        return
      }
      if (idleTimedOut === 'json') {
        resolve({success: false, content: '', error: `Codex idle timeout after ${idleTimeoutMs / 1000}s without JSON output`, ...summary})
        return
      }
      if (code !== 0) {
        resolve({success: false, content: '', error: `Codex exited with code ${code}: ${stderr}`, ...summary})
        return
      }
      resolve({success: true, content: contentParts.join('\n'), ...summary})
    })
  })
}
|
||||
|
||||
/**
 * Scenario: from a free-form bug report, Codex must create an epic via the
 * task MCP (not the bd CLI) and auto-start at least one investigation child.
 * Verifies the resulting Beads state with `bd list --json`.
 */
async function scenarioCreateEpic(workDir) {
  const prompt = 'Track this in Beads: product-dsl generator emits a duplicate content module id; root cause unknown. Create an epic and start the investigation now.'
  const result = await runCodex(prompt, workDir, {verboseLogs: verbose})
  assert.ok(result.success, result.error)
  assert.ok(result.taskMcpCalls > 0, 'expected task MCP to be invoked')
  // The agent must go through the MCP surface, never the raw bd CLI.
  assert.equal(result.bdShellCommands.length, 0, 'did not expect bd CLI usage')
  if (verbose && result.content) console.log(result.content)

  const issues = JSON.parse(run('bd list --json', workDir))
  const epic = issues.find(issue => (issue.issue_type || issue.type) === 'epic')
  assert.ok(epic, 'expected an epic to be created')
  // Children are linked either via an explicit parent field or a dotted id
  // prefix (e.g. epic-1.1).
  const children = issues.filter(issue => issue.parent === epic.id || issue.id.startsWith(`${epic.id}.`))
  assert.ok(children.length >= 1, 'expected at least one child under epic')
  assert.ok(children.some(child => child.status === 'in_progress'), 'expected an auto-started child')
}
|
||||
|
||||
/**
 * Scenario: given an existing Beads task id, Codex must resume it through
 * the task MCP, leaving the issue in_progress. Verified via `bd show --json`.
 */
async function scenarioResumeTask(workDir) {
  // Seed an issue directly with the bd CLI; --silent prints only the new id.
  const taskId = run('bd create --title "Existing Task" --type task --silent', workDir).trim()
  const prompt = `Resume task ${taskId} in Beads.`
  const result = await runCodex(prompt, workDir, {verboseLogs: verbose})
  assert.ok(result.success, result.error)
  assert.ok(result.taskMcpCalls > 0, 'expected task MCP to be invoked')
  assert.equal(result.bdShellCommands.length, 0, 'did not expect bd CLI usage')
  if (verbose && result.content) console.log(result.content)

  // bd show --json returns an array; the single match is the seeded issue.
  const show = JSON.parse(run(`bd show ${taskId} --json`, workDir))
  const issue = show[0]
  assert.ok(issue, 'expected issue to exist')
  assert.equal(issue.status, 'in_progress')
}
|
||||
|
||||
/**
 * Run one smoke scenario in a fresh work dir, reporting ok/fail on the
 * console. The work dir is kept (and its path printed) on failure or when
 * --keep was passed; otherwise it is deleted.
 *
 * @returns {Promise<boolean>} true when the scenario passed
 */
async function runScenario(name, fn) {
  const workDir = createWorkDir()
  let success = false
  try {
    await fn(workDir)
    success = true
    console.log(`ok - ${name}`)
  } catch (err) {
    // Scenario failures are reported, not rethrown, so later scenarios run.
    console.error(`fail - ${name}: ${err.message || err}`)
  } finally {
    if (!success || keep) {
      // Preserve the workspace for post-mortem inspection.
      console.log(`work dir: ${workDir}`)
    } else {
      rmSync(workDir, {recursive: true, force: true})
    }
  }
  return success
}
|
||||
|
||||
/**
 * Run every smoke scenario sequentially and return the process exit code:
 * 0 when all scenarios passed, 1 otherwise.
 */
async function main() {
  const scenarios = [
    ['create epic + investigation child', scenarioCreateEpic],
    ['resume task by id', scenarioResumeTask]
  ]
  let allPassed = true
  for (const [name, fn] of scenarios) {
    const passed = await runScenario(name, fn)
    allPassed = allPassed && passed
  }
  return allPassed ? 0 : 1
}
|
||||
|
||||
// Propagate the aggregate result as the exit code without forcing an
// immediate exit, so pending console output can flush.
main().then((code) => {
  process.exitCode = code
})
|
||||
@@ -1,188 +0,0 @@
|
||||
// Copyright 2000-2026 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
|
||||
|
||||
import {toolHandlers} from './task-handlers.mjs'
|
||||
|
||||
// MCP tool catalogue: JSON-Schema descriptors for every task_* tool.
// The matching implementations come from task-handlers.mjs (toolHandlers).
// The view / meta_max_chars / memory_limit triple recurs on tools that
// return an issue view; defaults are summary view with memory omitted.
const tools = [
  // Read-only status lookup; explicitly rejects user_request.
  {
    name: 'task_status',
    description: 'Get issue state or full details',
    inputSchema: {
      type: 'object',
      additionalProperties: false,
      properties: {
        id: {type: 'string', description: 'Issue ID for full details'},
        user_request: {type: 'string', description: 'Not supported for task_status; use task_start'},
        view: {type: 'string', enum: ['summary', 'meta'], default: 'summary', description: 'Issue view (default: summary)'},
        meta_max_chars: {type: 'integer', default: 400, description: 'Max chars for description/design/acceptance in meta view (default: 400)'},
        memory_limit: {type: 'integer', default: 0, description: 'Max entries per memory list in response (0 to omit memory)'}
      }
    }
  },
  // Resume an existing issue by id, or create an epic from user_request.
  {
    name: 'task_start',
    description: 'Start task workflow (status + optional epic creation)',
    inputSchema: {
      type: 'object',
      additionalProperties: false,
      properties: {
        id: {type: 'string', description: 'Issue ID for full details'},
        user_request: {type: 'string', description: 'User task description'},
        description: {type: 'string'},
        design: {type: 'string'},
        acceptance: {type: 'string'},
        view: {type: 'string', enum: ['summary', 'meta'], default: 'summary', description: 'Issue view (default: summary)'},
        meta_max_chars: {type: 'integer', default: 400, description: 'Max chars for description/design/acceptance in meta view (default: 400)'},
        memory_limit: {type: 'integer', default: 0, description: 'Max entries per memory list in response (0 to omit memory)'}
      }
    }
  },
  // Append findings/decisions and optionally move the issue's status.
  {
    name: 'task_progress',
    description: 'Update findings/decisions/status',
    inputSchema: {
      type: 'object',
      additionalProperties: false,
      properties: {
        id: {type: 'string', description: 'Issue ID'},
        findings: {type: 'array', items: {type: 'string'}, description: 'Discoveries'},
        decisions: {type: 'array', items: {type: 'string'}, description: 'Decisions made'},
        status: {type: 'string', enum: ['in_progress', 'blocked', 'deferred']},
        memory_limit: {type: 'integer', default: 0, description: 'Max entries per memory list in response (0 to omit memory)'}
      },
      required: ['id']
    }
  },
  // Rewrite the issue's long-form metadata fields.
  {
    name: 'task_update_meta',
    description: 'Update description/design/acceptance',
    inputSchema: {
      type: 'object',
      additionalProperties: false,
      properties: {
        id: {type: 'string', description: 'Issue ID'},
        description: {type: 'string'},
        design: {type: 'string'},
        acceptance: {type: 'string'},
        view: {type: 'string', enum: ['summary', 'meta'], default: 'summary', description: 'Issue view (default: summary)'},
        meta_max_chars: {type: 'integer', default: 400, description: 'Max chars for description/design/acceptance in meta view (default: 400)'},
        memory_limit: {type: 'integer', default: 0, description: 'Max entries per memory list in response (0 to omit memory)'}
      },
      required: ['id']
    }
  },
  // Break an epic into children; depends_on entries may be integer indices
  // into the sub_issues array (earlier entries only) or existing issue ids.
  {
    name: 'task_decompose',
    description: 'Create sub-issues under epic (auto-starts single child)',
    inputSchema: {
      type: 'object',
      additionalProperties: false,
      properties: {
        epic_id: {type: 'string', description: 'Parent epic ID'},
        sub_issues: {
          type: 'array',
          items: {
            type: 'object',
            additionalProperties: false,
            properties: {
              title: {type: 'string'},
              description: {type: 'string'},
              acceptance: {type: 'string'},
              design: {type: 'string'},
              type: {type: 'string'},
              depends_on: {
                type: 'array',
                items: {
                  anyOf: [
                    {type: 'integer'},
                    {type: 'string'}
                  ]
                }
              },
              dep_type: {type: 'string'}
            },
            required: ['title', 'description', 'acceptance', 'design']
          }
        },
        update_epic_acceptance: {type: 'string'}
      },
      required: ['epic_id', 'sub_issues']
    }
  },
  // Create a standalone issue (optionally parented and linked).
  {
    name: 'task_create',
    description: 'Create issue',
    inputSchema: {
      type: 'object',
      additionalProperties: false,
      properties: {
        title: {type: 'string'},
        description: {type: 'string'},
        type: {type: 'string', default: 'task'},
        parent: {type: 'string'},
        acceptance: {type: 'string'},
        design: {type: 'string'},
        priority: {type: 'string'},
        depends_on: {
          anyOf: [
            {type: 'string'},
            {type: 'array', items: {type: 'string'}}
          ]
        },
        dep_type: {type: 'string'}
      },
      required: ['title', 'description', 'design', 'acceptance']
    }
  },
  // Add dependency edges between issues that already exist.
  {
    name: 'task_link',
    description: 'Add dependencies between existing issues',
    inputSchema: {
      type: 'object',
      additionalProperties: false,
      properties: {
        id: {type: 'string'},
        depends_on: {
          anyOf: [
            {type: 'string'},
            {type: 'array', items: {type: 'string'}}
          ]
        },
        dep_type: {type: 'string'}
      },
      required: ['id', 'depends_on']
    }
  },
  // Close an issue with a mandatory reason; may record final memory entries.
  {
    name: 'task_done',
    description: 'Close issue',
    inputSchema: {
      type: 'object',
      additionalProperties: false,
      properties: {
        id: {type: 'string'},
        reason: {type: 'string'},
        findings: {type: 'array', items: {type: 'string'}},
        decisions: {type: 'array', items: {type: 'string'}}
      },
      required: ['id', 'reason']
    }
  },
  // Reopen a previously closed issue with a mandatory reason.
  {
    name: 'task_reopen',
    description: 'Reopen closed issue',
    inputSchema: {
      type: 'object',
      additionalProperties: false,
      properties: {
        id: {type: 'string', description: 'Issue ID'},
        reason: {type: 'string', description: 'Reason for reopening'},
        view: {type: 'string', enum: ['summary', 'meta'], default: 'summary', description: 'Issue view (default: summary)'},
        meta_max_chars: {type: 'integer', default: 400, description: 'Max chars for description/design/acceptance in meta view (default: 400)'},
        memory_limit: {type: 'integer', default: 0, description: 'Max entries per memory list in response (0 to omit memory)'}
      },
      required: ['id', 'reason']
    }
  }
]

export {tools, toolHandlers}
|
||||
@@ -1,527 +0,0 @@
|
||||
// Copyright 2000-2026 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
|
||||
|
||||
import {bd, bdJson, bdShowOne} from './bd-client.mjs'
|
||||
import {addSectionComments, buildMemoryFromEntries, extractMemoryFromIssue, prepareSectionUpdates} from './notes.mjs'
|
||||
import {buildIssueView} from './task-issue-view.mjs'
|
||||
import {
|
||||
buildClosed,
|
||||
buildCreated,
|
||||
buildEmpty,
|
||||
buildError,
|
||||
buildIssue,
|
||||
buildProgress,
|
||||
buildSummary,
|
||||
buildUpdated
|
||||
} from './task-responses.mjs'
|
||||
import {buildInProgressSummaries, createEpic, getReadyChildren} from './task-helpers.mjs'
|
||||
|
||||
|
||||
/**
|
||||
* @typedef {{
|
||||
* id?: string,
|
||||
* title?: string,
|
||||
* status?: string,
|
||||
* issue_type?: string,
|
||||
* parent?: string,
|
||||
* priority?: (string|number),
|
||||
* notes?: string,
|
||||
* comments?: Array,
|
||||
* children?: Array
|
||||
* }} Issue
|
||||
*/
|
||||
|
||||
/**
 * Reduce a memory payload to its non-empty parts.
 * Returns null when there is nothing to report; otherwise an object with
 * `findings`/`decisions` (only when non-empty), a `truncated` flag, and a
 * `more` counter describing how many entries were omitted per list.
 */
function compactMemory(memory) {
  if (!memory) return null
  const findings = Array.isArray(memory.findings) ? memory.findings : []
  const decisions = Array.isArray(memory.decisions) ? memory.decisions : []
  const compact = {}
  if (findings.length > 0) {
    compact.findings = findings
  }
  if (decisions.length > 0) {
    compact.decisions = decisions
  }
  if (memory.truncated) {
    compact.truncated = true
    const totals = memory.totals
    if (totals) {
      // Omitted counts are clamped at zero in case totals lag behind.
      const omittedFindings = Math.max(0, (totals.findings || 0) - findings.length)
      const omittedDecisions = Math.max(0, (totals.decisions || 0) - decisions.length)
      if (omittedFindings > 0 || omittedDecisions > 0) {
        compact.more = {findings: omittedFindings, decisions: omittedDecisions}
      }
    }
  }
  return Object.keys(compact).length > 0 ? compact : null
}
|
||||
|
||||
/**
 * Return the trimmed string when `value` is a string with visible content,
 * otherwise null.
 */
function getNonEmptyString(value) {
  if (typeof value === 'string') {
    const trimmed = value.trim()
    if (trimmed.length > 0) {
      return trimmed
    }
  }
  return null
}
|
||||
|
||||
/**
 * Extract the trimmed description/design/acceptance fields from `source`,
 * normalising blank or non-string values to null.
 */
function readMetaFields(source) {
  const {description, design, acceptance} = source
  return {
    description: getNonEmptyString(description),
    design: getNonEmptyString(design),
    acceptance: getNonEmptyString(acceptance)
  }
}
|
||||
|
||||
/**
 * Normalise a depends_on argument (single id string or array of id strings)
 * into `{list: string[]}` of trimmed ids, or `{error: string}` on invalid input.
 * Absent input (null/undefined) normalises to an empty list.
 */
function normalizeDependsOnInput(dependsOn) {
  if (dependsOn == null) return {list: []}
  if (typeof dependsOn === 'string') {
    const trimmed = dependsOn.trim()
    if (!trimmed) {
      return {error: 'depends_on must be a non-empty string'}
    }
    return {list: [trimmed]}
  }
  if (!Array.isArray(dependsOn)) {
    return {error: 'depends_on must be a string or array of strings'}
  }
  const list = []
  for (const entry of dependsOn) {
    if (typeof entry !== 'string') return {error: 'depends_on entries must be strings'}
    const trimmed = entry.trim()
    if (trimmed.length === 0) return {error: 'depends_on entries must be non-empty'}
    list.push(trimmed)
  }
  return {list}
}
|
||||
|
||||
/**
 * Normalise a sub-issue's depends_on list for task_decompose.
 * Integer entries reference earlier sub_issues by position (0..subIndex-1);
 * string entries reference existing issue ids.
 * Returns `{list: Array<{type: 'index'|'id', value}>}` or `{error: string}`.
 */
function normalizeDecomposeDependsOn(dependsOn, subIndex) {
  if (!Array.isArray(dependsOn)) return {list: []}
  const normalized = []
  for (const entry of dependsOn) {
    if (Number.isInteger(entry)) {
      // Forward references are forbidden: only earlier siblings qualify.
      if (entry < 0 || entry >= subIndex) {
        return {error: `Invalid depends_on[${entry}] in sub_issue[${subIndex}]: must reference 0 to ${subIndex - 1}`}
      }
      normalized.push({type: 'index', value: entry})
    } else if (typeof entry === 'string') {
      const trimmed = entry.trim()
      if (!trimmed) {
        return {error: `Invalid depends_on entry in sub_issue[${subIndex}]: empty id`}
      }
      normalized.push({type: 'id', value: trimmed})
    } else {
      return {error: `Invalid depends_on entry in sub_issue[${subIndex}]: must be integer index or issue id`}
    }
  }
  return {list: normalized}
}
|
||||
|
||||
/**
 * Project each child issue onto a compact summary: id/title/status always,
 * plus type, priority, and assignee only when present.
 * Returns null for a missing or empty child list.
 */
function summarizeChildren(children) {
  if (!Array.isArray(children) || children.length === 0) return null
  const summaries = []
  for (const child of children) {
    const summary = {id: child.id, title: child.title, status: child.status}
    const issueType = child.issue_type || child.type
    if (issueType) summary.type = issueType
    // != null keeps priority 0 while dropping null/undefined.
    if (child.priority != null) summary.priority = child.priority
    if (child.assignee) summary.assignee = child.assignee
    summaries.push(summary)
  }
  return summaries
}
|
||||
|
||||
/**
 * Load one issue and build its tool response.
 *
 * When `resume` is set, the issue is moved to in_progress first, and for
 * epics the ready (unblocked) children are attached. Epic children are
 * always reduced to compact summaries. The `next` hint defaults to
 * 'continue' on resume and 'await_user' otherwise.
 *
 * @returns an error response when the issue is missing or the view fails.
 */
async function loadIssue(id, {resume, next, memory_limit, view, meta_max_chars} = {}) {
  const issue = /** @type {Issue | null} */ (await bdShowOne(id))
  if (!issue) {
    return buildError(`Issue ${id} not found`)
  }
  if (resume) {
    await bd(['update', id, '--status', 'in_progress'])
    // Mirror the update locally instead of re-fetching the issue.
    issue.is_new = false
    issue.status = 'in_progress'
    if (issue.issue_type === 'epic') {
      const readyChildren = await getReadyChildren(id)
      if (readyChildren) {
        issue.ready_children = readyChildren
      }
    }
  }
  if (issue.issue_type === 'epic') {
    let summarizedChildren = summarizeChildren(issue.children)
    if (!summarizedChildren) {
      // Children were not inlined on the issue; fall back to a list query.
      // A failing query is non-fatal: the epic is returned without children.
      try {
        const listedChildren = await bdJson(['list', '--parent', id])
        summarizedChildren = summarizeChildren(listedChildren)
      } catch (error) {
        summarizedChildren = null
      }
    }
    if (summarizedChildren) {
      issue.children = summarizedChildren
    }
  }
  const memory = compactMemory(extractMemoryFromIssue(issue, memory_limit))
  let viewIssue
  try {
    viewIssue = buildIssueView(issue, {view, meta_max_chars})
  } catch (error) {
    return buildError(error.message || String(error))
  }
  return buildIssue(viewIssue, {next: next ?? (resume ? 'continue' : 'await_user'), memory})
}
|
||||
|
||||
/**
 * Create a new epic titled with the user's request and return its issue
 * response with next: 'continue'.
 *
 * Returns null when the request is blank (caller falls through to an error).
 * Explicit description/design/acceptance args win over the generated
 * "USER REQUEST: ..." description.
 */
async function createEpicFromUserRequest(userRequest, {memory_limit, view, meta_max_chars, ...metaArgs} = {}) {
  const title = userRequest.trim()
  if (!title) {
    return null
  }
  const meta = readMetaFields(metaArgs)
  const id = await createEpic(title, {
    description: meta.description || `USER REQUEST: ${userRequest}`,
    design: meta.design,
    acceptance: meta.acceptance
  })
  const issue = await bdShowOne(id)
  if (!issue) {
    // Creation succeeded but the re-fetch came back empty: return a minimal
    // response carrying only the new id.
    return buildIssue({id, is_new: true}, {next: 'continue'})
  }
  const memory = compactMemory(extractMemoryFromIssue(issue, memory_limit))
  issue.is_new = true
  let viewIssue
  try {
    viewIssue = buildIssueView(issue, {view, meta_max_chars})
  } catch (error) {
    return buildError(error.message || String(error))
  }
  return buildIssue(viewIssue, {next: 'continue', memory})
}
|
||||
|
||||
/**
 * task_status handler (read-only).
 *
 * With an id: return that issue's details without changing its status.
 * Without an id: list all in-progress issues as summaries.
 * user_request is explicitly rejected — epic creation belongs to task_start.
 */
async function handleTaskStatus(args) {
  if (args.user_request) {
    return buildError('task_status does not accept user_request; use task_start')
  }
  if (args.id) {
    return loadIssue(args.id, {
      resume: false,
      next: 'await_user',
      memory_limit: args.memory_limit,
      view: args.view,
      meta_max_chars: args.meta_max_chars
    })
  }

  const inProgress = /** @type {Issue[]} */ (await bdJson(['list', '--status', 'in_progress']))
  if (inProgress.length === 0) {
    return buildEmpty('No in-progress tasks found.', 'await_user')
  }

  const summaries = await buildInProgressSummaries(inProgress)
  return buildSummary(summaries, {next: 'await_user'})
}
|
||||
|
||||
/**
 * task_start handler.
 *
 * With an id: resume that issue (marks it in_progress) and return details.
 * With a non-blank user_request: create a new epic from it.
 * Otherwise: error — one of the two inputs is required.
 */
async function handleTaskStart(args) {
  if (args.id) {
    return loadIssue(args.id, {
      resume: true,
      next: 'continue',
      memory_limit: args.memory_limit,
      view: args.view,
      meta_max_chars: args.meta_max_chars
    })
  }

  const userRequest = getNonEmptyString(args.user_request)
  if (userRequest) {
    const created = await createEpicFromUserRequest(userRequest, args)
    // createEpicFromUserRequest returns null for a blank title; fall through
    // to the argument error in that case.
    if (created) {
      return created
    }
  }

  return buildError('task_start requires id or user_request')
}
|
||||
|
||||
export const toolHandlers = {
|
||||
task_status: handleTaskStatus,
|
||||
task_start: handleTaskStart,
|
||||
|
||||
task_progress: async (args) => {
|
||||
const issue = /** @type {Issue | null} */ (await bdShowOne(args.id))
|
||||
if (!issue) {
|
||||
return buildError(`Issue ${args.id} not found`)
|
||||
}
|
||||
|
||||
const update = prepareSectionUpdates(issue, args.findings, args.decisions)
|
||||
if (update.findingsToAdd.length > 0 || update.decisionsToAdd.length > 0) {
|
||||
await addSectionComments(args.id, update.findingsToAdd, update.decisionsToAdd)
|
||||
}
|
||||
|
||||
const updateArgs = ['update', args.id]
|
||||
if (args.status) updateArgs.push('--status', args.status)
|
||||
if (update.shouldStripNotes) {
|
||||
updateArgs.push('--notes', '')
|
||||
}
|
||||
if (updateArgs.length > 2) {
|
||||
await bd(updateArgs)
|
||||
}
|
||||
|
||||
const memory = compactMemory(buildMemoryFromEntries(
|
||||
update.finalFindings,
|
||||
update.finalDecisions,
|
||||
args.memory_limit
|
||||
))
|
||||
|
||||
return buildProgress({memory, status: args.status || issue.status})
|
||||
},
|
||||
|
||||
task_update_meta: async (args) => {
|
||||
const issue = /** @type {Issue | null} */ (await bdShowOne(args.id))
|
||||
if (!issue) {
|
||||
return buildError(`Issue ${args.id} not found`)
|
||||
}
|
||||
|
||||
const meta = readMetaFields(args)
|
||||
if (!meta.description && !meta.design && !meta.acceptance) {
|
||||
return buildError('At least one of description, design, acceptance is required')
|
||||
}
|
||||
|
||||
const updateArgs = ['update', args.id]
|
||||
if (meta.description) updateArgs.push('--description', meta.description)
|
||||
if (meta.design) updateArgs.push('--design', meta.design)
|
||||
if (meta.acceptance) updateArgs.push('--acceptance', meta.acceptance)
|
||||
|
||||
await bd(updateArgs)
|
||||
|
||||
return loadIssue(args.id, {
|
||||
resume: false,
|
||||
next: 'await_user',
|
||||
memory_limit: args.memory_limit,
|
||||
view: args.view,
|
||||
meta_max_chars: args.meta_max_chars
|
||||
})
|
||||
},
|
||||
|
||||
task_decompose: async (args) => {
|
||||
const normalizedSubs = []
|
||||
for (let i = 0; i < args.sub_issues.length; i++) {
|
||||
const sub = args.sub_issues[i]
|
||||
const meta = readMetaFields(sub)
|
||||
const missing = []
|
||||
if (!meta.description) missing.push('description')
|
||||
if (!meta.design) missing.push('design')
|
||||
if (!meta.acceptance) missing.push('acceptance')
|
||||
if (missing.length > 0) {
|
||||
return buildError(`sub_issues[${i}] missing required fields: ${missing.join(', ')}`)
|
||||
}
|
||||
const depResult = normalizeDecomposeDependsOn(sub.depends_on, i)
|
||||
if (depResult.error) {
|
||||
return buildError(depResult.error)
|
||||
}
|
||||
normalizedSubs.push({...sub, ...meta, normalized_depends_on: depResult.list})
|
||||
}
|
||||
|
||||
const ids = []
|
||||
for (const sub of normalizedSubs) {
|
||||
const subType = sub.type || 'task'
|
||||
const id = await bd(['create', '--title', sub.title, '--parent', args.epic_id, '--type', subType, '--description', sub.description, '--acceptance', sub.acceptance, '--design', sub.design, '--silent'])
|
||||
ids.push(id)
|
||||
}
|
||||
|
||||
// Add dependencies
|
||||
for (let i = 0; i < normalizedSubs.length; i++) {
|
||||
const sub = normalizedSubs[i]
|
||||
const deps = Array.isArray(sub.normalized_depends_on) ? sub.normalized_depends_on : []
|
||||
if (deps.length === 0) continue
|
||||
const seen = new Set()
|
||||
for (const dep of deps) {
|
||||
const depId = dep.type === 'index' ? ids[dep.value] : dep.value
|
||||
if (!depId || depId === ids[i] || seen.has(depId)) continue
|
||||
seen.add(depId)
|
||||
const depArgs = ['dep', 'add', ids[i], depId]
|
||||
const depType = getNonEmptyString(sub.dep_type)
|
||||
if (depType) depArgs.push('--type', depType)
|
||||
await bd(depArgs)
|
||||
}
|
||||
}
|
||||
|
||||
let startedChildId
|
||||
if (args.sub_issues.length === 1) {
|
||||
startedChildId = ids[0]
|
||||
await bd(['update', startedChildId, '--status', 'in_progress'])
|
||||
}
|
||||
|
||||
if (args.update_epic_acceptance) {
|
||||
await bd(['update', args.epic_id, '--acceptance', args.update_epic_acceptance])
|
||||
}
|
||||
|
||||
const payload = {ids, epic_id: args.epic_id}
|
||||
if (startedChildId) payload.started_child_id = startedChildId
|
||||
return buildCreated(payload)
|
||||
},
|
||||
|
||||
task_create: async (args) => {
|
||||
const title = getNonEmptyString(args.title)
|
||||
if (!title) {
|
||||
return buildError('title required for new issue')
|
||||
}
|
||||
const meta = readMetaFields(args)
|
||||
const missing = []
|
||||
if (!meta.description) missing.push('description')
|
||||
if (!meta.design) missing.push('design')
|
||||
if (!meta.acceptance) missing.push('acceptance')
|
||||
if (missing.length > 0) {
|
||||
return buildError(`Missing required fields: ${missing.join(', ')}`)
|
||||
}
|
||||
const issueType = args.type || 'task'
|
||||
const createArgs = [
|
||||
'create',
|
||||
'--title', title,
|
||||
'--type', issueType,
|
||||
'--description', meta.description,
|
||||
'--acceptance', meta.acceptance,
|
||||
'--design', meta.design,
|
||||
'--silent'
|
||||
]
|
||||
if (args.parent) createArgs.push('--parent', args.parent)
|
||||
if (args.priority) createArgs.push('--priority', args.priority)
|
||||
|
||||
const depResult = normalizeDependsOnInput(args.depends_on)
|
||||
if (depResult.error) {
|
||||
return buildError(depResult.error)
|
||||
}
|
||||
|
||||
const id = await bd(createArgs)
|
||||
if (depResult.list.length > 0) {
|
||||
for (const depId of depResult.list) {
|
||||
const depArgs = ['dep', 'add', id, depId]
|
||||
if (args.dep_type) depArgs.push('--type', args.dep_type)
|
||||
await bd(depArgs)
|
||||
}
|
||||
}
|
||||
return buildCreated({id})
|
||||
},
|
||||
|
||||
task_link: async (args) => {
|
||||
const issue = /** @type {Issue | null} */ (await bdShowOne(args.id))
|
||||
if (!issue) {
|
||||
return buildError(`Issue ${args.id} not found`)
|
||||
}
|
||||
|
||||
const depResult = normalizeDependsOnInput(args.depends_on)
|
||||
if (depResult.error) {
|
||||
return buildError(depResult.error)
|
||||
}
|
||||
if (depResult.list.length === 0) {
|
||||
return buildError('depends_on required')
|
||||
}
|
||||
|
||||
const added = []
|
||||
const seen = new Set()
|
||||
for (const depId of depResult.list) {
|
||||
if (depId === args.id) {
|
||||
return buildError('depends_on cannot include the issue id itself')
|
||||
}
|
||||
if (seen.has(depId)) continue
|
||||
seen.add(depId)
|
||||
const depArgs = ['dep', 'add', args.id, depId]
|
||||
if (args.dep_type) depArgs.push('--type', args.dep_type)
|
||||
await bd(depArgs)
|
||||
added.push(depId)
|
||||
}
|
||||
|
||||
const payload = {id: args.id, added_depends_on: added}
|
||||
if (args.dep_type) payload.dep_type = args.dep_type
|
||||
return buildUpdated(payload)
|
||||
},
|
||||
|
||||
// Close an issue: record final findings/decisions as section comments,
// clear migrated legacy notes, close via bd, then gather follow-up context
// (next ready sibling, parent progress) and auto-close the parent epic
// when all of its children are closed.
task_done: async (args) => {
  const issue = /** @type {Issue | null} */ (await bdShowOne(args.id))
  if (!issue) {
    return buildError(`Issue ${args.id} not found`)
  }

  const update = prepareSectionUpdates(issue, args.findings, args.decisions)

  // Persist any new findings/decisions as section comments.
  const applyComments = async () => {
    if (update.findingsToAdd.length > 0 || update.decisionsToAdd.length > 0) {
      await addSectionComments(args.id, update.findingsToAdd, update.decisionsToAdd)
    }
  }

  // Wipe the free-form notes field once prepareSectionUpdates has decided
  // its content was migrated into section comments.
  const clearNotesIfNeeded = async () => {
    if (update.shouldStripNotes) {
      await bd(['update', args.id, '--notes', ''])
    }
  }

  // Helper to perform actual close and return result
  const doClose = async (summary) => {
    await bd(['close', args.id, '--reason', summary])

    let epicStatus = null
    let nextReady = null
    let parentId = null

    // Everything below is best-effort context gathering; a failure here
    // must not mask the close that already succeeded above.
    try {
      const closedIssue = await bdShowOne(args.id)
      parentId = closedIssue.parent

      if (parentId) {
        // Suggest the next unblocked sibling under the same parent.
        const readyList = await bdJson(['ready', '--parent', parentId])
        nextReady = readyList[0] || null

        const parentIssue = await bdShowOne(parentId)
        if (parentIssue) {
          let children
          try {
            children = await bdJson(['list', '--parent', parentId, '--all'])
          } catch (error) {
            // Fall back to whatever children are embedded in the parent record.
            children = Array.isArray(parentIssue.children) ? parentIssue.children : []
          }
          if (!Array.isArray(children)) {
            children = []
          }
          const completed = children.filter(c => c.status === 'closed').length
          const remaining = children.length - completed
          epicStatus = {completed, remaining}

          // Auto-close an epic whose children are all closed, unless it is
          // pinned/hooked (deliberately kept open) or already closed.
          const isEpic = parentIssue.issue_type === 'epic' || parentIssue.type === 'epic'
          const isPinned = parentIssue.status === 'pinned' || parentIssue.status === 'hooked'
          if (isEpic && children.length > 0 && remaining === 0 && parentIssue.status !== 'closed' && !isPinned) {
            await bd(['close', parentId, '--reason', 'Auto-closed: all child issues closed'])
          }
        }
      }
    } catch (e) {
      // Continue with partial info
    }

    return buildClosed({closed: args.id, next_ready: nextReady, epic_status: epicStatus, parent_id: parentId})
  }

  if (!args.reason) {
    return buildError('reason required')
  }

  await applyComments()
  await clearNotesIfNeeded()
  return await doClose(args.reason)
},
|
||||
|
||||
// Reopen a closed issue (a non-blank reason is mandatory) and return its
// refreshed view together with compacted memory.
task_reopen: async (args) => {
  if (!args.reason || !args.reason.trim()) {
    return buildError('reason required for reopen')
  }

  const target = /** @type {Issue | null} */ (await bdShowOne(args.id))
  if (!target) {
    return buildError(`Issue ${args.id} not found`)
  }

  await bd(['reopen', args.id, '--reason', args.reason.trim()])

  // Re-read the issue; if it cannot be fetched back for some reason, fall
  // back to a minimal synthetic view.
  const refreshed = /** @type {Issue | null} */ (await bdShowOne(args.id))
  if (!refreshed) {
    return buildIssue({id: args.id, status: 'open'}, {next: 'await_user'})
  }

  const memory = compactMemory(extractMemoryFromIssue(refreshed, args.memory_limit))
  let view
  try {
    view = buildIssueView(refreshed, {view: args.view, meta_max_chars: args.meta_max_chars})
  } catch (error) {
    return buildError(error.message || String(error))
  }
  return buildIssue(view, {next: 'await_user', memory})
}
|
||||
}
|
||||
@@ -1,72 +0,0 @@
|
||||
// Copyright 2000-2026 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
|
||||
|
||||
import {bd, bdJson} from './bd-client.mjs'
|
||||
|
||||
/**
|
||||
* @typedef {{
|
||||
* id?: string,
|
||||
* title?: string,
|
||||
* status?: string,
|
||||
* issue_type?: string,
|
||||
* parent?: string,
|
||||
* priority?: (string|number)
|
||||
* }} Issue
|
||||
*/
|
||||
|
||||
// Fetch ready children for an epic (used in task_status and task_start resume)
|
||||
// Fetch ready children for an epic (used in task_status and task_start
// resume). Returns {id, title} pairs, or null when nothing is ready.
export async function getReadyChildren(epicId) {
  const ready = await bdJson(['ready', '--parent', epicId])
  if (ready.length === 0) {
    return null
  }
  return ready.map(({id, title}) => ({id, title}))
}
|
||||
|
||||
// Create an epic for a raw user request, defaulting any missing meta field,
// and immediately mark it in progress. Returns the new epic's id.
export async function createEpic(title, {description, design, acceptance} = {}) {
  const meta = {
    description: description || `USER REQUEST: ${title}`,
    design: design || 'PENDING',
    acceptance: acceptance || 'PENDING'
  }
  const id = await bd([
    'create',
    '--title', title,
    '--type', 'epic',
    '--description', meta.description,
    '--acceptance', meta.acceptance,
    '--design', meta.design,
    '--silent'
  ])
  // New epics represent active work, so they start in_progress right away.
  await bd(['update', id, '--status', 'in_progress'])
  return id
}
|
||||
|
||||
// Build a compact summary of one in-progress issue: id/title/status always,
// optional fields only when present, and ready children for epics.
export async function buildInProgressSummary(issue) {
  const summary = {id: issue.id, title: issue.title, status: issue.status}

  // priority may legitimately be 0, so check for null/undefined explicitly.
  if (issue.priority !== undefined && issue.priority !== null) {
    summary.priority = issue.priority
  }
  if (issue.issue_type) summary.type = issue.issue_type
  if (issue.assignee) summary.assignee = issue.assignee
  if (issue.parent) summary.parent = issue.parent

  // Epics additionally advertise which children are currently unblocked.
  if (issue.issue_type === 'epic') {
    const ready = await getReadyChildren(issue.id)
    if (ready) {
      summary.ready_children = ready
    }
  }
  return summary
}
|
||||
|
||||
// Summarize a list of in-progress issues sequentially, preserving input
// order (each summary may issue its own bd queries).
export async function buildInProgressSummaries(issues) {
  const summaries = []
  let index = 0
  while (index < issues.length) {
    summaries.push(await buildInProgressSummary(issues[index]))
    index += 1
  }
  return summaries
}
|
||||
|
||||
@@ -1,84 +0,0 @@
|
||||
// Copyright 2000-2026 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
|
||||
|
||||
// Default cap on meta-field length when no explicit meta_max_chars is given.
const DEFAULT_META_MAX_CHARS = 400

// The only accepted values for the `view` option.
const VIEW_VALUES = new Set(['summary', 'meta'])

// Fields copied into every issue view; `from` is the raw issue key,
// `to` is the key exposed to clients.
const SUMMARY_FIELDS = [
  {from: 'id', to: 'id'},
  {from: 'title', to: 'title'},
  {from: 'status', to: 'status'},
  {from: 'priority', to: 'priority'},
  {from: 'issue_type', to: 'type'},
  {from: 'assignee', to: 'assignee'},
  {from: 'parent', to: 'parent'},
  {from: 'ready_children', to: 'ready_children'},
  {from: 'children', to: 'children'},
  {from: 'is_new', to: 'is_new'}
]

// Long-form fields only included in the 'meta' view (subject to truncation).
const META_FIELDS = [
  {from: 'description', to: 'description'},
  {from: 'design', to: 'design'},
  {from: 'acceptance_criteria', to: 'acceptance'}
]
|
||||
|
||||
// Normalize the requested view name; a falsy view defaults to 'summary'.
// Matching is case- and whitespace-insensitive; anything other than
// 'summary' or 'meta' throws.
function normalizeView(view) {
  if (!view) {
    return 'summary'
  }
  const candidate = String(view).trim().toLowerCase()
  switch (candidate) {
    case 'summary':
    case 'meta':
      return candidate
    default:
      throw new Error(`Invalid view: ${view}`)
  }
}
|
||||
|
||||
// Normalize the meta_max_chars option: undefined/null -> default cap,
// non-positive or unparseable -> null (meaning "no truncation"),
// otherwise the parsed positive integer.
function normalizeMaxChars(maxChars) {
  if (maxChars == null) {
    return DEFAULT_META_MAX_CHARS
  }
  const limit = Number.parseInt(maxChars, 10)
  return Number.isFinite(limit) && limit > 0 ? limit : null
}
|
||||
|
||||
// Copy source[fromKey] to target[toKey], skipping absent keys and "empty"
// values: null/undefined, blank strings, and empty arrays. Falsy-but-real
// values such as 0 and false are kept.
function addField(target, source, fromKey, toKey) {
  if (!Object.prototype.hasOwnProperty.call(source, fromKey)) {
    return
  }
  const value = source[fromKey]
  const isBlankString = typeof value === 'string' && value.trim() === ''
  const isEmptyArray = Array.isArray(value) && value.length === 0
  if (value == null || isBlankString || isEmptyArray) {
    return
  }
  target[toKey] = value
}
|
||||
|
||||
// Truncate string fields longer than maxChars in place (appending '...')
// and record the shortened field names under `meta_truncated`.
// A falsy maxChars disables truncation entirely. Returns `result`.
function applyTruncation(result, fields, maxChars) {
  if (!maxChars) {
    return result
  }
  const shortened = []
  for (const field of fields) {
    const value = result[field]
    const needsCut = typeof value === 'string' && value.length > maxChars
    if (needsCut) {
      result[field] = `${value.slice(0, maxChars)}...`
      shortened.push(field)
    }
  }
  if (shortened.length > 0) {
    result.meta_truncated = shortened
  }
  return result
}
|
||||
|
||||
// Project a raw issue into the client-facing view. The 'summary' view
// includes only identity/status fields; 'meta' additionally includes
// description/design/acceptance, truncated to meta_max_chars.
export function buildIssueView(issue, {view, meta_max_chars} = {}) {
  const mode = normalizeView(view)
  const result = {}

  for (const {from, to} of SUMMARY_FIELDS) {
    addField(result, issue, from, to)
  }

  if (mode !== 'meta') {
    return result
  }

  for (const {from, to} of META_FIELDS) {
    addField(result, issue, from, to)
  }
  const limit = normalizeMaxChars(meta_max_chars)
  return applyTruncation(result, META_FIELDS.map(({to}) => to), limit)
}
|
||||
@@ -1,11 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
// Copyright 2000-2026 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
|
||||
|
||||
import {createMcpServer} from '../shared/mcp-rpc.mjs'
|
||||
import {toolHandlers, tools} from './task-core.mjs'
|
||||
|
||||
// Start the task MCP server over stdio, exposing the tool schemas and
// their handlers from task-core.
createMcpServer({
  serverInfo: {name: 'task', version: '4.0.0'},
  tools,
  toolHandlers
})
|
||||
@@ -1,770 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
// Copyright 2000-2026 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
|
||||
/// <reference types="node" />
|
||||
import {after, before, beforeEach, describe, it} from 'node:test'
|
||||
import {deepStrictEqual, ok, strictEqual} from 'node:assert/strict'
|
||||
import {execSync, spawn} from 'node:child_process'
|
||||
import {mkdtempSync, rmSync} from 'node:fs'
|
||||
import {tmpdir} from 'node:os'
|
||||
import {dirname, join} from 'node:path'
|
||||
import process from 'node:process'
|
||||
import {fileURLToPath} from 'node:url'
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url))
|
||||
const REQUEST_TIMEOUT_MS = 120_000
|
||||
const SUITE_TIMEOUT_MS = 1_200_000
|
||||
const TEST_ENV = {...process.env, BEADS_NO_DAEMON: 'true'}
|
||||
|
||||
// Run a shell command synchronously with the daemon-free test environment;
// callers may override any execSync option (including env).
function execTest(command, options = {}) {
  const mergedOptions = {env: TEST_ENV, ...options}
  return execSync(command, mergedOptions)
}
|
||||
|
||||
// MCP client for testing
|
||||
// Minimal JSON-RPC 2.0 client for driving the MCP server under test over
// newline-delimited JSON on stdio. Responses that arrive before anyone
// awaits them are parked in responseQueue; awaiting callers register in
// pendingResolvers.
class McpTestClient {
  /** @type {import('node:child_process').ChildProcessWithoutNullStreams} */
  server

  /** @param {import('node:child_process').ChildProcessWithoutNullStreams} serverProcess */
  constructor(serverProcess) {
    /** @type {import('node:child_process').ChildProcessWithoutNullStreams} */
    this.server = serverProcess
    // Responses received before any caller asked for them.
    /** @type {Array<any>} */
    this.responseQueue = []
    // Callbacks waiting for the next response line to arrive.
    /** @type {Array<(response: any) => void>} */
    this.pendingResolvers = []
    // Monotonic JSON-RPC request id.
    this.requestId = 0

    /** @type {import('node:child_process').ChildProcessWithoutNullStreams} */
    const server = this.server

    // Buffer stdout for JSON-RPC responses
    let buffer = ''
    // noinspection JSUnresolvedReference
    server.stdout.on('data', (data) => {
      buffer += data.toString()
      const lines = buffer.split('\n')
      buffer = lines.pop() // Keep incomplete line in buffer

      for (const line of lines) {
        if (!line.trim()) continue
        try {
          const response = JSON.parse(line)
          // Hand the response to the oldest waiter, or park it.
          if (this.pendingResolvers.length > 0) {
            this.pendingResolvers.shift()(response)
          } else {
            this.responseQueue.push(response)
          }
        } catch (e) {
          // Ignore non-JSON output (stderr leaking to stdout, etc.)
        }
      }
    })
  }

  // Send one JSON-RPC request and resolve with the response whose id
  // matches; responses for other ids are re-queued. Rejects after
  // REQUEST_TIMEOUT_MS if no matching response arrives.
  async send(method, params = {}) {
    const id = ++this.requestId
    const request = {jsonrpc: '2.0', id, method, params}
    /** @type {import('node:child_process').ChildProcessWithoutNullStreams} */
    const server = this.server
    // noinspection JSUnresolvedReference
    server.stdin.write(JSON.stringify(request) + '\n')

    // Wait for response with matching id
    return new Promise((resolve, reject) => {
      // `settled` guards against resolving after the timeout fired (and
      // vice versa).
      let settled = false
      const timeoutId = setTimeout(() => {
        if (settled) return
        settled = true
        reject(new Error(`Timed out waiting for response to ${method}`))
      }, REQUEST_TIMEOUT_MS)

      const finish = (response) => {
        if (settled) return
        settled = true
        clearTimeout(timeoutId)
        resolve(response)
      }

      // Look for our id in the parked responses first; otherwise register
      // a resolver that re-queues mismatched ids and retries.
      const checkQueue = () => {
        const idx = this.responseQueue.findIndex(r => r.id === id)
        if (idx >= 0) {
          finish(this.responseQueue.splice(idx, 1)[0])
        } else {
          this.pendingResolvers.push((response) => {
            if (response.id === id) {
              finish(response)
            } else {
              this.responseQueue.push(response)
              checkQueue()
            }
          })
        }
      }
      checkQueue()
    })
  }

  // Perform the MCP initialize handshake.
  async initialize() {
    return this.send('initialize', {
      protocolVersion: '2024-11-05',
      clientInfo: {name: 'test-client', version: '1.0.0'},
      capabilities: {}
    })
  }

  // Invoke a tool and return its decoded result: text content is parsed as
  // JSON, anything else is returned as the raw result. Throws on an MCP
  // protocol error.
  async callTool(name, args = {}) {
    const response = await this.send('tools/call', {name, arguments: args})
    if (response.error) {
      throw new Error(`MCP error: ${response.error.message}`)
    }
    // Parse tool result from content
    const content = response.result?.content?.[0]
    if (content?.type === 'text') {
      return JSON.parse(content.text)
    }
    return response.result
  }

  // Shut the server down: close its stdin, then kill the process.
  close() {
    /** @type {import('node:child_process').ChildProcessWithoutNullStreams} */
    const server = this.server
    // noinspection JSUnresolvedReference
    server.stdin.end()
    // noinspection JSUnresolvedReference
    server.kill()
  }
}
|
||||
|
||||
// Start MCP server in test directory
|
||||
// Launch the MCP server process rooted in the given test directory (so bd
// resolves that directory's .beads/) and wrap it in a test client.
function startServer(testDir) {
  /** @type {import('node:child_process').ChildProcessWithoutNullStreams} */
  const serverProcess = spawn('node', [join(__dirname, 'task-mcp.mjs')], {
    cwd: testDir, // Server runs in test dir so bd finds .beads/
    env: TEST_ENV,
    stdio: ['pipe', 'pipe', 'pipe']
  })
  return new McpTestClient(serverProcess)
}
|
||||
|
||||
// Create two task children under the epic and mark both in_progress.
// Returns their bd-assigned ids.
function createInProgressChildren(epicId, testDir) {
  const readOpts = {cwd: testDir, encoding: 'utf-8'}
  const makeChild = (title) =>
    execTest(`bd create --title "${title}" --type task --parent ${epicId} --silent`, readOpts).trim()

  const child1 = makeChild('Child 1')
  const child2 = makeChild('Child 2')
  for (const id of [child1, child2]) {
    execTest(`bd update ${id} --status in_progress`, {cwd: testDir, stdio: 'pipe'})
  }
  return {child1, child2}
}
|
||||
|
||||
describe('task MCP integration', {timeout: SUITE_TIMEOUT_MS}, () => {
|
||||
let testDir
|
||||
let client
|
||||
|
||||
before(async () => {
|
||||
// Create isolated test environment with git repo (required for bd)
|
||||
testDir = mkdtempSync(join(tmpdir(), 'task-mcp-test-'))
|
||||
execTest('git init', {cwd: testDir, stdio: 'pipe'})
|
||||
execTest('bd init --stealth', {cwd: testDir, stdio: 'pipe'})
|
||||
client = startServer(testDir)
|
||||
await client.initialize()
|
||||
})
|
||||
|
||||
after(() => {
|
||||
if (client) client.close()
|
||||
rmSync(testDir, {recursive: true, force: true})
|
||||
})
|
||||
|
||||
beforeEach(() => {
|
||||
// Close all in-progress issues to reset state
|
||||
try {
|
||||
/** @type {{id: string}[]} */
|
||||
const inProgress = JSON.parse(execTest('bd list --status in_progress --json', {cwd: testDir, encoding: 'utf-8'}))
|
||||
for (const issue of inProgress) {
|
||||
execTest(`bd close ${issue.id} --reason "test cleanup"`, {cwd: testDir, stdio: 'pipe'})
|
||||
}
|
||||
} catch (e) {
|
||||
// Ignore errors (e.g., no issues exist)
|
||||
}
|
||||
})
|
||||
|
||||
describe('task_status', () => {
|
||||
it('returns empty when no issues exist', async () => {
|
||||
const result = await client.callTool('task_status', {})
|
||||
strictEqual(result.kind, 'empty')
|
||||
strictEqual(result.next, 'await_user')
|
||||
strictEqual(result.message, 'No in-progress tasks found.')
|
||||
})
|
||||
|
||||
it('rejects user_request', async () => {
|
||||
const result = await client.callTool('task_status', {user_request: 'test task'})
|
||||
strictEqual(result.kind, 'error')
|
||||
strictEqual(result.message, 'task_status does not accept user_request; use task_start')
|
||||
})
|
||||
|
||||
it('omits memory and notes/comments by default', async () => {
|
||||
const epic = await client.callTool('task_start', {user_request: 'Memory default'})
|
||||
|
||||
const status = await client.callTool('task_status', {id: epic.issue.id})
|
||||
strictEqual(status.kind, 'issue')
|
||||
strictEqual(status.memory, undefined)
|
||||
strictEqual(status.issue.notes, undefined)
|
||||
strictEqual(status.issue.comments, undefined)
|
||||
})
|
||||
|
||||
it('returns memory when memory_limit is set', async () => {
|
||||
const epic = await client.callTool('task_start', {user_request: 'Memory status'})
|
||||
|
||||
await client.callTool('task_progress', {
|
||||
id: epic.issue.id,
|
||||
findings: ['F1', 'F2'],
|
||||
decisions: ['D1', 'D2']
|
||||
})
|
||||
|
||||
const status = await client.callTool('task_status', {id: epic.issue.id, memory_limit: 1})
|
||||
strictEqual(status.kind, 'issue')
|
||||
ok(status.memory)
|
||||
deepStrictEqual(status.memory.findings, ['F2'])
|
||||
deepStrictEqual(status.memory.decisions, ['D2'])
|
||||
strictEqual(status.memory.truncated, true)
|
||||
deepStrictEqual(status.memory.more, {findings: 1, decisions: 1})
|
||||
strictEqual(status.issue.notes, undefined)
|
||||
strictEqual(status.issue.comments, undefined)
|
||||
})
|
||||
|
||||
it('supports meta view with truncation', async () => {
|
||||
const epic = await client.callTool('task_start', {user_request: 'Meta View'})
|
||||
|
||||
const status = await client.callTool('task_status', {
|
||||
id: epic.issue.id,
|
||||
view: 'meta',
|
||||
meta_max_chars: 10
|
||||
})
|
||||
|
||||
strictEqual(status.kind, 'issue')
|
||||
strictEqual(status.issue.type, 'epic')
|
||||
strictEqual(status.issue.issue_type, undefined)
|
||||
ok(status.issue.description.endsWith('...'))
|
||||
ok(status.issue.meta_truncated.includes('description'))
|
||||
strictEqual(status.issue.acceptance, 'PENDING')
|
||||
strictEqual(status.issue.design, 'PENDING')
|
||||
})
|
||||
|
||||
it('includes children for epic status view', async () => {
|
||||
const epic = await client.callTool('task_start', {user_request: 'Epic with children status'})
|
||||
|
||||
await client.callTool('task_decompose', {
|
||||
epic_id: epic.issue.id,
|
||||
sub_issues: [
|
||||
{title: 'Sub 1', description: 'First', acceptance: 'Done', design: 'Simple'},
|
||||
{title: 'Sub 2', description: 'Second', acceptance: 'Done', design: 'Simple'}
|
||||
]
|
||||
})
|
||||
|
||||
const status = await client.callTool('task_status', {id: epic.issue.id})
|
||||
strictEqual(status.kind, 'issue')
|
||||
ok(Array.isArray(status.issue.children))
|
||||
strictEqual(status.issue.children.length, 2)
|
||||
const titles = status.issue.children.map(child => child.title).sort()
|
||||
deepStrictEqual(titles, ['Sub 1', 'Sub 2'])
|
||||
ok(status.issue.children[0].id)
|
||||
ok(status.issue.children[0].status)
|
||||
})
|
||||
})
|
||||
|
||||
describe('task_start', () => {
|
||||
it('creates epic when user_request provided and no in-progress issues', async () => {
|
||||
const result = await client.callTool('task_start', {user_request: 'start task'})
|
||||
strictEqual(result.kind, 'issue')
|
||||
const issue = result.issue
|
||||
ok(issue.id, 'should return id')
|
||||
ok(issue.title === 'start task')
|
||||
ok(issue.status === 'in_progress')
|
||||
ok(issue.is_new === true)
|
||||
strictEqual(issue.type, 'epic')
|
||||
strictEqual(issue.issue_type, undefined)
|
||||
})
|
||||
|
||||
it('uses provided meta when creating epic', async () => {
|
||||
const result = await client.callTool('task_start', {
|
||||
user_request: 'Meta Epic',
|
||||
description: 'Custom description',
|
||||
design: 'Custom design',
|
||||
acceptance: 'Custom acceptance',
|
||||
view: 'meta'
|
||||
})
|
||||
|
||||
strictEqual(result.kind, 'issue')
|
||||
strictEqual(result.issue.description, 'Custom description')
|
||||
strictEqual(result.issue.design, 'Custom design')
|
||||
strictEqual(result.issue.acceptance, 'Custom acceptance')
|
||||
})
|
||||
|
||||
it('omits memory by default', async () => {
|
||||
const result = await client.callTool('task_start', {user_request: 'No Memory'})
|
||||
strictEqual(result.kind, 'issue')
|
||||
strictEqual(result.memory, undefined)
|
||||
})
|
||||
|
||||
it('returns issue for explicit id', async () => {
|
||||
const epic = await client.callTool('task_start', {user_request: 'Start by id'})
|
||||
|
||||
const result = await client.callTool('task_start', {id: epic.issue.id})
|
||||
strictEqual(result.kind, 'issue')
|
||||
const issue = result.issue
|
||||
ok(issue.id === epic.issue.id)
|
||||
ok(issue.status === 'in_progress')
|
||||
ok(issue.is_new === false)
|
||||
})
|
||||
|
||||
it('creates epic even when in-progress issues exist', async () => {
|
||||
await client.callTool('task_start', {user_request: 'Existing task'})
|
||||
|
||||
const result = await client.callTool('task_start', {user_request: 'New epic'})
|
||||
strictEqual(result.kind, 'issue')
|
||||
strictEqual(result.issue.is_new, true)
|
||||
strictEqual(result.issue.title, 'New epic')
|
||||
})
|
||||
|
||||
it('rejects calls without id or user_request', async () => {
|
||||
const result = await client.callTool('task_start', {})
|
||||
strictEqual(result.kind, 'error')
|
||||
strictEqual(result.message, 'task_start requires id or user_request')
|
||||
})
|
||||
|
||||
it('resumes epic with ready_children', async () => {
|
||||
const epic = await client.callTool('task_start', {user_request: 'Resume with Children'})
|
||||
|
||||
await client.callTool('task_decompose', {
|
||||
epic_id: epic.issue.id,
|
||||
sub_issues: [
|
||||
{title: 'Child 1', description: 'First', acceptance: 'Done', design: 'Simple'},
|
||||
{title: 'Child 2', description: 'Second', acceptance: 'Done', design: 'Simple'}
|
||||
]
|
||||
})
|
||||
|
||||
execTest(`bd close ${epic.issue.id} --reason "test"`, {cwd: testDir, stdio: 'pipe'})
|
||||
|
||||
const resumed = await client.callTool('task_start', {id: epic.issue.id})
|
||||
strictEqual(resumed.kind, 'issue')
|
||||
const issue = resumed.issue
|
||||
ok(issue.id === epic.issue.id)
|
||||
ok(issue.is_new === false)
|
||||
ok(issue.ready_children, 'should have ready_children')
|
||||
strictEqual(issue.ready_children.length, 2)
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
describe('task_status with in-progress epic', () => {
|
||||
it('returns single in-progress issue', async () => {
|
||||
const epic = await client.callTool('task_start', {user_request: 'In Progress Epic'})
|
||||
|
||||
const status = await client.callTool('task_status', {})
|
||||
strictEqual(status.kind, 'summary')
|
||||
ok(Array.isArray(status.issues))
|
||||
strictEqual(status.issues.length, 1)
|
||||
strictEqual(status.issues[0].id, epic.issue.id)
|
||||
strictEqual(status.issues[0].status, 'in_progress')
|
||||
})
|
||||
|
||||
it('returns summary list when tasks share a single epic', async () => {
|
||||
const epic = await client.callTool('task_start', {user_request: 'Parent Epic'})
|
||||
|
||||
createInProgressChildren(epic.issue.id, testDir)
|
||||
|
||||
const status = await client.callTool('task_status', {})
|
||||
strictEqual(status.kind, 'summary')
|
||||
ok(Array.isArray(status.issues))
|
||||
ok(status.issues.length >= 3)
|
||||
ok(status.issues.some(issue => issue.id === epic.issue.id))
|
||||
})
|
||||
|
||||
it('returns ready_children for epic with decomposed sub-issues', async () => {
|
||||
const epic = await client.callTool('task_start', {user_request: 'Epic with children'})
|
||||
|
||||
await client.callTool('task_decompose', {
|
||||
epic_id: epic.issue.id,
|
||||
sub_issues: [
|
||||
{title: 'Sub 1', description: 'First', acceptance: 'Done', design: 'Simple'},
|
||||
{title: 'Sub 2', description: 'Second', acceptance: 'Done', design: 'Simple'}
|
||||
]
|
||||
})
|
||||
|
||||
const status = await client.callTool('task_status', {})
|
||||
strictEqual(status.kind, 'summary')
|
||||
ok(Array.isArray(status.issues))
|
||||
const epicSummary = status.issues.find(issue => issue.id === epic.issue.id)
|
||||
ok(epicSummary.ready_children, 'should have ready_children')
|
||||
strictEqual(epicSummary.ready_children.length, 2)
|
||||
})
|
||||
})
|
||||
|
||||
describe('task_decompose', () => {
  // Reads a single issue back through the beads CLI so assertions check
  // persisted state rather than only the tool response.
  const showIssue = (id) => JSON.parse(execTest(`bd show ${id} --json`, {cwd: testDir, encoding: 'utf-8'}))[0]

  it('creates sub-issues under epic', async () => {
    const epicRes = await client.callTool('task_start', {user_request: 'Decompose Test'})

    const response = await client.callTool('task_decompose', {
      epic_id: epicRes.issue.id,
      sub_issues: [
        {title: 'Sub 1', description: 'First', acceptance: 'Done', design: 'Simple'},
        {title: 'Sub 2', description: 'Second', acceptance: 'Done', design: 'Simple', depends_on: [0]}
      ]
    })

    strictEqual(response.kind, 'created')
    strictEqual(response.next, 'continue')
    strictEqual(response.ids.length, 2)
    strictEqual(response.epic_id, epicRes.issue.id)
  })

  it('rejects sub-issues missing meta', async () => {
    const epicRes = await client.callTool('task_start', {user_request: 'Decompose Missing Meta'})

    // 'design' is intentionally absent from the sub-issue.
    const response = await client.callTool('task_decompose', {
      epic_id: epicRes.issue.id,
      sub_issues: [
        {title: 'Sub 1', description: 'First', acceptance: 'Done'}
      ]
    })

    strictEqual(response.kind, 'error')
    ok(response.message.includes('missing required fields'))
  })

  it('auto-starts a single child on create', async () => {
    const epicRes = await client.callTool('task_start', {user_request: 'Decompose Auto Start Test'})

    const response = await client.callTool('task_decompose', {
      epic_id: epicRes.issue.id,
      sub_issues: [
        {title: 'Sub 1', description: 'First', acceptance: 'Done', design: 'Simple'}
      ]
    })

    strictEqual(response.kind, 'created')
    strictEqual(response.next, 'continue')
    ok(response.started_child_id, 'should return started_child_id')
    strictEqual(response.started_child_id, response.ids[0])

    // The auto-started child must actually be in_progress in beads.
    const inProgress = JSON.parse(execTest('bd list --status in_progress --json', {cwd: testDir, encoding: 'utf-8'}))
    ok(inProgress.some(issue => issue.id === response.started_child_id))
  })

  it('accepts issue id dependencies for incremental graph updates', async () => {
    const epicRes = await client.callTool('task_start', {user_request: 'Decompose Id Dependencies'})
    const dep = await client.callTool('task_create', {
      title: 'Dependency Task',
      description: 'Existing dependency',
      design: 'Simple',
      acceptance: 'Done',
      parent: epicRes.issue.id
    })

    const response = await client.callTool('task_decompose', {
      epic_id: epicRes.issue.id,
      sub_issues: [
        {title: 'Sub 1', description: 'First', acceptance: 'Done', design: 'Simple', depends_on: [dep.id]}
      ]
    })

    const created = showIssue(response.ids[0])
    ok(created.dependencies.some(entry => entry.id === dep.id))
  })

  it('accepts dep_type for sub-issue dependencies', async () => {
    const epicRes = await client.callTool('task_start', {user_request: 'Decompose Dep Type'})
    const dep = await client.callTool('task_create', {
      title: 'Dep Type Task',
      description: 'Existing dependency',
      design: 'Simple',
      acceptance: 'Done',
      parent: epicRes.issue.id
    })

    const response = await client.callTool('task_decompose', {
      epic_id: epicRes.issue.id,
      sub_issues: [
        {
          title: 'Sub 1',
          description: 'First',
          acceptance: 'Done',
          design: 'Simple',
          depends_on: [dep.id],
          dep_type: 'blocks'
        }
      ]
    })

    const created = showIssue(response.ids[0])
    const dependency = created.dependencies.find(entry => entry.id === dep.id)
    ok(dependency)
    strictEqual(dependency['dependency_type'], 'blocks')
  })
})
|
||||
|
||||
describe('task_create', () => {
  it('requires description/design/acceptance', async () => {
    const response = await client.callTool('task_create', {title: 'Incomplete Task'})
    strictEqual(response.kind, 'error')
    ok(response.message.includes('Missing required fields'))
  })

  it('creates a task with meta', async () => {
    const response = await client.callTool('task_create', {
      title: 'Complete Task',
      description: 'Do the thing',
      design: 'Straightforward steps',
      acceptance: 'Verify behavior X'
    })

    strictEqual(response.kind, 'created')
    strictEqual(response.next, 'continue')
    ok(response.id, 'should return id')
  })

  it('accepts depends_on arrays', async () => {
    // Minimal fully-specified task used as a dependency.
    const makeDep = (title, description) => client.callTool('task_create', {
      title,
      description,
      design: 'Simple',
      acceptance: 'Done'
    })
    const dep1 = await makeDep('Dep 1', 'First dep')
    const dep2 = await makeDep('Dep 2', 'Second dep')

    const response = await client.callTool('task_create', {
      title: 'Task With Deps',
      description: 'Depends on others',
      design: 'Simple',
      acceptance: 'Done',
      depends_on: [dep1.id, dep2.id]
    })

    // Both dependencies must be persisted on the stored issue.
    const stored = JSON.parse(execTest(`bd show ${response.id} --json`, {cwd: testDir, encoding: 'utf-8'}))[0]
    const linkedIds = stored.dependencies.map(entry => entry.id).sort()
    deepStrictEqual(linkedIds, [dep1.id, dep2.id].sort())
  })
})
|
||||
|
||||
describe('task_link', () => {
  // Shorthand for creating a minimal, fully-specified task.
  const createTask = (title, description) => client.callTool('task_create', {
    title,
    description,
    design: 'Simple',
    acceptance: 'Done'
  })

  it('adds dependencies between existing issues', async () => {
    const dep = await createTask('Link Dep', 'Dependency')
    const target = await createTask('Link Target', 'Target')

    const response = await client.callTool('task_link', {id: target.id, depends_on: [dep.id]})
    strictEqual(response.kind, 'updated')
    strictEqual(response.id, target.id)
    deepStrictEqual(response.added_depends_on, [dep.id])

    // The link must be visible on the persisted issue, not just in the response.
    const stored = JSON.parse(execTest(`bd show ${target.id} --json`, {cwd: testDir, encoding: 'utf-8'}))[0]
    ok(stored.dependencies.some(entry => entry.id === dep.id))
  })

  it('rejects empty depends_on', async () => {
    const target = await createTask('Link Target Empty', 'Target')

    const response = await client.callTool('task_link', {id: target.id, depends_on: []})
    strictEqual(response.kind, 'error')
    ok(response.message.includes('depends_on'))
  })
})
|
||||
|
||||
describe('task_update_meta', () => {
  it('requires at least one field', async () => {
    const epicRes = await client.callTool('task_start', {user_request: 'Meta Update Required'})

    // Calling with only the id (no meta fields) must be rejected.
    const response = await client.callTool('task_update_meta', {id: epicRes.issue.id})
    strictEqual(response.kind, 'error')
    ok(response.message.includes('At least one'))
  })

  it('updates description/design/acceptance', async () => {
    const epicRes = await client.callTool('task_start', {user_request: 'Meta Update'})

    const updated = await client.callTool('task_update_meta', {
      id: epicRes.issue.id,
      description: 'Updated description',
      design: 'Updated design',
      acceptance: 'Updated acceptance',
      view: 'meta'
    })

    strictEqual(updated.kind, 'issue')
    strictEqual(updated.issue.description, 'Updated description')
    strictEqual(updated.issue.design, 'Updated design')
    strictEqual(updated.issue.acceptance, 'Updated acceptance')
  })
})
|
||||
|
||||
describe('task_progress', () => {
  it('adds findings and decisions', async () => {
    const epicRes = await client.callTool('task_start', {user_request: 'Progress Test'})

    const response = await client.callTool('task_progress', {
      id: epicRes.issue.id,
      findings: ['Found pattern X'],
      decisions: ['Use approach Y'],
      memory_limit: 10
    })

    strictEqual(response.kind, 'progress')
    deepStrictEqual(response.memory.findings, ['Found pattern X'])
    deepStrictEqual(response.memory.decisions, ['Use approach Y'])

    // Findings/decisions must also be persisted as bd comments.
    const comments = JSON.parse(execTest(`bd comments ${epicRes.issue.id} --json`, {cwd: testDir, encoding: 'utf-8'}))
    ok(comments.some(comment => comment.text === 'FINDING: Found pattern X'))
    ok(comments.some(comment => comment.text === 'KEY DECISION: Use approach Y'))
  })

  it('omits memory when memory_limit is 0', async () => {
    const epicRes = await client.callTool('task_start', {user_request: 'Progress No Memory'})

    const response = await client.callTool('task_progress', {
      id: epicRes.issue.id,
      findings: ['F1'],
      decisions: ['D1'],
      memory_limit: 0
    })

    strictEqual(response.kind, 'progress')
    strictEqual(response.memory, undefined)
  })

  it('truncates memory when memory_limit is 1', async () => {
    const epicRes = await client.callTool('task_start', {user_request: 'Progress Trim Memory'})

    const response = await client.callTool('task_progress', {
      id: epicRes.issue.id,
      findings: ['F1', 'F2'],
      decisions: ['D1', 'D2'],
      memory_limit: 1
    })

    strictEqual(response.kind, 'progress')
    ok(response.memory)
    // Only the most recent entry of each list survives the limit; the
    // remainder is reported via `truncated`/`more`.
    deepStrictEqual(response.memory.findings, ['F2'])
    deepStrictEqual(response.memory.decisions, ['D2'])
    strictEqual(response.memory.truncated, true)
    deepStrictEqual(response.memory.more, {findings: 1, decisions: 1})
  })
})
|
||||
|
||||
describe('task_done', () => {
  // Reads a single issue back through the beads CLI to assert on persisted state.
  const showIssue = (id) => JSON.parse(execTest(`bd show ${id} --json`, {cwd: testDir, encoding: 'utf-8'}))[0]

  it('closes epic directly', async () => {
    const epicRes = await client.callTool('task_start', {user_request: 'Close Test'})

    const response = await client.callTool('task_done', {
      id: epicRes.issue.id,
      reason: 'Completed successfully'
    })

    strictEqual(response.kind, 'closed')
    strictEqual(response.closed, epicRes.issue.id)
  })

  it('auto-closes epic when all children are closed', async () => {
    const epicRes = await client.callTool('task_start', {user_request: 'Auto-close Epic'})
    const decompose = await client.callTool('task_decompose', {
      epic_id: epicRes.issue.id,
      sub_issues: [
        {title: 'Child 1', description: 'Child one', design: 'Simple', acceptance: 'Done'},
        {title: 'Child 2', description: 'Child two', design: 'Simple', acceptance: 'Done'}
      ]
    })

    // Closing the first child must leave the epic open...
    await client.callTool('task_done', {id: decompose.ids[0], reason: 'Done'})
    ok(showIssue(epicRes.issue.id).status !== 'closed')

    // ...closing the last child auto-closes it with a canned reason.
    await client.callTool('task_done', {id: decompose.ids[1], reason: 'Done'})
    const epicState = showIssue(epicRes.issue.id)
    strictEqual(epicState.status, 'closed')
    strictEqual(epicState['close_reason'], 'Auto-closed: all child issues closed')
  })

  it('does not auto-close pinned epics', async () => {
    const epicRes = await client.callTool('task_start', {user_request: 'Pinned Epic'})
    const decompose = await client.callTool('task_decompose', {
      epic_id: epicRes.issue.id,
      sub_issues: [
        {title: 'Child 1', description: 'Child one', design: 'Simple', acceptance: 'Done'}
      ]
    })

    execTest(`bd update ${epicRes.issue.id} --status pinned`, {cwd: testDir, stdio: 'pipe'})

    await client.callTool('task_done', {id: decompose.ids[0], reason: 'Done'})
    const epicState = showIssue(epicRes.issue.id)
    ok(['pinned', 'hooked'].includes(epicState.status))
  })

  it('closes low-priority task directly', async () => {
    // Create task via bd directly (not epic, lower priority)
    const id = execTest(
      'bd create --title "Quick Task" --type task --priority P3 --silent',
      {cwd: testDir, encoding: 'utf-8'}
    ).trim()
    execTest(`bd update ${id} --status in_progress`, {cwd: testDir, stdio: 'pipe'})

    const response = await client.callTool('task_done', {
      id,
      reason: 'Done'
    })

    // P3 task should close without review
    strictEqual(response.kind, 'closed')
    strictEqual(response.closed, id)
  })
})
|
||||
|
||||
describe('task_reopen', () => {
  // Creates a P3 task through the bd CLI and immediately closes it,
  // returning its id; both tests start from a closed issue.
  const createClosedTask = (title) => {
    const id = execTest(
      `bd create --title "${title}" --type task --priority P3 --silent`,
      {cwd: testDir, encoding: 'utf-8'}
    ).trim()
    execTest(`bd close ${id} --reason "done"`, {cwd: testDir, stdio: 'pipe'})
    return id
  }

  it('reopens closed issue with reason', async () => {
    const id = createClosedTask('Reopen Me')

    const response = await client.callTool('task_reopen', {id, reason: 'Regression found'})
    strictEqual(response.kind, 'issue')
    strictEqual(response.issue.id, id)
    strictEqual(response.issue.status, 'open')
  })

  it('requires reason', async () => {
    const id = createClosedTask('Need Reason')

    const response = await client.callTool('task_reopen', {id})
    strictEqual(response.kind, 'error')
    ok(response.message.includes('reason required'))
  })
})
|
||||
|
||||
})
|
||||
@@ -1,40 +0,0 @@
|
||||
// Copyright 2000-2026 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
|
||||
|
||||
// Canonical error envelope: errors always hand control back to the user.
export function buildError(message) {
  return {
    kind: 'error',
    next: 'await_user',
    message
  }
}
|
||||
|
||||
// "Nothing to report" envelope; both the message and next hop are overridable.
export function buildEmpty(message = 'No in-progress tasks found.', next = 'await_user') {
  const response = {kind: 'empty'}
  response.next = next
  response.message = message
  return response
}
|
||||
|
||||
// Wraps a single issue; `memory` is attached only when provided (truthy),
// so consumers can distinguish "no memory" from "empty memory object".
export function buildIssue(issue, {next = 'continue', memory} = {}) {
  const base = {kind: 'issue', next, issue}
  return memory ? {...base, memory} : base
}
|
||||
|
||||
// Summary envelope. Normalizes `issues` to an array: arrays pass through,
// a single truthy issue is wrapped, and null/undefined become [].
export function buildSummary(issues, {next = 'await_user'} = {}) {
  let normalized
  if (Array.isArray(issues)) {
    normalized = issues
  } else if (issues) {
    normalized = [issues]
  } else {
    normalized = []
  }
  return {kind: 'summary', next, issues: normalized}
}
|
||||
|
||||
// Progress envelope. The `status` key is always present (even when the value
// is undefined) so the response shape is stable; `memory` is attached only
// when truthy.
export function buildProgress({memory, status, next = 'await_user'}) {
  const base = {kind: 'progress', next, status}
  return memory ? {...base, memory} : base
}
|
||||
|
||||
// "created" envelope; payload keys are merged last, so they win over the
// defaults (including `kind`/`next`) — same semantics as spread-after.
export function buildCreated(payload, next = 'continue') {
  return Object.assign({kind: 'created', next}, payload)
}
|
||||
|
||||
// "updated" envelope; payload keys are merged last and win over the defaults.
export function buildUpdated(payload, next = 'continue') {
  return Object.assign({kind: 'updated', next}, payload)
}
|
||||
|
||||
// "closed" envelope. An explicit override wins; otherwise the presence of a
// `next_ready` issue decides whether the caller should start it or wait.
// Payload keys are merged last and win over the defaults.
export function buildClosed(payload, nextOverride) {
  let next = nextOverride
  if (next === null || next === undefined) {
    next = payload.next_ready ? 'start_next_ready' : 'await_user'
  }
  return Object.assign({kind: 'closed', next}, payload)
}
|
||||
|
||||
Reference in New Issue
Block a user