opencode consult skill

David Chen 2026-01-17 14:01:57 -08:00
parent d418e5050d
commit d56d306d1f
3 changed files with 61 additions and 0 deletions

@@ -0,0 +1,15 @@
---
description: Get a second opinion from another model
---
The user wants a second opinion from another model.
1. Review our conversation and identify ALL relevant context for the question below
2. Prepare a comprehensive context summary including:
- What we've been working on
- Key decisions or options being considered
- Any tradeoffs or concerns discussed
- Relevant code snippets if applicable
3. Use the `consult` tool with your context summary and the user's question
User's question: $ARGUMENTS
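
Once this file is registered as a custom command (its path is not visible above), the prompt would be triggered with something like `/consult should we split the migration into two deploys?`, with everything after the command name substituted for $ARGUMENTS.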

opencode/consult.json Normal file
@@ -0,0 +1,3 @@
{
  "model": "google/gemini-3-pro-preview"
}
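
The tool below looks for this file at ~/.config/opencode/consult.json and silently falls back to google/gemini-3-pro-preview when it is missing or unparseable; "model" is the only key it reads. Any provider/model string that `opencode run -m` accepts should work as the value.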

opencode/tool/consult.ts Normal file
@@ -0,0 +1,43 @@
import { tool } from "@opencode-ai/plugin"
import { readFileSync } from "fs"
import { homedir } from "os"
import { join } from "path"
export default tool({
  description:
    "Get a second opinion from another model. Provide the full context and your question. The other model will analyze and respond with a slightly critical perspective.",
  args: {
    context: tool.schema
      .string()
      .describe(
        "Full context: conversation summary, code snippets, options being considered, tradeoffs, etc."
      ),
    question: tool.schema
      .string()
      .describe("What you want the other model to weigh in on"),
  },
  async execute(args) {
    // Default model; overridden by ~/.config/opencode/consult.json when present.
    let model = "google/gemini-3-pro-preview"
    try {
      const configPath = join(homedir(), ".config/opencode/consult.json")
      const config = JSON.parse(readFileSync(configPath, "utf-8"))
      if (config.model) {
        model = config.model
      }
    } catch {
      // Missing or unparseable config: keep the default model.
    }
    const prompt = `You are providing a second opinion with a slightly critical eye. Review this context and help with the question. Don't just agree - look for potential issues, edge cases, or alternative approaches that may have been missed.
## Context
${args.context}
## Question
${args.question}
Provide your analysis and recommendation.`
    // Pipe the prompt into a one-shot `opencode run` with the selected model;
    // Bun's shell escapes the interpolated values.
    const result =
      await Bun.$`echo ${prompt} | opencode run -m ${model}`.text()
    return result
  },
})
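
For a quick manual check of the shell pipeline outside the plugin, the same pattern can be exercised as a standalone Bun script. A minimal sketch, assuming Bun and the opencode CLI are on PATH; the prompt text and model below are illustrative, not part of this commit:

import { $ } from "bun"

// Mirror what consult.ts does: pipe a prompt into a one-shot opencode run.
const model = "google/gemini-3-pro-preview"
const prompt = "## Context\nWe are debating SQLite vs Postgres for a small internal tool.\n## Question\nWhat would you push back on?"
const reply = await $`echo ${prompt} | opencode run -m ${model}`.text()
console.log(reply)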