add tool use, update system prompt
This commit is contained in:
@@ -1,4 +1,6 @@
|
||||
import { TFile } from 'obsidian'
|
||||
import { TFile } from 'obsidian';
|
||||
|
||||
import { SearchAndReplaceToolArgs } from '../types/apply';
|
||||
|
||||
/**
|
||||
* Applies changes to a file by replacing content within specified line range
|
||||
@@ -9,36 +11,95 @@ import { TFile } from 'obsidian'
|
||||
* @param endLine - Ending line number (1-based indexing, optional)
|
||||
* @returns Promise resolving to the modified content or null if operation fails
|
||||
*/
|
||||
export const manualApplyChangesToFile = async (
|
||||
content: string,
|
||||
currentFile: TFile,
|
||||
currentFileContent: string,
|
||||
startLine?: number,
|
||||
endLine?: number,
|
||||
export const ApplyEditToFile = async (
|
||||
currentFile: TFile,
|
||||
currentFileContent: string,
|
||||
content: string,
|
||||
startLine?: number,
|
||||
endLine?: number,
|
||||
): Promise<string | null> => {
|
||||
try {
|
||||
// Input validation
|
||||
if (!content || !currentFileContent) {
|
||||
throw new Error('Content cannot be empty')
|
||||
}
|
||||
try {
|
||||
// 如果文件为空,直接返回新内容
|
||||
if (!currentFileContent || currentFileContent.trim() === '') {
|
||||
return content;
|
||||
}
|
||||
|
||||
const lines = currentFileContent.split('\n')
|
||||
const effectiveStartLine = Math.max(1, startLine ?? 1)
|
||||
const effectiveEndLine = Math.min(endLine ?? lines.length, lines.length)
|
||||
// 如果要清空文件,直接返回空字符串
|
||||
if (content === '') {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Validate line numbers
|
||||
if (effectiveStartLine > effectiveEndLine) {
|
||||
throw new Error('Start line cannot be greater than end line')
|
||||
}
|
||||
const lines = currentFileContent.split('\n')
|
||||
const effectiveStartLine = Math.max(1, startLine ?? 1)
|
||||
const effectiveEndLine = Math.min(endLine ?? lines.length, lines.length)
|
||||
|
||||
// Construct new content
|
||||
return [
|
||||
...lines.slice(0, effectiveStartLine - 1),
|
||||
content,
|
||||
...lines.slice(effectiveEndLine)
|
||||
].join('\n')
|
||||
} catch (error) {
|
||||
console.error('Error applying changes:', error instanceof Error ? error.message : 'Unknown error')
|
||||
return null
|
||||
}
|
||||
// Validate line numbers
|
||||
if (effectiveStartLine > effectiveEndLine) {
|
||||
throw new Error('Start line cannot be greater than end line')
|
||||
}
|
||||
|
||||
// Construct new content
|
||||
return [
|
||||
...lines.slice(0, effectiveStartLine - 1),
|
||||
content,
|
||||
...lines.slice(effectiveEndLine)
|
||||
].join('\n')
|
||||
} catch (error) {
|
||||
console.error('Error applying changes:', error instanceof Error ? error.message : 'Unknown error')
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function escapeRegExp(string: string): string {
|
||||
return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")
|
||||
}
|
||||
|
||||
/**
|
||||
* 搜索和替换文件内容
|
||||
* @param currentFile - 当前文件
|
||||
* @param currentFileContent - 当前文件内容
|
||||
* @param search - 搜索内容
|
||||
* @param replace - 替换内容
|
||||
*/
|
||||
export const SearchAndReplace = async (
|
||||
currentFile: TFile,
|
||||
currentFileContent: string,
|
||||
operations: SearchAndReplaceToolArgs['operations']
|
||||
) => {
|
||||
let lines = currentFileContent.split("\n")
|
||||
|
||||
for (const op of operations) {
|
||||
const flags = op.regexFlags ?? (op.ignoreCase ? "gi" : "g")
|
||||
const multilineFlags = flags.includes("m") ? flags : flags + "m"
|
||||
|
||||
const searchPattern = op.useRegex
|
||||
? new RegExp(op.search, multilineFlags)
|
||||
: new RegExp(escapeRegExp(op.search), multilineFlags)
|
||||
|
||||
if (op.startLine || op.endLine) {
|
||||
const startLine = Math.max((op.startLine ?? 1) - 1, 0)
|
||||
const endLine = Math.min((op.endLine ?? lines.length) - 1, lines.length - 1)
|
||||
|
||||
// Get the content before and after the target section
|
||||
const beforeLines = lines.slice(0, startLine)
|
||||
const afterLines = lines.slice(endLine + 1)
|
||||
|
||||
// Get the target section and perform replacement
|
||||
const targetContent = lines.slice(startLine, endLine + 1).join("\n")
|
||||
const modifiedContent = targetContent.replace(searchPattern, op.replace)
|
||||
const modifiedLines = modifiedContent.split("\n")
|
||||
|
||||
// Reconstruct the full content with the modified section
|
||||
lines = [...beforeLines, ...modifiedLines, ...afterLines]
|
||||
} else {
|
||||
// Global replacement
|
||||
const fullContent = lines.join("\n")
|
||||
const modifiedContent = fullContent.replace(searchPattern, op.replace)
|
||||
lines = modifiedContent.split("\n")
|
||||
}
|
||||
}
|
||||
|
||||
const newContent = lines.join("\n")
|
||||
return newContent;
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { minimatch } from 'minimatch'
|
||||
import { Vault } from 'obsidian'
|
||||
import { TFile, TFolder, Vault } from 'obsidian'
|
||||
|
||||
export const findFilesMatchingPatterns = async (
|
||||
patterns: string[],
|
||||
@@ -10,3 +10,24 @@ export const findFilesMatchingPatterns = async (
|
||||
return patterns.some((pattern) => minimatch(file.path, pattern))
|
||||
})
|
||||
}
|
||||
|
||||
export const listFilesAndFolders = async (vault: Vault, path: string) => {
|
||||
const folder = vault.getAbstractFileByPath(path)
|
||||
const childrenFiles: string[] = []
|
||||
const childrenFolders: string[] = []
|
||||
if (folder instanceof TFolder) {
|
||||
folder.children.forEach((child) => {
|
||||
if (child instanceof TFile) {
|
||||
childrenFiles.push(child.path)
|
||||
} else if (child instanceof TFolder) {
|
||||
childrenFolders.push(child.path + "/")
|
||||
}
|
||||
})
|
||||
return [...childrenFolders, ...childrenFiles]
|
||||
}
|
||||
return []
|
||||
}
|
||||
|
||||
export const regexSearchFiles = async (vault: Vault, path: string, regex: string, file_pattern: string) => {
|
||||
|
||||
}
|
||||
|
||||
312
src/utils/modes.ts
Normal file
312
src/utils/modes.ts
Normal file
@@ -0,0 +1,312 @@
|
||||
import { addCustomInstructions } from "../core/prompts/sections/custom-instructions"
|
||||
|
||||
import { ALWAYS_AVAILABLE_TOOLS, TOOL_GROUPS, ToolGroup } from "./tool-groups"
|
||||
|
||||
// Mode types
|
||||
export type Mode = string
|
||||
|
||||
// Group options type
|
||||
export type GroupOptions = {
|
||||
fileRegex?: string // Regular expression pattern
|
||||
description?: string // Human-readable description of the pattern
|
||||
}
|
||||
|
||||
// Group entry can be either a string or tuple with options
|
||||
export type GroupEntry = ToolGroup | readonly [ToolGroup, GroupOptions]
|
||||
|
||||
// Mode configuration type
|
||||
export type ModeConfig = {
|
||||
slug: string
|
||||
name: string
|
||||
roleDefinition: string
|
||||
customInstructions?: string
|
||||
groups: readonly GroupEntry[] // Now supports both simple strings and tuples with options
|
||||
source?: "global" | "project" // Where this mode was loaded from
|
||||
}
|
||||
|
||||
// Mode-specific prompts only
|
||||
export type PromptComponent = {
|
||||
roleDefinition?: string
|
||||
customInstructions?: string
|
||||
}
|
||||
|
||||
export type CustomModePrompts = {
|
||||
[key: string]: PromptComponent | undefined
|
||||
}
|
||||
|
||||
// Helper to extract group name regardless of format
|
||||
export function getGroupName(group: GroupEntry): ToolGroup {
|
||||
if (typeof group === "string") {
|
||||
return group
|
||||
}
|
||||
|
||||
return group[0]
|
||||
}
|
||||
|
||||
// Helper to get group options if they exist
|
||||
function getGroupOptions(group: GroupEntry): GroupOptions | undefined {
|
||||
return Array.isArray(group) ? group[1] : undefined
|
||||
}
|
||||
|
||||
// Helper to check if a file path matches a regex pattern
|
||||
export function doesFileMatchRegex(filePath: string, pattern: string): boolean {
|
||||
try {
|
||||
const regex = new RegExp(pattern)
|
||||
return regex.test(filePath)
|
||||
} catch (error) {
|
||||
console.error(`Invalid regex pattern: ${pattern}`, error)
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// Helper to get all tools for a mode
|
||||
export function getToolsForMode(groups: readonly GroupEntry[]): string[] {
|
||||
const tools = new Set<string>()
|
||||
|
||||
// Add tools from each group
|
||||
groups.forEach((group) => {
|
||||
const groupName = getGroupName(group)
|
||||
const groupConfig = TOOL_GROUPS[groupName]
|
||||
groupConfig.tools.forEach((tool: string) => tools.add(tool))
|
||||
})
|
||||
|
||||
// Always add required tools
|
||||
ALWAYS_AVAILABLE_TOOLS.forEach((tool) => tools.add(tool))
|
||||
|
||||
return Array.from(tools)
|
||||
}
|
||||
|
||||
// Main modes configuration as an ordered array
|
||||
export const modes: readonly ModeConfig[] = [
|
||||
{
|
||||
slug: "write",
|
||||
name: "Write",
|
||||
roleDefinition:
|
||||
"You are Infio, a versatile content creator skilled in composing, editing, and organizing various text-based documents. You excel at structuring information clearly, creating well-formatted content, and helping users express their ideas effectively.",
|
||||
groups: ["read", "edit"],
|
||||
customInstructions:
|
||||
"You can create and modify any text-based files, with particular expertise in Markdown formatting. Help users organize their thoughts, create documentation, take notes, or draft any written content they need. When appropriate, suggest structural improvements and formatting enhancements that make content more readable and accessible. Consider the purpose and audience of each document to provide the most relevant assistance."
|
||||
},
|
||||
{
|
||||
slug: "ask",
|
||||
name: "Ask",
|
||||
roleDefinition:
|
||||
"You are Infio, a versatile assistant dedicated to providing informative responses, thoughtful explanations, and practical guidance on virtually any topic or challenge you face.",
|
||||
groups: ["read"],
|
||||
customInstructions:
|
||||
"You can analyze information, explain concepts across various domains, and access external resources when helpful. Make sure to address the user's questions thoroughly with thoughtful explanations and practical guidance. Use visual aids like Mermaid diagrams when they help make complex topics clearer. Offer solutions to challenges from diverse fields, not just technical ones, and provide context that helps users better understand the subject matter.",
|
||||
},
|
||||
] as const
|
||||
|
||||
// Export the default mode slug
|
||||
export const defaultModeSlug = modes[0].slug
|
||||
|
||||
// Helper functions
|
||||
export function getModeBySlug(slug: string, customModes?: ModeConfig[]): ModeConfig | undefined {
|
||||
// Check custom modes first
|
||||
const customMode = customModes?.find((mode) => mode.slug === slug)
|
||||
if (customMode) {
|
||||
return customMode
|
||||
}
|
||||
// Then check built-in modes
|
||||
return modes.find((mode) => mode.slug === slug)
|
||||
}
|
||||
|
||||
export function getModeConfig(slug: string, customModes?: ModeConfig[]): ModeConfig {
|
||||
const mode = getModeBySlug(slug, customModes)
|
||||
if (!mode) {
|
||||
throw new Error(`No mode found for slug: ${slug}`)
|
||||
}
|
||||
return mode
|
||||
}
|
||||
|
||||
// Get all available modes, with custom modes overriding built-in modes
|
||||
export function getAllModes(customModes?: ModeConfig[]): ModeConfig[] {
|
||||
if (!customModes?.length) {
|
||||
return [...modes]
|
||||
}
|
||||
|
||||
// Start with built-in modes
|
||||
const allModes = [...modes]
|
||||
|
||||
// Process custom modes
|
||||
customModes.forEach((customMode) => {
|
||||
const index = allModes.findIndex((mode) => mode.slug === customMode.slug)
|
||||
if (index !== -1) {
|
||||
// Override existing mode
|
||||
allModes[index] = customMode
|
||||
} else {
|
||||
// Add new mode
|
||||
allModes.push(customMode)
|
||||
}
|
||||
})
|
||||
|
||||
return allModes
|
||||
}
|
||||
|
||||
// Check if a mode is custom or an override
|
||||
export function isCustomMode(slug: string, customModes?: ModeConfig[]): boolean {
|
||||
return !!customModes?.some((mode) => mode.slug === slug)
|
||||
}
|
||||
|
||||
// Custom error class for file restrictions
|
||||
export class FileRestrictionError extends Error {
|
||||
constructor(mode: string, pattern: string, description: string | undefined, filePath: string) {
|
||||
super(
|
||||
`This mode (${mode}) can only edit files matching pattern: ${pattern}${description ? ` (${description})` : ""}. Got: ${filePath}`,
|
||||
)
|
||||
this.name = "FileRestrictionError"
|
||||
}
|
||||
}
|
||||
|
||||
export function isToolAllowedForMode(
|
||||
tool: string,
|
||||
modeSlug: string,
|
||||
customModes: ModeConfig[],
|
||||
toolRequirements?: Record<string, boolean>,
|
||||
toolParams?: Record<string, any>, // All tool parameters
|
||||
experiments?: Record<string, boolean>,
|
||||
): boolean {
|
||||
// Always allow these tools
|
||||
if (ALWAYS_AVAILABLE_TOOLS.includes(tool as any)) {
|
||||
return true
|
||||
}
|
||||
|
||||
if (experiments && tool in experiments) {
|
||||
if (!experiments[tool]) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// Check tool requirements if any exist
|
||||
if (toolRequirements && tool in toolRequirements) {
|
||||
if (!toolRequirements[tool]) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
const mode = getModeBySlug(modeSlug, customModes)
|
||||
if (!mode) {
|
||||
return false
|
||||
}
|
||||
|
||||
// Check if tool is in any of the mode's groups and respects any group options
|
||||
for (const group of mode.groups) {
|
||||
const groupName = getGroupName(group)
|
||||
const options = getGroupOptions(group)
|
||||
|
||||
const groupConfig = TOOL_GROUPS[groupName]
|
||||
|
||||
// If the tool isn't in this group's tools, continue to next group
|
||||
if (!groupConfig.tools.includes(tool)) {
|
||||
continue
|
||||
}
|
||||
|
||||
// If there are no options, allow the tool
|
||||
if (!options) {
|
||||
return true
|
||||
}
|
||||
|
||||
// For the edit group, check file regex if specified
|
||||
if (groupName === "edit" && options.fileRegex) {
|
||||
const filePath = toolParams?.path
|
||||
if (
|
||||
filePath &&
|
||||
(toolParams.diff || toolParams.content || toolParams.operations) &&
|
||||
!doesFileMatchRegex(filePath, options.fileRegex)
|
||||
) {
|
||||
throw new FileRestrictionError(mode.name, options.fileRegex, options.description, filePath)
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// Create the mode-specific default prompts
|
||||
export const defaultPrompts: Readonly<CustomModePrompts> = Object.freeze(
|
||||
Object.fromEntries(
|
||||
modes.map((mode) => [
|
||||
mode.slug,
|
||||
{
|
||||
roleDefinition: mode.roleDefinition,
|
||||
customInstructions: mode.customInstructions,
|
||||
},
|
||||
]),
|
||||
),
|
||||
)
|
||||
|
||||
// Helper function to get all modes with their prompt overrides from extension state
|
||||
export async function getAllModesWithPrompts(context: vscode.ExtensionContext): Promise<ModeConfig[]> {
|
||||
const customModes = (await context.globalState.get<ModeConfig[]>("customModes")) || []
|
||||
const customModePrompts = (await context.globalState.get<CustomModePrompts>("customModePrompts")) || {}
|
||||
|
||||
const allModes = getAllModes(customModes)
|
||||
return allModes.map((mode) => ({
|
||||
...mode,
|
||||
roleDefinition: customModePrompts[mode.slug]?.roleDefinition ?? mode.roleDefinition,
|
||||
customInstructions: customModePrompts[mode.slug]?.customInstructions ?? mode.customInstructions,
|
||||
}))
|
||||
}
|
||||
|
||||
// Helper function to get complete mode details with all overrides
|
||||
export async function getFullModeDetails(
|
||||
modeSlug: string,
|
||||
customModes?: ModeConfig[],
|
||||
customModePrompts?: CustomModePrompts,
|
||||
options?: {
|
||||
cwd?: string
|
||||
globalCustomInstructions?: string
|
||||
preferredLanguage?: string
|
||||
},
|
||||
): Promise<ModeConfig> {
|
||||
// First get the base mode config from custom modes or built-in modes
|
||||
const baseMode = getModeBySlug(modeSlug, customModes) || modes.find((m) => m.slug === modeSlug) || modes[0]
|
||||
|
||||
// Check for any prompt component overrides
|
||||
const promptComponent = customModePrompts?.[modeSlug]
|
||||
|
||||
// Get the base custom instructions
|
||||
const baseCustomInstructions = promptComponent?.customInstructions || baseMode.customInstructions || ""
|
||||
|
||||
// If we have cwd, load and combine all custom instructions
|
||||
let fullCustomInstructions = baseCustomInstructions
|
||||
if (options?.cwd) {
|
||||
fullCustomInstructions = await addCustomInstructions(
|
||||
baseCustomInstructions,
|
||||
options.globalCustomInstructions || "",
|
||||
options.cwd,
|
||||
modeSlug,
|
||||
{ preferredLanguage: options.preferredLanguage },
|
||||
)
|
||||
}
|
||||
|
||||
// Return mode with any overrides applied
|
||||
return {
|
||||
...baseMode,
|
||||
roleDefinition: promptComponent?.roleDefinition || baseMode.roleDefinition,
|
||||
customInstructions: fullCustomInstructions,
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to safely get role definition
|
||||
export function getRoleDefinition(modeSlug: string, customModes?: ModeConfig[]): string {
|
||||
const mode = getModeBySlug(modeSlug, customModes)
|
||||
if (!mode) {
|
||||
console.warn(`No mode found for slug: ${modeSlug}`)
|
||||
return ""
|
||||
}
|
||||
return mode.roleDefinition
|
||||
}
|
||||
|
||||
// Helper function to safely get custom instructions
|
||||
export function getCustomInstructions(modeSlug: string, customModes?: ModeConfig[]): string {
|
||||
const mode = getModeBySlug(modeSlug, customModes)
|
||||
if (!mode) {
|
||||
console.warn(`No mode found for slug: ${modeSlug}`)
|
||||
return ""
|
||||
}
|
||||
return mode.customInstructions ?? ""
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
import { InfioBlockAction, ParsedInfioBlock, parseinfioBlocks } from './parse-infio-block'
|
||||
import { InfioBlockAction, ParsedMsgBlock, parseMsgBlocks } from './parse-infio-block'
|
||||
|
||||
describe('parseinfioBlocks', () => {
|
||||
it('should parse a string with infio_block elements', () => {
|
||||
@@ -22,7 +22,7 @@ print("Hello, world!")
|
||||
</infio_block>
|
||||
Some text after`
|
||||
|
||||
const expected: ParsedInfioBlock[] = [
|
||||
const expected: ParsedMsgBlock[] = [
|
||||
{ type: 'string', content: 'Some text before\n' },
|
||||
{
|
||||
type: 'infio_block',
|
||||
@@ -49,7 +49,7 @@ print("Hello, world!")
|
||||
{ type: 'string', content: '\nSome text after' },
|
||||
]
|
||||
|
||||
const result = parseinfioBlocks(input)
|
||||
const result = parseMsgBlocks(input)
|
||||
expect(result).toEqual(expected)
|
||||
})
|
||||
|
||||
@@ -58,7 +58,7 @@ print("Hello, world!")
|
||||
<infio_block language="python"></infio_block>
|
||||
`
|
||||
|
||||
const expected: ParsedInfioBlock[] = [
|
||||
const expected: ParsedMsgBlock[] = [
|
||||
{ type: 'string', content: '\n ' },
|
||||
{
|
||||
type: 'infio_block',
|
||||
@@ -69,16 +69,16 @@ print("Hello, world!")
|
||||
{ type: 'string', content: '\n ' },
|
||||
]
|
||||
|
||||
const result = parseinfioBlocks(input)
|
||||
const result = parseMsgBlocks(input)
|
||||
expect(result).toEqual(expected)
|
||||
})
|
||||
|
||||
it('should handle input without infio_block elements', () => {
|
||||
const input = 'Just a regular string without any infio_block elements.'
|
||||
|
||||
const expected: ParsedInfioBlock[] = [{ type: 'string', content: input }]
|
||||
const expected: ParsedMsgBlock[] = [{ type: 'string', content: input }]
|
||||
|
||||
const result = parseinfioBlocks(input)
|
||||
const result = parseMsgBlocks(input)
|
||||
expect(result).toEqual(expected)
|
||||
})
|
||||
|
||||
@@ -100,7 +100,7 @@ print("Hello, world!")
|
||||
</infio_block>
|
||||
End`
|
||||
|
||||
const expected: ParsedInfioBlock[] = [
|
||||
const expected: ParsedMsgBlock[] = [
|
||||
{ type: 'string', content: 'Start\n' },
|
||||
{
|
||||
type: 'infio_block',
|
||||
@@ -129,7 +129,7 @@ print("Hello, world!")
|
||||
{ type: 'string', content: '\nEnd' },
|
||||
]
|
||||
|
||||
const result = parseinfioBlocks(input)
|
||||
const result = parseMsgBlocks(input)
|
||||
expect(result).toEqual(expected)
|
||||
})
|
||||
|
||||
@@ -139,7 +139,7 @@ print("Hello, world!")
|
||||
# Unfinished infio_block
|
||||
|
||||
Some text after without closing tag`
|
||||
const expected: ParsedInfioBlock[] = [
|
||||
const expected: ParsedMsgBlock[] = [
|
||||
{ type: 'string', content: 'Start\n' },
|
||||
{
|
||||
type: 'infio_block',
|
||||
@@ -152,13 +152,13 @@ Some text after without closing tag`,
|
||||
},
|
||||
]
|
||||
|
||||
const result = parseinfioBlocks(input)
|
||||
const result = parseMsgBlocks(input)
|
||||
expect(result).toEqual(expected)
|
||||
})
|
||||
|
||||
it('should handle infio_block with startline and endline attributes', () => {
|
||||
const input = `<infio_block language="markdown" startline="2" endline="5"></infio_block>`
|
||||
const expected: ParsedInfioBlock[] = [
|
||||
const expected: ParsedMsgBlock[] = [
|
||||
{
|
||||
type: 'infio_block',
|
||||
content: '',
|
||||
@@ -168,13 +168,13 @@ Some text after without closing tag`,
|
||||
},
|
||||
]
|
||||
|
||||
const result = parseinfioBlocks(input)
|
||||
const result = parseMsgBlocks(input)
|
||||
expect(result).toEqual(expected)
|
||||
})
|
||||
|
||||
it('should parse infio_block with action attribute', () => {
|
||||
const input = `<infio_block type="edit"></infio_block>`
|
||||
const expected: ParsedInfioBlock[] = [
|
||||
const expected: ParsedMsgBlock[] = [
|
||||
{
|
||||
type: 'infio_block',
|
||||
content: '',
|
||||
@@ -182,13 +182,13 @@ Some text after without closing tag`,
|
||||
},
|
||||
]
|
||||
|
||||
const result = parseinfioBlocks(input)
|
||||
const result = parseMsgBlocks(input)
|
||||
expect(result).toEqual(expected)
|
||||
})
|
||||
|
||||
it('should handle invalid action attribute', () => {
|
||||
const input = `<infio_block type="invalid"></infio_block>`
|
||||
const expected: ParsedInfioBlock[] = [
|
||||
const expected: ParsedMsgBlock[] = [
|
||||
{
|
||||
type: 'infio_block',
|
||||
content: '',
|
||||
@@ -196,7 +196,7 @@ Some text after without closing tag`,
|
||||
},
|
||||
]
|
||||
|
||||
const result = parseinfioBlocks(input)
|
||||
const result = parseMsgBlocks(input)
|
||||
expect(result).toEqual(expected)
|
||||
})
|
||||
|
||||
@@ -208,7 +208,7 @@ It might contain multiple lines of text.
|
||||
</think>
|
||||
Some text after`
|
||||
|
||||
const expected: ParsedInfioBlock[] = [
|
||||
const expected: ParsedMsgBlock[] = [
|
||||
{ type: 'string', content: 'Some text before\n' },
|
||||
{
|
||||
type: 'think',
|
||||
@@ -220,7 +220,7 @@ It might contain multiple lines of text.
|
||||
{ type: 'string', content: '\nSome text after' },
|
||||
]
|
||||
|
||||
const result = parseinfioBlocks(input)
|
||||
const result = parseMsgBlocks(input)
|
||||
expect(result).toEqual(expected)
|
||||
})
|
||||
|
||||
@@ -229,7 +229,7 @@ It might contain multiple lines of text.
|
||||
<think></think>
|
||||
`
|
||||
|
||||
const expected: ParsedInfioBlock[] = [
|
||||
const expected: ParsedMsgBlock[] = [
|
||||
{ type: 'string', content: '\n ' },
|
||||
{
|
||||
type: 'think',
|
||||
@@ -238,7 +238,7 @@ It might contain multiple lines of text.
|
||||
{ type: 'string', content: '\n ' },
|
||||
]
|
||||
|
||||
const result = parseinfioBlocks(input)
|
||||
const result = parseMsgBlocks(input)
|
||||
expect(result).toEqual(expected)
|
||||
})
|
||||
|
||||
@@ -255,7 +255,7 @@ I need to consider several approaches.
|
||||
</think>
|
||||
End`
|
||||
|
||||
const expected: ParsedInfioBlock[] = [
|
||||
const expected: ParsedMsgBlock[] = [
|
||||
{ type: 'string', content: 'Start\n' },
|
||||
{
|
||||
type: 'infio_block',
|
||||
@@ -277,7 +277,7 @@ I need to consider several approaches.
|
||||
{ type: 'string', content: '\nEnd' },
|
||||
]
|
||||
|
||||
const result = parseinfioBlocks(input)
|
||||
const result = parseMsgBlocks(input)
|
||||
expect(result).toEqual(expected)
|
||||
})
|
||||
|
||||
@@ -286,7 +286,7 @@ I need to consider several approaches.
|
||||
<think>
|
||||
Some unfinished thought
|
||||
without closing tag`
|
||||
const expected: ParsedInfioBlock[] = [
|
||||
const expected: ParsedMsgBlock[] = [
|
||||
{ type: 'string', content: 'Start\n' },
|
||||
{
|
||||
type: 'think',
|
||||
@@ -296,7 +296,7 @@ without closing tag`,
|
||||
},
|
||||
]
|
||||
|
||||
const result = parseinfioBlocks(input)
|
||||
const result = parseMsgBlocks(input)
|
||||
expect(result).toEqual(expected)
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,128 +1,433 @@
|
||||
import JSON5 from 'json5'
|
||||
import { parseFragment } from 'parse5'
|
||||
|
||||
export enum InfioBlockAction {
|
||||
Edit = 'edit',
|
||||
New = 'new',
|
||||
Reference = 'reference'
|
||||
}
|
||||
|
||||
export type ParsedInfioBlock =
|
||||
| { type: 'string'; content: string }
|
||||
export type ParsedMsgBlock =
|
||||
| {
|
||||
type: 'infio_block'
|
||||
type: 'string'
|
||||
content: string
|
||||
language?: string
|
||||
filename?: string
|
||||
startLine?: number
|
||||
endLine?: number
|
||||
action?: InfioBlockAction
|
||||
}
|
||||
| { type: 'think'; content: string }
|
||||
|
||||
function isInfioBlockAction(value: string): value is InfioBlockAction {
|
||||
return Object.values<string>(InfioBlockAction).includes(value)
|
||||
}
|
||||
|
||||
export function parseinfioBlocks(input: string): ParsedInfioBlock[] {
|
||||
const parsedResult: ParsedInfioBlock[] = []
|
||||
const fragment = parseFragment(input, {
|
||||
sourceCodeLocationInfo: true,
|
||||
})
|
||||
let lastEndOffset = 0
|
||||
for (const node of fragment.childNodes) {
|
||||
if (node.nodeName === 'infio_block') {
|
||||
if (!node.sourceCodeLocation) {
|
||||
throw new Error('sourceCodeLocation is undefined')
|
||||
}
|
||||
const startOffset = node.sourceCodeLocation.startOffset
|
||||
const endOffset = node.sourceCodeLocation.endOffset
|
||||
if (startOffset > lastEndOffset) {
|
||||
parsedResult.push({
|
||||
type: 'string',
|
||||
content: input.slice(lastEndOffset, startOffset),
|
||||
})
|
||||
}
|
||||
|
||||
const language = node.attrs.find((attr) => attr.name === 'language')?.value
|
||||
const filename = node.attrs.find((attr) => attr.name === 'filename')?.value
|
||||
const startLine = node.attrs.find((attr) => attr.name === 'startline')?.value
|
||||
const endLine = node.attrs.find((attr) => attr.name === 'endline')?.value
|
||||
const actionValue = node.attrs.find((attr) => attr.name === 'type')?.value
|
||||
const action = actionValue && isInfioBlockAction(actionValue)
|
||||
? actionValue
|
||||
: undefined
|
||||
|
||||
|
||||
const children = node.childNodes
|
||||
if (children.length === 0) {
|
||||
parsedResult.push({
|
||||
type: 'infio_block',
|
||||
content: '',
|
||||
language,
|
||||
filename,
|
||||
startLine: startLine ? parseInt(startLine) : undefined,
|
||||
endLine: endLine ? parseInt(endLine) : undefined,
|
||||
action: action,
|
||||
})
|
||||
} else {
|
||||
const innerContentStartOffset =
|
||||
children[0].sourceCodeLocation?.startOffset
|
||||
const innerContentEndOffset =
|
||||
children[children.length - 1].sourceCodeLocation?.endOffset
|
||||
if (!innerContentStartOffset || !innerContentEndOffset) {
|
||||
throw new Error('sourceCodeLocation is undefined')
|
||||
}
|
||||
parsedResult.push({
|
||||
type: 'infio_block',
|
||||
content: input.slice(innerContentStartOffset, innerContentEndOffset),
|
||||
language,
|
||||
filename,
|
||||
startLine: startLine ? parseInt(startLine) : undefined,
|
||||
endLine: endLine ? parseInt(endLine) : undefined,
|
||||
action: action,
|
||||
})
|
||||
}
|
||||
lastEndOffset = endOffset
|
||||
} else if (node.nodeName === 'think') {
|
||||
if (!node.sourceCodeLocation) {
|
||||
throw new Error('sourceCodeLocation is undefined')
|
||||
}
|
||||
const startOffset = node.sourceCodeLocation.startOffset
|
||||
const endOffset = node.sourceCodeLocation.endOffset
|
||||
if (startOffset > lastEndOffset) {
|
||||
parsedResult.push({
|
||||
type: 'string',
|
||||
content: input.slice(lastEndOffset, startOffset),
|
||||
})
|
||||
}
|
||||
|
||||
const children = node.childNodes
|
||||
if (children.length === 0) {
|
||||
parsedResult.push({
|
||||
type: 'think',
|
||||
content: '',
|
||||
})
|
||||
} else {
|
||||
const innerContentStartOffset =
|
||||
children[0].sourceCodeLocation?.startOffset
|
||||
const innerContentEndOffset =
|
||||
children[children.length - 1].sourceCodeLocation?.endOffset
|
||||
if (!innerContentStartOffset || !innerContentEndOffset) {
|
||||
throw new Error('sourceCodeLocation is undefined')
|
||||
}
|
||||
parsedResult.push({
|
||||
type: 'think',
|
||||
content: input.slice(innerContentStartOffset, innerContentEndOffset),
|
||||
})
|
||||
}
|
||||
lastEndOffset = endOffset
|
||||
}
|
||||
| {
|
||||
type: 'think'
|
||||
content: string
|
||||
} | {
|
||||
type: 'thinking'
|
||||
content: string
|
||||
} | {
|
||||
type: 'write_to_file'
|
||||
path: string
|
||||
content: string
|
||||
lineCount?: number
|
||||
} | {
|
||||
type: 'insert_content'
|
||||
path: string
|
||||
startLine: number
|
||||
content: string
|
||||
} | {
|
||||
type: 'read_file'
|
||||
path: string
|
||||
finish: boolean
|
||||
} | {
|
||||
type: 'attempt_completion'
|
||||
result: string
|
||||
} | {
|
||||
type: 'search_and_replace'
|
||||
path: string
|
||||
operations: {
|
||||
search: string
|
||||
replace: string
|
||||
start_line?: number
|
||||
end_line?: number
|
||||
use_regex?: boolean
|
||||
ignore_case?: boolean
|
||||
regex_flags?: string
|
||||
}[]
|
||||
finish: boolean
|
||||
} | {
|
||||
type: 'ask_followup_question'
|
||||
question: string
|
||||
} | {
|
||||
type: 'list_files'
|
||||
path: string
|
||||
recursive?: boolean
|
||||
finish: boolean
|
||||
} | {
|
||||
type: 'regex_search_files'
|
||||
path: string
|
||||
regex: string
|
||||
finish: boolean
|
||||
} | {
|
||||
type: 'semantic_search_files'
|
||||
path: string
|
||||
query: string
|
||||
finish: boolean
|
||||
}
|
||||
if (lastEndOffset < input.length) {
|
||||
parsedResult.push({
|
||||
type: 'string',
|
||||
content: input.slice(lastEndOffset),
|
||||
|
||||
export function parseMsgBlocks(
|
||||
input: string,
|
||||
): ParsedMsgBlock[] {
|
||||
try {
|
||||
const parsedResult: ParsedMsgBlock[] = []
|
||||
const fragment = parseFragment(input, {
|
||||
sourceCodeLocationInfo: true,
|
||||
})
|
||||
let lastEndOffset = 0
|
||||
for (const node of fragment.childNodes) {
|
||||
if (node.nodeName === 'thinking') {
|
||||
if (!node.sourceCodeLocation) {
|
||||
throw new Error('sourceCodeLocation is undefined')
|
||||
}
|
||||
const startOffset = node.sourceCodeLocation.startOffset
|
||||
const endOffset = node.sourceCodeLocation.endOffset
|
||||
if (startOffset > lastEndOffset) {
|
||||
parsedResult.push({
|
||||
type: 'string',
|
||||
content: input.slice(lastEndOffset, startOffset),
|
||||
})
|
||||
}
|
||||
|
||||
const children = node.childNodes
|
||||
if (children.length === 0) {
|
||||
parsedResult.push({
|
||||
type: 'thinking',
|
||||
content: '',
|
||||
})
|
||||
} else {
|
||||
const innerContentStartOffset =
|
||||
children[0].sourceCodeLocation?.startOffset
|
||||
const innerContentEndOffset =
|
||||
children[children.length - 1].sourceCodeLocation?.endOffset
|
||||
if (!innerContentStartOffset || !innerContentEndOffset) {
|
||||
throw new Error('sourceCodeLocation is undefined')
|
||||
}
|
||||
parsedResult.push({
|
||||
type: 'thinking',
|
||||
content: input.slice(innerContentStartOffset, innerContentEndOffset),
|
||||
})
|
||||
}
|
||||
lastEndOffset = endOffset
|
||||
} else if (node.nodeName === 'think') {
|
||||
if (!node.sourceCodeLocation) {
|
||||
throw new Error('sourceCodeLocation is undefined')
|
||||
}
|
||||
const startOffset = node.sourceCodeLocation.startOffset
|
||||
const endOffset = node.sourceCodeLocation.endOffset
|
||||
if (startOffset > lastEndOffset) {
|
||||
parsedResult.push({
|
||||
type: 'string',
|
||||
content: input.slice(lastEndOffset, startOffset),
|
||||
})
|
||||
}
|
||||
|
||||
const children = node.childNodes
|
||||
if (children.length === 0) {
|
||||
parsedResult.push({
|
||||
type: 'think',
|
||||
content: '',
|
||||
})
|
||||
} else {
|
||||
const innerContentStartOffset =
|
||||
children[0].sourceCodeLocation?.startOffset
|
||||
const innerContentEndOffset =
|
||||
children[children.length - 1].sourceCodeLocation?.endOffset
|
||||
if (!innerContentStartOffset || !innerContentEndOffset) {
|
||||
throw new Error('sourceCodeLocation is undefined')
|
||||
}
|
||||
parsedResult.push({
|
||||
type: 'think',
|
||||
content: input.slice(innerContentStartOffset, innerContentEndOffset),
|
||||
})
|
||||
}
|
||||
lastEndOffset = endOffset
|
||||
} else if (node.nodeName === 'list_files') {
|
||||
if (!node.sourceCodeLocation) {
|
||||
throw new Error('sourceCodeLocation is undefined')
|
||||
}
|
||||
const startOffset = node.sourceCodeLocation.startOffset
|
||||
const endOffset = node.sourceCodeLocation.endOffset
|
||||
if (startOffset > lastEndOffset) {
|
||||
parsedResult.push({
|
||||
type: 'string',
|
||||
content: input.slice(lastEndOffset, startOffset),
|
||||
})
|
||||
}
|
||||
let path: string | undefined
|
||||
let recursive: boolean | undefined
|
||||
|
||||
for (const childNode of node.childNodes) {
|
||||
if (childNode.nodeName === 'path' && childNode.childNodes.length > 0) {
|
||||
path = childNode.childNodes[0].value
|
||||
} else if (childNode.nodeName === 'recursive' && childNode.childNodes.length > 0) {
|
||||
const recursiveValue = childNode.childNodes[0].value
|
||||
recursive = recursiveValue ? recursiveValue.toLowerCase() === 'true' : false
|
||||
}
|
||||
}
|
||||
|
||||
parsedResult.push({
|
||||
type: 'list_files',
|
||||
path: path || '/',
|
||||
recursive,
|
||||
finish: node.sourceCodeLocation.endTag !== undefined
|
||||
})
|
||||
lastEndOffset = endOffset
|
||||
} else if (node.nodeName === 'read_file') {
|
||||
if (!node.sourceCodeLocation) {
|
||||
throw new Error('sourceCodeLocation is undefined')
|
||||
}
|
||||
const startOffset = node.sourceCodeLocation.startOffset
|
||||
const endOffset = node.sourceCodeLocation.endOffset
|
||||
if (startOffset > lastEndOffset) {
|
||||
parsedResult.push({
|
||||
type: 'string',
|
||||
content: input.slice(lastEndOffset, startOffset),
|
||||
})
|
||||
}
|
||||
let path: string | undefined
|
||||
for (const childNode of node.childNodes) {
|
||||
if (childNode.nodeName === 'path' && childNode.childNodes.length > 0) {
|
||||
path = childNode.childNodes[0].value
|
||||
}
|
||||
}
|
||||
parsedResult.push({
|
||||
type: 'read_file',
|
||||
path,
|
||||
// Check if the tag is completely parsed with proper closing tag
|
||||
// In parse5, when a tag is properly closed, its sourceCodeLocation will include endTag
|
||||
finish: node.sourceCodeLocation.endTag !== undefined
|
||||
})
|
||||
lastEndOffset = endOffset
|
||||
} else if (node.nodeName === 'regex_search_files') {
|
||||
if (!node.sourceCodeLocation) {
|
||||
throw new Error('sourceCodeLocation is undefined')
|
||||
}
|
||||
const startOffset = node.sourceCodeLocation.startOffset
|
||||
const endOffset = node.sourceCodeLocation.endOffset
|
||||
if (startOffset > lastEndOffset) {
|
||||
parsedResult.push({
|
||||
type: 'string',
|
||||
content: input.slice(lastEndOffset, startOffset),
|
||||
})
|
||||
}
|
||||
let path: string | undefined
|
||||
let regex: string | undefined
|
||||
|
||||
for (const childNode of node.childNodes) {
|
||||
if (childNode.nodeName === 'path' && childNode.childNodes.length > 0) {
|
||||
path = childNode.childNodes[0].value
|
||||
} else if (childNode.nodeName === 'regex' && childNode.childNodes.length > 0) {
|
||||
regex = childNode.childNodes[0].value
|
||||
}
|
||||
}
|
||||
|
||||
parsedResult.push({
|
||||
type: 'regex_search_files',
|
||||
path: path,
|
||||
regex: regex,
|
||||
finish: node.sourceCodeLocation.endTag !== undefined
|
||||
})
|
||||
lastEndOffset = endOffset
|
||||
} else if (node.nodeName === 'semantic_search_files') {
|
||||
if (!node.sourceCodeLocation) {
|
||||
throw new Error('sourceCodeLocation is undefined')
|
||||
}
|
||||
const startOffset = node.sourceCodeLocation.startOffset
|
||||
const endOffset = node.sourceCodeLocation.endOffset
|
||||
if (startOffset > lastEndOffset) {
|
||||
parsedResult.push({
|
||||
type: 'string',
|
||||
content: input.slice(lastEndOffset, startOffset),
|
||||
})
|
||||
}
|
||||
let path: string | undefined
|
||||
let query: string | undefined
|
||||
|
||||
for (const childNode of node.childNodes) {
|
||||
if (childNode.nodeName === 'path' && childNode.childNodes.length > 0) {
|
||||
path = childNode.childNodes[0].value
|
||||
} else if (childNode.nodeName === 'query' && childNode.childNodes.length > 0) {
|
||||
query = childNode.childNodes[0].value
|
||||
}
|
||||
}
|
||||
|
||||
parsedResult.push({
|
||||
type: 'semantic_search_files',
|
||||
path: path,
|
||||
query: query,
|
||||
finish: node.sourceCodeLocation.endTag !== undefined
|
||||
})
|
||||
lastEndOffset = endOffset
|
||||
} else if (node.nodeName === 'write_to_file') {
|
||||
if (!node.sourceCodeLocation) {
|
||||
throw new Error('sourceCodeLocation is undefined')
|
||||
}
|
||||
const startOffset = node.sourceCodeLocation.startOffset
|
||||
const endOffset = node.sourceCodeLocation.endOffset
|
||||
if (startOffset > lastEndOffset) {
|
||||
parsedResult.push({
|
||||
type: 'string',
|
||||
content: input.slice(lastEndOffset, startOffset),
|
||||
})
|
||||
}
|
||||
let path: string | undefined
|
||||
let content: string = ''
|
||||
let lineCount: number | undefined
|
||||
// 处理子标签
|
||||
for (const childNode of node.childNodes) {
|
||||
if (childNode.nodeName === 'path' && childNode.childNodes.length > 0) {
|
||||
path = childNode.childNodes[0].value
|
||||
} else if (childNode.nodeName === 'content' && childNode.childNodes.length > 0) {
|
||||
// 如果内容有多个子节点,需要合并它们
|
||||
content = childNode.childNodes.map(n => n.value || '').join('')
|
||||
} else if (childNode.nodeName === 'line_count' && childNode.childNodes.length > 0) {
|
||||
const lineCountStr = childNode.childNodes[0].value
|
||||
lineCount = lineCountStr ? parseInt(lineCountStr) : undefined
|
||||
}
|
||||
}
|
||||
parsedResult.push({
|
||||
type: 'write_to_file',
|
||||
content,
|
||||
path,
|
||||
lineCount
|
||||
})
|
||||
lastEndOffset = endOffset
|
||||
|
||||
} else if (node.nodeName === 'insert_content') {
|
||||
if (!node.sourceCodeLocation) {
|
||||
throw new Error('sourceCodeLocation is undefined')
|
||||
}
|
||||
const startOffset = node.sourceCodeLocation.startOffset
|
||||
const endOffset = node.sourceCodeLocation.endOffset
|
||||
if (startOffset > lastEndOffset) {
|
||||
parsedResult.push({
|
||||
type: 'string',
|
||||
content: input.slice(lastEndOffset, startOffset),
|
||||
})
|
||||
}
|
||||
let path: string | undefined
|
||||
let content: string = ''
|
||||
let startLine: number = 0
|
||||
|
||||
// 处理子标签
|
||||
for (const childNode of node.childNodes) {
|
||||
if (childNode.nodeName === 'path' && childNode.childNodes.length > 0) {
|
||||
path = childNode.childNodes[0].value
|
||||
} else if (childNode.nodeName === 'operations' && childNode.childNodes.length > 0) {
|
||||
try {
|
||||
const operationsJson = childNode.childNodes[0].value
|
||||
const operations = JSON5.parse(operationsJson)
|
||||
if (Array.isArray(operations) && operations.length > 0) {
|
||||
const operation = operations[0]
|
||||
startLine = operation.start_line || 1
|
||||
content = operation.content || ''
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to parse operations JSON', error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
parsedResult.push({
|
||||
type: 'insert_content',
|
||||
path,
|
||||
startLine,
|
||||
content
|
||||
})
|
||||
lastEndOffset = endOffset
|
||||
} else if (node.nodeName === 'search_and_replace') {
|
||||
if (!node.sourceCodeLocation) {
|
||||
throw new Error('sourceCodeLocation is undefined')
|
||||
}
|
||||
const startOffset = node.sourceCodeLocation.startOffset
|
||||
const endOffset = node.sourceCodeLocation.endOffset
|
||||
if (startOffset > lastEndOffset) {
|
||||
parsedResult.push({
|
||||
type: 'string',
|
||||
content: input.slice(lastEndOffset, startOffset),
|
||||
})
|
||||
}
|
||||
let path: string | undefined
|
||||
let operations = []
|
||||
|
||||
// 处理子标签
|
||||
for (const childNode of node.childNodes) {
|
||||
if (childNode.nodeName === 'path' && childNode.childNodes.length > 0) {
|
||||
path = childNode.childNodes[0].value
|
||||
} else if (childNode.nodeName === 'operations' && childNode.childNodes.length > 0) {
|
||||
try {
|
||||
const operationsJson = childNode.childNodes[0].value
|
||||
operations = JSON5.parse(operationsJson)
|
||||
} catch (error) {
|
||||
console.error('Failed to parse operations JSON', error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
parsedResult.push({
|
||||
type: 'search_and_replace',
|
||||
path,
|
||||
operations,
|
||||
finish: node.sourceCodeLocation.endTag !== undefined
|
||||
})
|
||||
lastEndOffset = endOffset
|
||||
} else if (node.nodeName === 'attempt_completion') {
|
||||
if (!node.sourceCodeLocation) {
|
||||
throw new Error('sourceCodeLocation is undefined')
|
||||
}
|
||||
const startOffset = node.sourceCodeLocation.startOffset
|
||||
const endOffset = node.sourceCodeLocation.endOffset
|
||||
if (startOffset > lastEndOffset) {
|
||||
parsedResult.push({
|
||||
type: 'string',
|
||||
content: input.slice(lastEndOffset, startOffset),
|
||||
})
|
||||
}
|
||||
let result: string | undefined
|
||||
for (const childNode of node.childNodes) {
|
||||
if (childNode.nodeName === 'result' && childNode.childNodes.length > 0) {
|
||||
result = childNode.childNodes[0].value
|
||||
}
|
||||
}
|
||||
parsedResult.push({
|
||||
type: 'attempt_completion',
|
||||
result,
|
||||
})
|
||||
lastEndOffset = endOffset
|
||||
|
||||
} else if (node.nodeName === 'ask_followup_question') {
|
||||
if (!node.sourceCodeLocation) {
|
||||
throw new Error('sourceCodeLocation is undefined')
|
||||
}
|
||||
const startOffset = node.sourceCodeLocation.startOffset
|
||||
const endOffset = node.sourceCodeLocation.endOffset
|
||||
if (startOffset > lastEndOffset) {
|
||||
parsedResult.push({
|
||||
type: 'string',
|
||||
content: input.slice(lastEndOffset, startOffset),
|
||||
})
|
||||
}
|
||||
let question: string | undefined
|
||||
for (const childNode of node.childNodes) {
|
||||
if (childNode.nodeName === 'question' && childNode.childNodes.length > 0) {
|
||||
question = childNode.childNodes[0].value
|
||||
}
|
||||
}
|
||||
parsedResult.push({
|
||||
type: 'ask_followup_question',
|
||||
question,
|
||||
})
|
||||
lastEndOffset = endOffset
|
||||
}
|
||||
}
|
||||
|
||||
// handle the last part of the input
|
||||
if (lastEndOffset < input.length) {
|
||||
parsedResult.push({
|
||||
type: 'string',
|
||||
content: input.slice(lastEndOffset),
|
||||
})
|
||||
}
|
||||
return parsedResult
|
||||
} catch (error) {
|
||||
console.error('Failed to parse infio block', error)
|
||||
throw error
|
||||
}
|
||||
return parsedResult
|
||||
}
|
||||
|
||||
107
src/utils/path.ts
Normal file
107
src/utils/path.ts
Normal file
@@ -0,0 +1,107 @@
|
||||
import os from "os"
|
||||
import * as path from "path"
|
||||
|
||||
|
||||
/*
|
||||
The Node.js 'path' module resolves and normalizes paths differently depending on the platform:
|
||||
- On Windows, it uses backslashes (\) as the default path separator.
|
||||
- On POSIX-compliant systems (Linux, macOS), it uses forward slashes (/) as the default path separator.
|
||||
|
||||
While modules like 'upath' can be used to normalize paths to use forward slashes consistently,
|
||||
this can create inconsistencies when interfacing with other modules (like vscode.fs) that use
|
||||
backslashes on Windows.
|
||||
|
||||
Our approach:
|
||||
1. We present paths with forward slashes to the AI and user for consistency.
|
||||
2. We use the 'arePathsEqual' function for safe path comparisons.
|
||||
3. Internally, Node.js gracefully handles both backslashes and forward slashes.
|
||||
|
||||
This strategy ensures consistent path presentation while leveraging Node.js's built-in
|
||||
path handling capabilities across different platforms.
|
||||
|
||||
Note: When interacting with the file system or VS Code APIs, we still use the native path module
|
||||
to ensure correct behavior on all platforms. The toPosixPath and arePathsEqual functions are
|
||||
primarily used for presentation and comparison purposes, not for actual file system operations.
|
||||
|
||||
Observations:
|
||||
- Macos isn't so flexible with mixed separators, whereas windows can handle both. ("Node.js does automatically handle path separators on Windows, converting forward slashes to backslashes as needed. However, on macOS and other Unix-like systems, the path separator is always a forward slash (/), and backslashes are treated as regular characters.")
|
||||
*/
|
||||
|
||||
function toPosixPath(p: string) {
|
||||
// Extended-Length Paths in Windows start with "\\?\" to allow longer paths and bypass usual parsing. If detected, we return the path unmodified to maintain functionality, as altering these paths could break their special syntax.
|
||||
const isExtendedLengthPath = p.startsWith("\\\\?\\")
|
||||
|
||||
if (isExtendedLengthPath) {
|
||||
return p
|
||||
}
|
||||
|
||||
return p.replace(/\\/g, "/")
|
||||
}
|
||||
|
||||
// Declaration merging allows us to add a new method to the String type
// You must import this file in your entry point (extension.ts) to have access at runtime
declare global {
  interface String {
    // Returns this string with backslash path separators converted to "/"
    // (extended-length Windows "\\?\" paths are returned unchanged).
    toPosix(): string
  }
}

// Runtime implementation of the String.toPosix() method declared above.
// Delegates to the module-local toPosixPath helper.
String.prototype.toPosix = function (this: string): string {
  return toPosixPath(this)
}
|
||||
|
||||
// Safe path comparison that works across different platforms
|
||||
export function arePathsEqual(path1?: string, path2?: string): boolean {
|
||||
if (!path1 && !path2) {
|
||||
return true
|
||||
}
|
||||
if (!path1 || !path2) {
|
||||
return false
|
||||
}
|
||||
|
||||
path1 = normalizePath(path1)
|
||||
path2 = normalizePath(path2)
|
||||
|
||||
if (process.platform === "win32") {
|
||||
return path1.toLowerCase() === path2.toLowerCase()
|
||||
}
|
||||
return path1 === path2
|
||||
}
|
||||
|
||||
function normalizePath(p: string): string {
|
||||
// normalize resolve ./.. segments, removes duplicate slashes, and standardizes path separators
|
||||
let normalized = path.normalize(p)
|
||||
// however it doesn't remove trailing slashes
|
||||
// remove trailing slash, except for root paths
|
||||
if (normalized.length > 1 && (normalized.endsWith("/") || normalized.endsWith("\\"))) {
|
||||
normalized = normalized.slice(0, -1)
|
||||
}
|
||||
return normalized
|
||||
}
|
||||
|
||||
export function getReadablePath(cwd: string, relPath?: string): string {
|
||||
relPath = relPath || ""
|
||||
// path.resolve is flexible in that it will resolve relative paths like '../../' to the cwd and even ignore the cwd if the relPath is actually an absolute path
|
||||
const absolutePath = path.resolve(cwd, relPath)
|
||||
if (arePathsEqual(cwd, path.join(os.homedir(), "Desktop"))) {
|
||||
// User opened vscode without a workspace, so cwd is the Desktop. Show the full absolute path to keep the user aware of where files are being created
|
||||
return absolutePath.toPosix()
|
||||
}
|
||||
if (arePathsEqual(path.normalize(absolutePath), path.normalize(cwd))) {
|
||||
return path.basename(absolutePath).toPosix()
|
||||
} else {
|
||||
// show the relative path to the cwd
|
||||
const normalizedRelPath = path.relative(cwd, absolutePath)
|
||||
if (absolutePath.includes(cwd)) {
|
||||
return normalizedRelPath.toPosix()
|
||||
} else {
|
||||
// we are outside the cwd, so show the absolute path (useful for when cline passes in '../../' for example)
|
||||
return absolutePath.toPosix()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const toRelativePath = (filePath: string, cwd: string) => {
|
||||
const relativePath = path.relative(cwd, filePath).toPosix()
|
||||
return filePath.endsWith("/") ? relativePath + "/" : relativePath
|
||||
}
|
||||
@@ -1,28 +1,107 @@
|
||||
import { App, TFile, htmlToMarkdown, requestUrl } from 'obsidian'
|
||||
import { App, MarkdownView, TAbstractFile, TFile, TFolder, Vault, htmlToMarkdown, requestUrl } from 'obsidian'
|
||||
|
||||
import { editorStateToPlainText } from '../components/chat-view/chat-input/utils/editor-state-to-plain-text'
|
||||
import { QueryProgressState } from '../components/chat-view/QueryProgress'
|
||||
import { SYSTEM_PROMPT } from '../core/prompts/system'
|
||||
import { RAGEngine } from '../core/rag/rag-engine'
|
||||
import { SelectVector } from '../database/schema'
|
||||
import { ChatMessage, ChatUserMessage } from '../types/chat'
|
||||
import { ContentPart, RequestMessage } from '../types/llm/request'
|
||||
import {
|
||||
MentionableBlock, MentionableCurrentFile, MentionableFile,
|
||||
MentionableBlock,
|
||||
MentionableFile,
|
||||
MentionableFolder,
|
||||
MentionableImage,
|
||||
MentionableUrl,
|
||||
MentionableVault
|
||||
} from '../types/mentionable'
|
||||
import { InfioSettings } from '../types/settings'
|
||||
import { defaultModeSlug, getFullModeDetails } from "../utils/modes"
|
||||
|
||||
import { listFilesAndFolders } from './glob-utils'
|
||||
import {
|
||||
getNestedFiles,
|
||||
readMultipleTFiles,
|
||||
readTFileContent,
|
||||
readTFileContent
|
||||
} from './obsidian'
|
||||
import { tokenCount } from './token'
|
||||
import { YoutubeTranscript, isYoutubeUrl } from './youtube-transcript'
|
||||
|
||||
export function addLineNumbers(content: string, startLine: number = 1): string {
|
||||
const lines = content.split("\n")
|
||||
const maxLineNumberWidth = String(startLine + lines.length - 1).length
|
||||
return lines
|
||||
.map((line, index) => {
|
||||
const lineNumber = String(startLine + index).padStart(maxLineNumberWidth, " ")
|
||||
return `${lineNumber} | ${line}`
|
||||
})
|
||||
.join("\n")
|
||||
}
|
||||
|
||||
async function getFolderTreeContent(path: TFolder): Promise<string> {
|
||||
try {
|
||||
const entries = path.children
|
||||
let folderContent = ""
|
||||
entries.forEach((entry, index) => {
|
||||
const isLast = index === entries.length - 1
|
||||
const linePrefix = isLast ? "└── " : "├── "
|
||||
if (entry instanceof TFile) {
|
||||
folderContent += `${linePrefix}${entry.name}\n`
|
||||
} else if (entry instanceof TFolder) {
|
||||
folderContent += `${linePrefix}${entry.name}/\n`
|
||||
} else {
|
||||
folderContent += `${linePrefix}${entry.name}\n`
|
||||
}
|
||||
})
|
||||
return folderContent
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to access path "${path.path}": ${error.message}`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Reads displayable content for a file or folder.
 *
 * - For a markdown TFile: returns its content with line numbers prepended.
 * - For a non-markdown TFile: returns a "(Binary file, ...)" placeholder.
 * - For a TFolder: returns a one-level tree listing of its children,
 *   followed by the line-numbered contents of each direct markdown child
 *   wrapped in <file_content path="..."> tags. Child reads happen
 *   concurrently; children that are not markdown or that fail to read are
 *   silently omitted from the content section.
 * - For any other TAbstractFile: returns a failure placeholder string.
 *
 * @param path - File or folder to read.
 * @param vault - Vault used to read file contents.
 * @throws Error wrapping any failure outside the per-child best-effort reads.
 */
async function getFileOrFolderContent(path: TAbstractFile, vault: Vault): Promise<string> {
  try {
    if (path instanceof TFile) {
      if (path.extension != 'md') {
        return "(Binary file, unable to display content)"
      }
      return addLineNumbers(await readTFileContent(path, vault))
    } else if (path instanceof TFolder) {
      const entries = path.children
      let folderContent = ""
      // Content reads are kicked off while building the tree listing and
      // awaited together below.
      const fileContentPromises: Promise<string | undefined>[] = []
      entries.forEach((entry, index) => {
        const isLast = index === entries.length - 1
        const linePrefix = isLast ? "└── " : "├── "
        if (entry instanceof TFile) {
          folderContent += `${linePrefix}${entry.name}\n`
          fileContentPromises.push(
            (async () => {
              try {
                // Skip non-markdown children (binary or unsupported formats).
                if (entry.extension != 'md') {
                  return undefined
                }
                const content = addLineNumbers(await readTFileContent(entry, vault))
                return `<file_content path="${entry.path}">\n${content}\n</file_content>`
              } catch (error) {
                // Best-effort: an unreadable child is dropped, not fatal.
                return undefined
              }
            })(),
          )
        } else if (entry instanceof TFolder) {
          folderContent += `${linePrefix}${entry.name}/\n`
        } else {
          folderContent += `${linePrefix}${entry.name}\n`
        }
      })
      // Drop the undefined placeholders from skipped/failed reads.
      const fileContents = (await Promise.all(fileContentPromises)).filter((content) => content)
      return `${folderContent}\n${fileContents.join("\n\n")}`.trim()
    } else {
      return `(Failed to read contents of ${path.path})`
    }
  } catch (error) {
    throw new Error(`Failed to access path "${path.path}": ${error.message}`)
  }
}
|
||||
|
||||
export class PromptGenerator {
|
||||
private getRagEngine: () => Promise<RAGEngine>
|
||||
private app: App
|
||||
@@ -47,12 +126,10 @@ export class PromptGenerator {
|
||||
messages,
|
||||
useVaultSearch,
|
||||
onQueryProgressChange,
|
||||
type,
|
||||
}: {
|
||||
messages: ChatMessage[]
|
||||
useVaultSearch?: boolean
|
||||
onQueryProgressChange?: (queryProgress: QueryProgressState) => void
|
||||
type?: string
|
||||
}): Promise<{
|
||||
requestMessages: RequestMessage[]
|
||||
compiledMessages: ChatMessage[]
|
||||
@@ -64,14 +141,16 @@ export class PromptGenerator {
|
||||
if (lastUserMessage.role !== 'user') {
|
||||
throw new Error('Last message is not a user message')
|
||||
}
|
||||
const isNewChat = messages.filter(message => message.role === 'user').length === 1
|
||||
|
||||
const { promptContent, shouldUseRAG, similaritySearchResults } =
|
||||
const { promptContent, similaritySearchResults } =
|
||||
await this.compileUserMessagePrompt({
|
||||
isNewChat,
|
||||
message: lastUserMessage,
|
||||
useVaultSearch,
|
||||
onQueryProgressChange,
|
||||
})
|
||||
let compiledMessages = [
|
||||
const compiledMessages = [
|
||||
...messages.slice(0, -1),
|
||||
{
|
||||
...lastUserMessage,
|
||||
@@ -80,39 +159,10 @@ export class PromptGenerator {
|
||||
},
|
||||
]
|
||||
|
||||
// Safeguard: ensure all user messages have parsed content
|
||||
compiledMessages = await Promise.all(
|
||||
compiledMessages.map(async (message) => {
|
||||
if (message.role === 'user' && !message.promptContent) {
|
||||
const { promptContent, similaritySearchResults } =
|
||||
await this.compileUserMessagePrompt({
|
||||
message,
|
||||
})
|
||||
return {
|
||||
...message,
|
||||
promptContent,
|
||||
similaritySearchResults,
|
||||
}
|
||||
}
|
||||
return message
|
||||
}),
|
||||
)
|
||||
|
||||
const systemMessage = this.getSystemMessage(shouldUseRAG, type)
|
||||
|
||||
const customInstructionMessage = this.getCustomInstructionMessage()
|
||||
|
||||
const currentFile = lastUserMessage.mentionables.find(
|
||||
(m): m is MentionableCurrentFile => m.type === 'current-file',
|
||||
)?.file
|
||||
const currentFileMessage = currentFile
|
||||
? await this.getCurrentFileMessage(currentFile)
|
||||
: undefined
|
||||
const systemMessage = await this.getSystemMessageNew()
|
||||
|
||||
const requestMessages: RequestMessage[] = [
|
||||
systemMessage,
|
||||
...(customInstructionMessage ? [customInstructionMessage, PromptGenerator.EMPTY_ASSISTANT_MESSAGE] : []),
|
||||
...(currentFileMessage ? [currentFileMessage, PromptGenerator.EMPTY_ASSISTANT_MESSAGE] : []),
|
||||
...compiledMessages.slice(-19).map((message): RequestMessage => {
|
||||
if (message.role === 'user') {
|
||||
return {
|
||||
@@ -126,7 +176,6 @@ export class PromptGenerator {
|
||||
}
|
||||
}
|
||||
}),
|
||||
...(shouldUseRAG ? [this.getRagInstructionMessage()] : []),
|
||||
]
|
||||
|
||||
return {
|
||||
@@ -135,27 +184,101 @@ export class PromptGenerator {
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Builds an <environment_details> block describing the user's current
 * Obsidian context for inclusion in the system/user prompt: the active
 * file, all open markdown tabs, the current local time (with timezone and
 * UTC offset), and the current assistant mode's slug and name.
 */
private async getEnvironmentDetails() {
  let details = ""
  // Obsidian Current File
  details += "\n\n# Obsidian Current File"
  const currentFile = this.app.workspace.getActiveFile()
  if (currentFile) {
    details += `\n${currentFile?.path}`
  } else {
    details += "\n(No current file)"
  }

  // Obsidian Open Tabs: collect the path of every open markdown view.
  details += "\n\n# Obsidian Open Tabs"
  const openTabs: string[] = [];
  this.app.workspace.iterateAllLeaves(leaf => {
    if (leaf.view instanceof MarkdownView && leaf.view.file) {
      openTabs.push(leaf.view.file?.path);
    }
  });
  if (openTabs.length === 0) {
    details += "\n(No open tabs)"
  } else {
    details += `\n${openTabs.join("\n")}`
  }

  // Add current time information with timezone.
  // Intl.DateTimeFormat with no locale uses the runtime's default locale
  // and timezone.
  const now = new Date()
  const formatter = new Intl.DateTimeFormat(undefined, {
    year: "numeric",
    month: "numeric",
    day: "numeric",
    hour: "numeric",
    minute: "numeric",
    second: "numeric",
    hour12: true,
  })
  const timeZone = formatter.resolvedOptions().timeZone
  const timeZoneOffset = -now.getTimezoneOffset() / 60 // Convert to hours and invert sign to match conventional notation
  const timeZoneOffsetStr = `${timeZoneOffset >= 0 ? "+" : ""}${timeZoneOffset}:00`
  details += `\n\n# Current Time\n${formatter.format(now)} (${timeZone}, UTC${timeZoneOffsetStr})`

  // Add current mode details
  const currentMode = defaultModeSlug
  const modeDetails = await getFullModeDetails(currentMode)
  details += `\n\n# Current Mode\n`
  details += `<slug>${currentMode}</slug>\n`
  details += `<name>${modeDetails.name}</name>\n`

  // // Obsidian Current Folder
  // const currentFolder = this.app.workspace.getActiveFile() ? this.app.workspace.getActiveFile()?.parent?.path : "/"
  // // Obsidian Vault Files and Folders
  // if (currentFolder) {
  // 	details += `\n\n# Obsidian Current Folder (${currentFolder}) Files`
  // 	const filesAndFolders = await listFilesAndFolders(this.app.vault, currentFolder)
  // 	if (filesAndFolders.length > 0) {
  // 		details += `\n${filesAndFolders.filter(Boolean).join("\n")}`
  // 	} else {
  // 		details += "\n(No Markdown files in current folder)"
  // 	}
  // } else {
  // 	details += "\n(No current folder)"
  // }

  return `<environment_details>\n${details.trim()}\n</environment_details>`
}
|
||||
|
||||
private async compileUserMessagePrompt({
|
||||
isNewChat,
|
||||
message,
|
||||
useVaultSearch,
|
||||
onQueryProgressChange,
|
||||
}: {
|
||||
isNewChat: boolean
|
||||
message: ChatUserMessage
|
||||
useVaultSearch?: boolean
|
||||
onQueryProgressChange?: (queryProgress: QueryProgressState) => void
|
||||
}): Promise<{
|
||||
promptContent: ChatUserMessage['promptContent']
|
||||
shouldUseRAG: boolean
|
||||
similaritySearchResults?: (Omit<SelectVector, 'embedding'> & {
|
||||
similarity: number
|
||||
})[]
|
||||
}> {
|
||||
if (!message.content) {
|
||||
// Add environment details
|
||||
const environmentDetails = isNewChat
|
||||
? await this.getEnvironmentDetails()
|
||||
: undefined
|
||||
|
||||
// if isToolCallReturn, add read_file_content to promptContent
|
||||
if (message.content === null) {
|
||||
return {
|
||||
promptContent: '',
|
||||
shouldUseRAG: false,
|
||||
promptContent: message.promptContent,
|
||||
similaritySearchResults: undefined,
|
||||
}
|
||||
}
|
||||
|
||||
const query = editorStateToPlainText(message.content)
|
||||
let similaritySearchResults = undefined
|
||||
|
||||
@@ -169,33 +292,94 @@ export class PromptGenerator {
|
||||
onQueryProgressChange?.({
|
||||
type: 'reading-mentionables',
|
||||
})
|
||||
|
||||
const taskPrompt = isNewChat ? `<task>\n${query}\n</task>` : `<feedback>\n${query}\n</feedback>`
|
||||
|
||||
// user mention files
|
||||
const files = message.mentionables
|
||||
.filter((m): m is MentionableFile => m.type === 'file')
|
||||
.map((m) => m.file)
|
||||
let fileContentsPrompts = files.length > 0
|
||||
? (await Promise.all(files.map(async (file) => {
|
||||
const content = await getFileOrFolderContent(file, this.app.vault)
|
||||
return `<file_content path="${file.path}">\n${content}\n</file_content>`
|
||||
}))).join('\n')
|
||||
: undefined
|
||||
|
||||
// user mention folders
|
||||
const folders = message.mentionables
|
||||
.filter((m): m is MentionableFolder => m.type === 'folder')
|
||||
.map((m) => m.folder)
|
||||
const nestedFiles = folders.flatMap((folder) =>
|
||||
getNestedFiles(folder, this.app.vault),
|
||||
let folderContentsPrompts = folders.length > 0
|
||||
? (await Promise.all(folders.map(async (folder) => {
|
||||
const content = await getFileOrFolderContent(folder, this.app.vault)
|
||||
return `<folder_content path="${folder.path}">\n${content}\n</folder_content>`
|
||||
}))).join('\n')
|
||||
: undefined
|
||||
|
||||
// user mention blocks
|
||||
const blocks = message.mentionables.filter(
|
||||
(m): m is MentionableBlock => m.type === 'block',
|
||||
)
|
||||
const allFiles = [...files, ...nestedFiles]
|
||||
const fileContents = await readMultipleTFiles(allFiles, this.app.vault)
|
||||
const blockContentsPrompt = blocks.length > 0
|
||||
? blocks
|
||||
.map(({ file, content, startLine, endLine }) => {
|
||||
const content_with_line_numbers = addLineNumbers(content, startLine)
|
||||
return `<file_block_content location="${file.path}#L${startLine}-${endLine}">\n${content_with_line_numbers}\n</file_block_content>`
|
||||
})
|
||||
.join('\n')
|
||||
: undefined
|
||||
|
||||
// Count tokens incrementally to avoid long processing times on large content sets
|
||||
const exceedsTokenThreshold = async () => {
|
||||
let accTokenCount = 0
|
||||
for (const content of fileContents) {
|
||||
const count = await tokenCount(content)
|
||||
accTokenCount += count
|
||||
if (accTokenCount > this.settings.ragOptions.thresholdTokens) {
|
||||
return true
|
||||
}
|
||||
// user mention urls
|
||||
const urls = message.mentionables.filter(
|
||||
(m): m is MentionableUrl => m.type === 'url',
|
||||
)
|
||||
const urlContents = await Promise.all(
|
||||
urls.map(async ({ url }) => ({
|
||||
url,
|
||||
content: await this.getWebsiteContent(url)
|
||||
}))
|
||||
)
|
||||
const urlContentsPrompt = urlContents.length > 0
|
||||
? urlContents
|
||||
.map(({ url, content }) => (
|
||||
`<url_content url="${url}">\n${content}\n</url_content>`
|
||||
))
|
||||
.join('\n') : undefined
|
||||
|
||||
const currentFile = message.mentionables
|
||||
.filter((m): m is MentionableFile => m.type === 'current-file')
|
||||
.first()
|
||||
const currentFileContent = currentFile && currentFile.file != null
|
||||
? await getFileOrFolderContent(currentFile.file, this.app.vault)
|
||||
: undefined
|
||||
|
||||
const currentFileContentPrompt = isNewChat && currentFileContent
|
||||
? `<current_file_content path="${currentFile.file.path}">\n${currentFileContent}\n</current_file_content>`
|
||||
: undefined
|
||||
|
||||
// Count file and folder tokens
|
||||
let accTokenCount = 0
|
||||
let isOverThreshold = false
|
||||
for (const content of [fileContentsPrompts, folderContentsPrompts].filter(Boolean)) {
|
||||
const count = await tokenCount(content)
|
||||
accTokenCount += count
|
||||
if (accTokenCount > this.settings.ragOptions.thresholdTokens) {
|
||||
isOverThreshold = true
|
||||
}
|
||||
return false
|
||||
}
|
||||
const shouldUseRAG = useVaultSearch || (await exceedsTokenThreshold())
|
||||
if (isOverThreshold) {
|
||||
fileContentsPrompts = files.map((file) => {
|
||||
return `<file_content path="${file.path}">\n(Content omitted due to token limit. Relevant sections will be provided by semantic search below.)\n</file_content>`
|
||||
}).join('\n')
|
||||
folderContentsPrompts = folders.map(async (folder) => {
|
||||
const tree_content = await getFolderTreeContent(folder)
|
||||
return `<folder_content path="${folder.path}">\n${tree_content}\n(Content omitted due to token limit. Relevant sections will be provided by semantic search below.)\n</folder_content>`
|
||||
}).join('\n')
|
||||
}
|
||||
|
||||
let filePrompt: string
|
||||
const shouldUseRAG = useVaultSearch || isOverThreshold
|
||||
let similaritySearchContents
|
||||
if (shouldUseRAG) {
|
||||
similaritySearchResults = useVaultSearch
|
||||
? await (
|
||||
@@ -203,7 +387,7 @@ export class PromptGenerator {
|
||||
).processQuery({
|
||||
query,
|
||||
onQueryProgressChange: onQueryProgressChange,
|
||||
}) // TODO: Add similarity boosting for mentioned files or folders
|
||||
})
|
||||
: await (
|
||||
await this.getRagEngine()
|
||||
).processQuery({
|
||||
@@ -214,60 +398,42 @@ export class PromptGenerator {
|
||||
},
|
||||
onQueryProgressChange: onQueryProgressChange,
|
||||
})
|
||||
filePrompt = `## Potentially relevant snippets from the current vault
|
||||
${similaritySearchResults
|
||||
.map(({ path, content, metadata }) => {
|
||||
const contentWithLineNumbers = this.addLineNumbersToContent({
|
||||
content,
|
||||
startLine: metadata.startLine,
|
||||
})
|
||||
return `\`\`\`${path}\n${contentWithLineNumbers}\n\`\`\`\n`
|
||||
})
|
||||
.join('')}\n`
|
||||
} else {
|
||||
filePrompt = allFiles
|
||||
.map((file, index) => {
|
||||
return `\`\`\`${file.path}\n${fileContents[index]}\n\`\`\`\n`
|
||||
const snippets = similaritySearchResults.map(({ path, content, metadata }) => {
|
||||
const contentWithLineNumbers = this.addLineNumbersToContent({
|
||||
content,
|
||||
startLine: metadata.startLine,
|
||||
})
|
||||
.join('')
|
||||
return `<file_block_content location="${path}#L${metadata.startLine}-${metadata.endLine}">\n${contentWithLineNumbers}\n</file_block_content>`
|
||||
}).join('\n')
|
||||
similaritySearchContents = snippets.length > 0
|
||||
? `<similarity_search_results>\n${snippets}\n</similarity_search_results>`
|
||||
: '<similarity_search_results>\n(No relevant results found)\n</similarity_search_results>'
|
||||
} else {
|
||||
similaritySearchContents = undefined
|
||||
}
|
||||
|
||||
const blocks = message.mentionables.filter(
|
||||
(m): m is MentionableBlock => m.type === 'block',
|
||||
)
|
||||
const blockPrompt = blocks
|
||||
.map(({ file, content, startLine, endLine }) => {
|
||||
return `\`\`\`${file.path}#L${startLine}-${endLine}\n${content}\n\`\`\`\n`
|
||||
})
|
||||
.join('')
|
||||
|
||||
const urls = message.mentionables.filter(
|
||||
(m): m is MentionableUrl => m.type === 'url',
|
||||
)
|
||||
|
||||
const urlPrompt =
|
||||
urls.length > 0
|
||||
? `## Potentially relevant web search results
|
||||
${(
|
||||
await Promise.all(
|
||||
urls.map(
|
||||
async ({ url }) => `\`\`\`
|
||||
Website URL: ${url}
|
||||
Website Content:
|
||||
${await this.getWebsiteContent(url)}
|
||||
\`\`\``,
|
||||
),
|
||||
)
|
||||
).join('\n')}
|
||||
`
|
||||
: ''
|
||||
const parsedText = [
|
||||
taskPrompt,
|
||||
blockContentsPrompt,
|
||||
fileContentsPrompts,
|
||||
folderContentsPrompts,
|
||||
urlContentsPrompt,
|
||||
similaritySearchContents,
|
||||
currentFileContentPrompt,
|
||||
environmentDetails,
|
||||
].filter(Boolean).join('\n\n')
|
||||
|
||||
// user mention images
|
||||
const imageDataUrls = message.mentionables
|
||||
.filter((m): m is MentionableImage => m.type === 'image')
|
||||
.map(({ data }) => data)
|
||||
|
||||
return {
|
||||
promptContent: [
|
||||
{
|
||||
type: 'text',
|
||||
text: parsedText,
|
||||
},
|
||||
...imageDataUrls.map(
|
||||
(data): ContentPart => ({
|
||||
type: 'image_url',
|
||||
@@ -275,14 +441,18 @@ ${await this.getWebsiteContent(url)}
|
||||
url: data,
|
||||
},
|
||||
}),
|
||||
),
|
||||
{
|
||||
type: 'text',
|
||||
text: `${filePrompt}${blockPrompt}${urlPrompt}\n\n${query}\n\n`,
|
||||
},
|
||||
)
|
||||
],
|
||||
shouldUseRAG,
|
||||
similaritySearchResults: similaritySearchResults,
|
||||
similaritySearchResults,
|
||||
}
|
||||
}
|
||||
|
||||
private async getSystemMessageNew(): Promise<RequestMessage> {
|
||||
const systemPrompt = await SYSTEM_PROMPT(this.app.vault.getRoot().path, false)
|
||||
|
||||
return {
|
||||
role: 'system',
|
||||
content: systemPrompt,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -392,7 +562,7 @@ ${customInstruction}
|
||||
return {
|
||||
role: 'user',
|
||||
content: `# Inputs
|
||||
## Current file
|
||||
## Current Open File
|
||||
Here is the file I'm looking at.
|
||||
\`\`\`${currentFile.path}
|
||||
${fileContent}
|
||||
|
||||
77
src/utils/tool-groups.ts
Normal file
77
src/utils/tool-groups.ts
Normal file
@@ -0,0 +1,77 @@
|
||||
// Define tool group configuration
|
||||
export type ToolGroupConfig = {
|
||||
tools: readonly string[]
|
||||
alwaysAvailable?: boolean // Whether this group is always available and shouldn't show in prompts view
|
||||
}
|
||||
|
||||
// Map of tool slugs to their display names
|
||||
export const TOOL_DISPLAY_NAMES = {
|
||||
execute_command: "run commands",
|
||||
read_file: "read files",
|
||||
write_to_file: "write files",
|
||||
apply_diff: "apply changes",
|
||||
search_files: "search files",
|
||||
list_files: "list files",
|
||||
// list_code_definition_names: "list definitions",
|
||||
browser_action: "use a browser",
|
||||
use_mcp_tool: "use mcp tools",
|
||||
access_mcp_resource: "access mcp resources",
|
||||
ask_followup_question: "ask questions",
|
||||
attempt_completion: "complete tasks",
|
||||
switch_mode: "switch modes",
|
||||
new_task: "create new task",
|
||||
} as const
|
||||
|
||||
// Define available tool groups
|
||||
export const TOOL_GROUPS: Record<string, ToolGroupConfig> = {
|
||||
read: {
|
||||
tools: ["read_file", "list_files", "search_files"],
|
||||
},
|
||||
edit: {
|
||||
tools: ["apply_diff", "write_to_file", "insert_content", "search_and_replace"],
|
||||
},
|
||||
// browser: {
|
||||
// tools: ["browser_action"],
|
||||
// },
|
||||
// command: {
|
||||
// tools: ["execute_command"],
|
||||
// },
|
||||
mcp: {
|
||||
tools: ["use_mcp_tool", "access_mcp_resource"],
|
||||
},
|
||||
modes: {
|
||||
tools: ["switch_mode",],
|
||||
alwaysAvailable: true,
|
||||
},
|
||||
}
|
||||
|
||||
export type ToolGroup = keyof typeof TOOL_GROUPS
|
||||
|
||||
// Tools that are always available to all modes
|
||||
export const ALWAYS_AVAILABLE_TOOLS = [
|
||||
"ask_followup_question",
|
||||
"attempt_completion",
|
||||
"switch_mode",
|
||||
"new_task",
|
||||
] as const
|
||||
|
||||
// Tool name types for type safety
|
||||
export type ToolName = keyof typeof TOOL_DISPLAY_NAMES
|
||||
|
||||
// Tool helper functions
|
||||
export function getToolName(toolConfig: string | readonly [ToolName, ...any[]]): ToolName {
|
||||
return typeof toolConfig === "string" ? (toolConfig as ToolName) : toolConfig[0]
|
||||
}
|
||||
|
||||
export function getToolOptions(toolConfig: string | readonly [ToolName, ...any[]]): any {
|
||||
return typeof toolConfig === "string" ? undefined : toolConfig[1]
|
||||
}
|
||||
|
||||
// Display names for groups in UI
|
||||
export const GROUP_DISPLAY_NAMES: Record<ToolGroup, string> = {
|
||||
read: "Read Files",
|
||||
edit: "Edit Files",
|
||||
browser: "Use Browser",
|
||||
command: "Run Commands",
|
||||
mcp: "Use MCP",
|
||||
}
|
||||
Reference in New Issue
Block a user