mirror of
https://github.com/EthanMarti/infio-copilot.git
synced 2026-05-09 08:30:09 +00:00
feat: Enhance file search with core plugin and Omnisearch integration
- Introduces a new match_search_files tool for fuzzy/keyword search, integrating with Obsidian's core search plugin and updating Omnisearch integration for improved file search capabilities.
- Adds settings for selecting search backends (core plugin, Omnisearch, ripgrep) for both regex and match searches.
- Updates language files, prompts, and types to support the new functionality.
- Restructures search-related files for better organization.
This commit is contained in:
85
src/core/search/match/coreplugin-match.ts
Normal file
85
src/core/search/match/coreplugin-match.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
import { App } from "obsidian";
|
||||
import {
|
||||
MAX_RESULTS,
|
||||
truncateLine,
|
||||
SearchResult,
|
||||
formatResults,
|
||||
} from '../search-common';
|
||||
|
||||
/**
 * Searches using Obsidian's core search plugin and builds context for each match.
 *
 * @param query The query to search for.
 * @param app The Obsidian App instance.
 * @returns A promise that resolves to a formatted string of search results.
 * @throws Error when the core ("global-search") plugin is not available.
 */
export async function searchFilesWithCorePlugin(
	query: string,
	app: App,
): Promise<string> {
	// Reaches into Obsidian's internal plugin registry — not a public API.
	const searchPlugin = (app as any).internalPlugins.plugins['global-search']?.instance;
	if (!searchPlugin) {
		throw new Error("Core search plugin is not available.");
	}

	// The core search function is not officially documented and may change.
	// This is based on community findings and common usage in other plugins.
	// NOTE(review): assumes `.on()` returns a callable unregister function;
	// Obsidian's Events API normally returns an EventRef meant for `.offref()` — confirm.
	// NOTE(review): this promise never settles if "search-results" is never emitted (no timeout).
	const searchResults = await new Promise<any[]>((resolve) => {
		const unregister = searchPlugin.on("search-results", (results: any) => {
			unregister();
			resolve(results);
		});
		searchPlugin.openGlobalSearch(query);
	});

	const results: SearchResult[] = [];
	const vault = app.vault;

	// Each entry pairs a file with the list of matches found in it.
	for (const fileMatches of Object.values(searchResults) as any) {
		if (results.length >= MAX_RESULTS) {
			break;
		}

		// NOTE(review): `'read' in file` is presumably a duck-typed "is a file, not a
		// folder" check — verify TFile actually exposes a `read` member, otherwise
		// this would skip every file.
		const file = vault.getAbstractFileByPath(fileMatches.file.path);
		if (!file || !('read' in file)) {
			continue;
		}

		const content = await vault.cachedRead(file as any);
		const lines = content.split('\n');

		// Each match entry is a [matchedText, startOffset] tuple.
		for (const match of fileMatches.result.content) {
			if (results.length >= MAX_RESULTS) {
				break;
			}

			const [matchText, startOffset] = match; // matchText is currently unused
			let charCount = 0;
			let lineNumber = 0; // becomes 1-based once the line is found
			let column = 0; // becomes 1-based once the line is found
			let lineContent = "";

			// Walk the lines to translate the character offset into line/column.
			for (let i = 0; i < lines.length; i++) {
				const lineLength = lines[i].length + 1; // +1 for the newline character
				if (charCount + lineLength > startOffset) {
					lineNumber = i + 1;
					column = startOffset - charCount + 1;
					lineContent = lines[i];
					break;
				}
				charCount += lineLength;
			}

			results.push({
				file: fileMatches.file.path,
				line: lineNumber,
				column: column,
				match: truncateLine(lineContent.trimEnd()),
				// One line of surrounding context on each side, when it exists.
				beforeContext: lineNumber > 1 ? [truncateLine(lines[lineNumber - 2].trimEnd())] : [],
				afterContext: lineNumber < lines.length ? [truncateLine(lines[lineNumber].trimEnd())] : [],
			});
		}
	}

	// NOTE(review): ".\\" is a Windows-style cwd; on POSIX, path.relative will treat
	// it as a literal ".\" directory name — confirm output paths on macOS/Linux.
	return formatResults(results, ".\\");
}
|
||||
136
src/core/search/match/omnisearch-match.ts
Normal file
136
src/core/search/match/omnisearch-match.ts
Normal file
@@ -0,0 +1,136 @@
|
||||
import { App } from "obsidian";
|
||||
import {
|
||||
MAX_RESULTS,
|
||||
truncateLine,
|
||||
SearchResult,
|
||||
formatResults,
|
||||
} from '../search-common';
|
||||
|
||||
/** One match inside a note, as reported by the Omnisearch API. */
type SearchMatchApi = {
	// The matched text.
	match: string;
	// Character offset of the match within the note content.
	offset: number;
};

/** One note in an Omnisearch result list. */
type ResultNoteApi = {
	// Relevance score reported by Omnisearch.
	score: number;
	// Name of the vault the note belongs to.
	vault: string;
	// Vault-relative path of the note.
	path: string;
	// File name without its extension.
	basename: string;
	// Query words that were found in this note.
	foundWords: string[];
	// Individual match locations inside the note.
	matches: SearchMatchApi[];
	// Excerpt of the note content returned by Omnisearch.
	excerpt: string;
};

/** Subset of the Omnisearch plugin's public API used by this module. */
type OmnisearchApi = {
	// Performs a keyword/fuzzy search and resolves with ranked note results.
	search: (query: string) => Promise<ResultNoteApi[]>;
	// ... other API methods
};

// Omnisearch exposes its API on `window` when the plugin is enabled.
declare global {
	interface Window {
		omnisearch: OmnisearchApi;
	}
}
|
||||
|
||||
/**
|
||||
* Checks if the Omnisearch plugin's API is available.
|
||||
* @returns {boolean} True if the API is ready, false otherwise.
|
||||
*/
|
||||
function isOmnisearchAvailable(): boolean {
|
||||
return window.omnisearch && typeof window.omnisearch.search === "function";
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds the line number, column number, and content for a given character offset in a file.
|
||||
* @param allLines All lines in the file.
|
||||
* @param offset The character offset of the match.
|
||||
* @returns An object with line number, column number, and the full line content.
|
||||
*/
|
||||
function findLineAndColumnFromOffset(
|
||||
allLines: string[],
|
||||
offset: number
|
||||
): { lineNumber: number; columnNumber: number; lineContent: string } {
|
||||
let charCount = 0;
|
||||
for (let i = 0; i < allLines.length; i++) {
|
||||
const line = allLines[i];
|
||||
// The line ending length (1 for \n, 2 for \r\n) can vary.
|
||||
// A simple +1 is a reasonable approximation for this calculation.
|
||||
const lineEndOffset = charCount + line.length + 1;
|
||||
|
||||
if (offset < lineEndOffset) {
|
||||
const columnNumber = offset - charCount;
|
||||
return { lineNumber: i, columnNumber, lineContent: line };
|
||||
}
|
||||
charCount = lineEndOffset;
|
||||
}
|
||||
return { lineNumber: -1, columnNumber: -1, lineContent: "" };
|
||||
}
|
||||
|
||||
/**
|
||||
* Searches using Omnisearch and builds context for each match.
|
||||
* @param query The search query for Omnisearch. Note: Omnisearch does not support full regex.
|
||||
* @param app The Obsidian App instance.
|
||||
* @returns A formatted string of search results.
|
||||
*/
|
||||
export async function searchFilesWithOmnisearch(
|
||||
query: string,
|
||||
app: App,
|
||||
): Promise<string> {
|
||||
try {
|
||||
if (!isOmnisearchAvailable()) {
|
||||
throw new Error(
|
||||
"Omnisearch plugin not found or not active. Please install and enable it to use this search feature."
|
||||
);
|
||||
}
|
||||
|
||||
// Omnisearch is not a regex engine.
|
||||
// The `query` will be treated as a keyword/fuzzy search by the plugin.
|
||||
const apiResults = await window.omnisearch.search(query);
|
||||
if (!apiResults || apiResults.length === 0) {
|
||||
throw new Error("No results found.");
|
||||
}
|
||||
|
||||
const results: SearchResult[] = [];
|
||||
|
||||
for (const result of apiResults) {
|
||||
if (results.length >= MAX_RESULTS) {
|
||||
break; // Stop processing new files if we have enough results
|
||||
}
|
||||
if (!result.matches || result.matches.length === 0) continue;
|
||||
|
||||
const fileContent = await app.vault.adapter.read(result.path);
|
||||
const allLines = fileContent.split("\n");
|
||||
|
||||
for (const match of result.matches) {
|
||||
if (results.length >= MAX_RESULTS) {
|
||||
break; // Stop processing matches if we have enough results
|
||||
}
|
||||
|
||||
const { lineNumber, columnNumber, lineContent } = findLineAndColumnFromOffset(
|
||||
allLines,
|
||||
match.offset
|
||||
);
|
||||
|
||||
if (lineNumber === -1) continue;
|
||||
|
||||
const searchResult: SearchResult = {
|
||||
file: result.path,
|
||||
line: lineNumber + 1, // ripgrep is 1-based, so we adjust
|
||||
column: columnNumber + 1,
|
||||
match: truncateLine(lineContent.trimEnd()),
|
||||
beforeContext: lineNumber > 0 ? [truncateLine(allLines[lineNumber - 1].trimEnd())] : [],
|
||||
afterContext:
|
||||
lineNumber < allLines.length - 1
|
||||
? [truncateLine(allLines[lineNumber + 1].trimEnd())]
|
||||
: [],
|
||||
};
|
||||
results.push(searchResult);
|
||||
}
|
||||
}
|
||||
|
||||
return formatResults(results, ".\\");
|
||||
} catch (error) {
|
||||
console.error("Error during Omnisearch processing:", error);
|
||||
return "An error occurred during the search.";
|
||||
}
|
||||
}
|
||||
17
src/core/search/regex/coreplugin-regex.ts
Normal file
17
src/core/search/regex/coreplugin-regex.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { App } from "obsidian";
|
||||
import { searchFilesWithCorePlugin } from '../match/coreplugin-match'
|
||||
|
||||
/**
|
||||
* Performs a regular expression search using Obsidian's core search plugin.
|
||||
*
|
||||
* @param app The Obsidian App instance.
|
||||
* @param regex The regular expression to search for.
|
||||
* @returns A promise that resolves to a formatted string of search results.
|
||||
*/
|
||||
export async function regexSearchFilesWithCorePlugin(
|
||||
regex: string,
|
||||
app: App,
|
||||
): Promise<string> {
|
||||
const query = "/" + regex + "/";
|
||||
return searchFilesWithCorePlugin(query, app);
|
||||
}
|
||||
147
src/core/search/regex/ripgrep-regex.ts
Normal file
147
src/core/search/regex/ripgrep-regex.ts
Normal file
@@ -0,0 +1,147 @@
|
||||
// import * as vscode from "vscode"
|
||||
import * as childProcess from "child_process"
|
||||
import * as fs from "fs"
|
||||
import * as path from "path"
|
||||
import * as readline from "readline"
|
||||
import {
|
||||
MAX_RESULTS,
|
||||
truncateLine,
|
||||
SearchResult,
|
||||
formatResults
|
||||
} from '../search-common'
|
||||
|
||||
const isWindows = /^win/.test(process.platform)
|
||||
const binName = isWindows ? "rg.exe" : "rg"
|
||||
|
||||
async function getBinPath(ripgrepPath: string): Promise<string | undefined> {
|
||||
const binPath = path.join(ripgrepPath, binName)
|
||||
return (await pathExists(binPath)) ? binPath : undefined
|
||||
}
|
||||
|
||||
async function pathExists(path: string): Promise<boolean> {
|
||||
return new Promise((resolve) => {
|
||||
fs.access(path, (err) => {
|
||||
resolve(err === null)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
/**
 * Runs the ripgrep binary with the given arguments and returns its stdout.
 *
 * Output is consumed line-by-line and capped, since ripgrep itself offers no
 * built-in way to limit the number of results.
 *
 * @param bin Full path to the ripgrep executable.
 * @param args Command-line arguments passed to ripgrep.
 * @returns A promise resolving to the (possibly truncated) stdout text.
 * @throws Error when ripgrep writes to stderr or fails to spawn.
 */
async function execRipgrep(bin: string, args: string[]): Promise<string> {
	return new Promise((resolve, reject) => {
		const rgProcess = childProcess.spawn(bin, args)
		// cross-platform alternative to head, which is ripgrep author's recommendation for limiting output.
		const rl = readline.createInterface({
			input: rgProcess.stdout,
			crlfDelay: Infinity, // treat \r\n as a single line break even if it's split across chunks. This ensures consistent behavior across different operating systems.
		})

		let output = ""
		let lineCount = 0
		const maxLines = MAX_RESULTS * 5 // limiting ripgrep output with max lines since there's no other way to limit results. it's okay that we're outputting as json, since we're parsing it line by line and ignore anything that's not part of a match. This assumes each result is at most 5 lines.

		rl.on("line", (line) => {
			if (lineCount < maxLines) {
				output += line + "\n"
				lineCount++
			} else {
				// Enough output collected: stop reading and terminate ripgrep early.
				rl.close()
				rgProcess.kill()
			}
		})

		let errorOutput = ""
		rgProcess.stderr.on("data", (data) => {
			errorOutput += data.toString()
		})
		rl.on("close", () => {
			// NOTE(review): any stderr text causes a rejection, even if ripgrep also
			// produced valid matches (e.g. permission warnings) — confirm intended.
			if (errorOutput) {
				reject(new Error(`ripgrep process error: ${errorOutput}`))
			} else {
				resolve(output)
			}
		})
		rgProcess.on("error", (error) => {
			// Spawn failures (e.g. missing or non-executable binary) land here.
			reject(new Error(`ripgrep process error: ${error.message}`))
		})
	})
}
|
||||
|
||||
/**
 * Searches all files under directoryPath with a regex using ripgrep, returning
 * matches (with one line of context) as a formatted string.
 *
 * @param directoryPath Root directory to search.
 * @param regex Pattern passed to ripgrep via `-e` (ripgrep/Rust regex syntax).
 * @param ripgrepPath Directory containing the ripgrep binary.
 * @returns A promise resolving to the formatted results, or "No results found"
 *          when ripgrep execution fails.
 * @throws Error when the ripgrep binary cannot be located.
 */
export async function regexSearchFilesWithRipgrep(
	directoryPath: string,
	regex: string,
	ripgrepPath: string,
): Promise<string> {
	const rgPath = await getBinPath(ripgrepPath)

	if (!rgPath) {
		throw new Error("Could not find ripgrep binary")
	}

	// use --glob param to exclude .obsidian directory
	const args = [
		"--json",
		"-e",
		regex,
		"--glob",
		"!.obsidian/**", // exclude .obsidian directory and all its subdirectories
		"--glob",
		"!.git/**",
		"--context",
		"1",
		directoryPath
	]

	let output: string
	try {
		output = await execRipgrep(rgPath, args)
	} catch (error) {
		console.error("Error executing ripgrep:", error)
		return "No results found"
	}
	// Parse ripgrep's JSON-lines output, folding "context" records into the
	// result built from the most recent "match" record.
	const results: SearchResult[] = []
	let currentResult: Partial<SearchResult> | null = null

	output.split("\n").forEach((line) => {
		if (line) {
			try {
				const parsed = JSON.parse(line)
				if (parsed.type === "match") {
					// A new match starts; flush the previous one.
					if (currentResult) {
						results.push(currentResult as SearchResult)
					}

					// Safety check: truncate extremely long lines to prevent excessive output
					const matchText = parsed.data.lines.text
					const truncatedMatch = truncateLine(matchText)

					currentResult = {
						file: parsed.data.path.text,
						line: parsed.data.line_number,
						column: parsed.data.submatches[0].start, // NOTE(review): 0-based, unlike line
						match: truncatedMatch,
						beforeContext: [],
						afterContext: [],
					}
				} else if (parsed.type === "context" && currentResult) {
					// Apply the same truncation logic to context lines
					const contextText = parsed.data.lines.text
					const truncatedContext = truncateLine(contextText)

					// NOTE(review): ripgrep emits a match's before-context *before* the
					// "match" record itself; at that point currentResult is still the
					// previous match, so such lines are appended to the previous
					// result's afterContext, and the first match's before-context is
					// dropped entirely (currentResult is null). beforeContext is
					// therefore rarely populated — confirm and fix with buffering.
					if (parsed.data.line_number < currentResult.line!) {
						currentResult.beforeContext!.push(truncatedContext)
					} else {
						currentResult.afterContext!.push(truncatedContext)
					}
				}
			} catch (error) {
				// Non-JSON lines (e.g. the truncated tail) are skipped with a log.
				console.error("Error parsing ripgrep output:", error)
			}
		}
	})

	// Flush the final in-progress result.
	if (currentResult) {
		results.push(currentResult as SearchResult)
	}

	return formatResults(results, directoryPath)
}
|
||||
63
src/core/search/search-common.ts
Normal file
63
src/core/search/search-common.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
import * as path from "path"
|
||||
|
||||
// Constants
// Hard cap on the number of search results included in formatted output.
export const MAX_RESULTS = 300
// Lines longer than this are truncated before being included in output.
export const MAX_LINE_LENGTH = 500
|
||||
|
||||
/**
|
||||
* Truncates a line if it exceeds the maximum length
|
||||
* @param line The line to truncate
|
||||
* @param maxLength The maximum allowed length (defaults to MAX_LINE_LENGTH)
|
||||
* @returns The truncated line, or the original line if it's shorter than maxLength
|
||||
*/
|
||||
export function truncateLine(line: string, maxLength: number = MAX_LINE_LENGTH): string {
|
||||
return line.length > maxLength ? line.substring(0, maxLength) + " [truncated...]" : line
|
||||
}
|
||||
|
||||
/** A single search hit plus its immediate surrounding lines. */
export interface SearchResult {
	// Path of the file containing the match.
	file: string
	// 1-based line number of the match.
	line: number
	// Column of the match. NOTE(review): producers disagree — the Omnisearch
	// backend stores it 1-based while ripgrep's submatch start is 0-based; confirm.
	column?: number
	// The matched line's text, possibly truncated.
	match?: string
	// Lines immediately before the match (may be empty).
	beforeContext: string[]
	// Lines immediately after the match (may be empty).
	afterContext: string[]
}
|
||||
|
||||
export function formatResults(results: SearchResult[], cwd: string): string {
|
||||
const groupedResults: { [key: string]: SearchResult[] } = {}
|
||||
|
||||
let output = ""
|
||||
if (results.length >= MAX_RESULTS) {
|
||||
output += `Showing first ${MAX_RESULTS} of ${MAX_RESULTS}+ results. Use a more specific search if necessary.\n\n`
|
||||
} else {
|
||||
output += `Found ${results.length === 1 ? "1 result" : `${results.length.toLocaleString()} results`}.\n\n`
|
||||
}
|
||||
|
||||
// Group results by file name
|
||||
results.slice(0, MAX_RESULTS).forEach((result) => {
|
||||
const relativeFilePath = path.relative(cwd, result.file)
|
||||
if (!groupedResults[relativeFilePath]) {
|
||||
groupedResults[relativeFilePath] = []
|
||||
}
|
||||
groupedResults[relativeFilePath].push(result)
|
||||
})
|
||||
|
||||
for (const [filePath, fileResults] of Object.entries(groupedResults)) {
|
||||
output += `${filePath.toPosix()}\n│----\n`
|
||||
|
||||
fileResults.forEach((result, index) => {
|
||||
const allLines = [...result.beforeContext, result.match, ...result.afterContext]
|
||||
allLines.forEach((line) => {
|
||||
output += `│${line?.trimEnd() ?? ""}\n`
|
||||
})
|
||||
|
||||
if (index < fileResults.length - 1) {
|
||||
output += "│----\n"
|
||||
}
|
||||
})
|
||||
|
||||
output += "│----\n\n"
|
||||
}
|
||||
|
||||
return output.trim()
|
||||
}
|
||||
Reference in New Issue
Block a user