Mirror of https://github.com/pacnpal/Roo-Code.git, synced 2025-12-20 04:11:10 -05:00
Globby level by level and truncate at 1000 results to keep recursive list_files efficient
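In short: the old recursive path issued a single globby("**") call that walked the entire workspace before anything was truncated, while the new path globs one directory level at a time and stops as soon as LIST_FILES_LIMIT (1000) entries have been collected. Below is a minimal sketch of the two approaches, for illustration only; the helper names listAllThenTruncate and listLevelByLevel are not part of the commit (the committed implementation is globbyLevelByLevel in the diff that follows).

```typescript
import { globby, Options } from "globby"

// Same cap the commit introduces in parse-source-code.
const LIST_FILES_LIMIT = 1000

// Old approach: one "**" glob traverses every directory up front, even though
// only the first LIST_FILES_LIMIT entries are ever shown.
async function listAllThenTruncate(options?: Options): Promise<string[]> {
	const everything = await globby("**", options)
	return everything.slice(0, LIST_FILES_LIMIT)
}

// New approach (the shape of the committed globbyLevelByLevel): widen the
// pattern one level per call ("*/*", "*/*/*", ...) and bail out once the cap
// is reached, so deep trees are never fully traversed.
async function listLevelByLevel(options?: Options): Promise<string[]> {
	let results: string[] = []
	for (let level = 1; results.length < LIST_FILES_LIMIT; level++) {
		const filesAtLevel = await globby(`${"*/".repeat(level)}*`, options)
		if (filesAtLevel.length === 0) break // nothing deeper to find
		results.push(...filesAtLevel)
	}
	return results.slice(0, LIST_FILES_LIMIT)
}
```

The early exit is what keeps recursive list_files cheap on large repositories: once the limit is reached, no deeper globby call is ever issued.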
@@ -12,7 +12,7 @@ import { serializeError } from "serialize-error"
 import treeKill from "tree-kill"
 import * as vscode from "vscode"
 import { ApiHandler, buildApiHandler } from "./api"
-import { listFiles, parseSourceCodeForDefinitionsTopLevel } from "./parse-source-code"
+import { LIST_FILES_LIMIT, listFiles, parseSourceCodeForDefinitionsTopLevel } from "./parse-source-code"
 import { ClaudeDevProvider } from "./providers/ClaudeDevProvider"
 import { ApiConfiguration } from "./shared/api"
 import { ClaudeRequestResult } from "./shared/ClaudeRequestResult"
@@ -1016,10 +1016,9 @@ export class ClaudeDev {
 				return a.localeCompare(b, undefined, { numeric: true, sensitivity: "base" })
 			})
 
-		if (sorted.length > 1000) {
-			const truncatedList = sorted.slice(0, 1000).join("\n")
-			const remainingCount = sorted.length - 1000
-			return `${truncatedList}\n\n(${remainingCount} files not listed due to automatic truncation. Try listing files in subdirectories if you need to explore further.)`
+		if (sorted.length >= LIST_FILES_LIMIT) {
+			const truncatedList = sorted.slice(0, LIST_FILES_LIMIT).join("\n")
+			return `${truncatedList}\n\n(Truncated at ${LIST_FILES_LIMIT} results. Try listing files in subdirectories if you need to explore further.)`
 		} else if (sorted.length === 0 || (sorted.length === 1 && sorted[0] === "")) {
 			return "No files found or you do not have permission to view this directory."
 		} else {

@@ -1,9 +1,11 @@
 import * as fs from "fs/promises"
-import { globby } from "globby"
+import { globby, Options } from "globby"
 import os from "os"
 import * as path from "path"
 import { LanguageParser, loadRequiredLanguageParsers } from "./languageParser"
 
+export const LIST_FILES_LIMIT = 1000
+
 // TODO: implement caching behavior to avoid having to keep analyzing project for new tasks.
 export async function parseSourceCodeForDefinitionsTopLevel(dirPath: string): Promise<string> {
 	// check if the path exists
@@ -96,10 +98,42 @@ export async function listFiles(dirPath: string, recursive: boolean): Promise<st
 		onlyFiles: false, // true by default, false means it will list directories on their own too
 	}
 	// * globs all files in one dir, ** globs files in nested directories
-	const files = await globby(recursive ? "**" : "*", options)
+	const files = recursive ? await globbyLevelByLevel(options) : await globby("*", options)
 	return files
 }
 
+// globby doesnt natively support top down level by level globbing, so we implement it ourselves
+async function globbyLevelByLevel(options?: Options) {
+	let results: string[] = []
+	let currentLevel = 1
+	while (results.length < LIST_FILES_LIMIT) {
+		// Construct the glob pattern for the current level
+		const pattern = `${"*/".repeat(currentLevel)}*`
+
+		// Get files and directories at the current level
+		const filesAtLevel = await globby(pattern, options)
+
+		// If no more files found at this level, break the loop
+		if (filesAtLevel.length === 0) {
+			break
+		}
+
+		// Add the files found at this level to the result
+		results.push(...filesAtLevel)
+
+		// If we have reached the max limit, slice the array to the limit and break
+		if (results.length >= LIST_FILES_LIMIT) {
+			results = results.slice(0, LIST_FILES_LIMIT)
+			break
+		}
+
+		// Move to the next level
+		currentLevel++
+	}
+
+	return results
+}
+
 function separateFiles(allFiles: string[]): { filesToParse: string[]; remainingFiles: string[] } {
 	const extensions = [
 		"js",
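For completeness, the caller-side change in the ClaudeDev hunk can be read as the self-contained function below. This is a paraphrase for illustration only: the surrounding method name is not shown in the hunk, so formatFilesList is just a label, and the final else branch is cut off at "} else {", so returning the plain sorted listing there is an assumption.

```typescript
const LIST_FILES_LIMIT = 1000

// Paraphrase of the new formatting branch from the ClaudeDev hunk above,
// not the verbatim source.
function formatFilesList(sorted: string[]): string {
	if (sorted.length >= LIST_FILES_LIMIT) {
		const truncatedList = sorted.slice(0, LIST_FILES_LIMIT).join("\n")
		return `${truncatedList}\n\n(Truncated at ${LIST_FILES_LIMIT} results. Try listing files in subdirectories if you need to explore further.)`
	} else if (sorted.length === 0 || (sorted.length === 1 && sorted[0] === "")) {
		return "No files found or you do not have permission to view this directory."
	} else {
		// Assumed: below the limit, return the full sorted listing as-is.
		return sorted.join("\n")
	}
}
```

Note that the comparison changed from `> 1000` to `>= LIST_FILES_LIMIT`: since globbyLevelByLevel caps its own output at the limit, a result of exactly 1000 entries may already be truncated, and the old per-count message was replaced by a generic truncation notice.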