abyss-ai
Version:
Autonomous AI coding agent — OpenCode enhanced with autonomous capabilities
115 lines (101 loc) • 3.78 kB
text/typescript
import { App } from "../app/app"
import { Log } from "../util/log"
export namespace FileTime {
  const log = Log.create({ service: "file.time" })

  // Identical reads arriving inside this window log at debug level instead of info.
  const DEDUPE_WINDOW_MS = 1000
  // Cached file content older than this is treated as stale.
  const CACHE_TTL_MS = 5000

  export const state = App.state("tool.filetimes", () => {
    // Last-read time per session per file path; consulted by assert() to
    // detect on-disk modifications between a read and a subsequent write.
    const read: {
      [sessionID: string]: {
        [path: string]: Date | undefined
      }
    } = {}
    // Short-lived content cache keyed the same way. Entries remember the
    // offset/limit they were read with so a partial read is never confused
    // with a full-file read.
    const readCache: {
      [sessionID: string]: {
        [path: string]: {
          content: string
          timestamp: Date
          offset?: number
          limit?: number
        }
      }
    } = {}
    return {
      read,
      readCache,
    }
  })

  /**
   * Record that `file` was read in session `sessionID`.
   *
   * The timestamp is refreshed on every call so assert() always compares
   * against the most recent read; only the info-level log line is
   * deduplicated for reads within DEDUPE_WINDOW_MS of the previous one.
   * (Previously the timestamp update itself was skipped inside the dedupe
   * window, which made assert() spuriously reject a write after a rapid
   * re-read of a file that had just been modified.)
   */
  export function read(sessionID: string, file: string) {
    const { read } = state()
    read[sessionID] = read[sessionID] || {}
    const previous = read[sessionID][file]
    const now = new Date()
    if (!previous || now.getTime() - previous.getTime() > DEDUPE_WINDOW_MS) {
      log.info("read", { sessionID, file })
    } else {
      log.debug("read (deduplicated)", { sessionID, file, lastRead: previous })
    }
    // Bug fix: always record the read time, even when the log is deduplicated.
    read[sessionID][file] = now
  }

  /** Return the last recorded read time of `file` in `sessionID`, or undefined. */
  export function get(sessionID: string, file: string) {
    return state().read[sessionID]?.[file]
  }

  /**
   * Throw unless `filepath` was read in this session and has not been
   * modified on disk since that read.
   *
   * @throws Error when the file was never read in this session, or when its
   *   mtime is newer than the recorded read time.
   */
  export async function assert(sessionID: string, filepath: string) {
    const time = get(sessionID, filepath)
    if (!time) throw new Error(`You must read the file ${filepath} before overwriting it. Use the Read tool first`)
    const stats = await Bun.file(filepath).stat()
    if (stats.mtime.getTime() > time.getTime()) {
      throw new Error(
        `File ${filepath} has been modified since it was last read.\nLast modification: ${stats.mtime.toISOString()}\nLast read: ${time.toISOString()}\n\nPlease read the file again before modifying it.`,
      )
    }
  }

  /**
   * Cache `content` for `file` so an immediate re-read can skip disk I/O.
   * `offset`/`limit` record the range the content covers (both undefined =
   * whole file); getCachedContent only returns a hit for the exact same range.
   */
  export function cacheFileContent(sessionID: string, file: string, content: string, offset?: number, limit?: number) {
    const { readCache } = state()
    readCache[sessionID] = readCache[sessionID] || {}
    readCache[sessionID][file] = {
      content,
      timestamp: new Date(),
      offset,
      limit,
    }
    log.debug("cached file content", { sessionID, file, contentLength: content.length })
  }

  /**
   * Return cached content for `file` when an entry exists, covers exactly the
   * requested offset/limit range, and is younger than CACHE_TTL_MS.
   * Returns null on any miss.
   */
  export function getCachedContent(sessionID: string, file: string, offset?: number, limit?: number): string | null {
    const { readCache } = state()
    const cached = readCache[sessionID]?.[file]
    if (!cached) {
      return null
    }
    // The range must match exactly: a partial read never satisfies a
    // full-file request, and vice versa.
    if (offset !== undefined || limit !== undefined) {
      if (cached.offset !== offset || cached.limit !== limit) {
        return null // Different range requested
      }
    } else if (cached.offset !== undefined || cached.limit !== undefined) {
      return null // Full file requested but cache has partial content
    }
    // The cache only papers over back-to-back reads, so expire quickly
    // rather than risk serving stale content.
    const now = new Date()
    if (now.getTime() - cached.timestamp.getTime() > CACHE_TTL_MS) {
      log.debug("cache expired", { sessionID, file })
      return null
    }
    log.debug("using cached content", { sessionID, file, contentLength: cached.content.length })
    return cached.content
  }

  /**
   * Drop cached content for a single file, or for every file in the session
   * when `file` is omitted. Read timestamps are unaffected.
   */
  export function invalidateCache(sessionID: string, file?: string) {
    const { readCache } = state()
    if (file) {
      // Invalidate specific file
      if (readCache[sessionID]) {
        delete readCache[sessionID][file]
        log.debug("invalidated cache for file", { sessionID, file })
      }
    } else {
      // Invalidate all files for session
      delete readCache[sessionID]
      log.debug("invalidated all cache for session", { sessionID })
    }
  }
}