4 changes: 4 additions & 0 deletions package.json
@@ -25,6 +25,8 @@
"license": "MIT",
"dependencies": {
"@ant-design/icons": "^6.0.0",
"@anthropic-ai/bedrock-sdk": "^0.27.0",
"@anthropic-ai/sdk": "^0.81.0",
"@date-fns/tz": "^1.4.1",
"@fontsource/fira-code": "^5.2.7",
"antd": "^6.0.0",
@@ -46,7 +48,9 @@
"react": "^18.0.0",
"react-dom": "^18.0.0",
"react-json-tree": "0.18.0",
"react-markdown": "^8.0.0",
"react-virtualized": "patch:react-virtualized@npm%3A9.22.5#~/.yarn/patches/react-virtualized-npm-9.22.5-be95b8e1a8.patch",
"remark-gfm": "^3.0.0",
"tmp": "0.2.4",
"tslib": "^2.8.1",
"update-electron-app": "^2.0.1",
43 changes: 43 additions & 0 deletions src/ai-interfaces.ts
@@ -0,0 +1,43 @@
export interface AiMessage {
role: 'user' | 'assistant';
content: string;
}

export interface SerializedLogEntry {
timestamp: string;
level: string;
message: string;
line: number;
sourceFile: string;
meta?: string;
repeated?: string[];
}

export interface SerializedLogFile {
fileName: string;
logType: string;
entryCount: number;
entries: SerializedLogEntry[];
}

export interface SerializedLogContext {
files: SerializedLogFile[];
stateFiles?: Array<{
fileName: string;
content: string;
}>;
}

export interface AiStreamChunkData {
requestId: string;
chunk: string;
}

export interface AiStreamDoneData {
requestId: string;
}

export interface AiStreamErrorData {
requestId: string;
error: string;
}
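
For orientation, a SerializedLogContext assembled from a single log file might look like the following sketch (illustrative values only; nothing below is part of the diff):

const exampleContext: SerializedLogContext = {
  files: [
    {
      fileName: 'browser.log',
      logType: 'browser',
      entryCount: 1,
      entries: [
        {
          timestamp: '2024-05-01T12:00:00.000Z',
          level: 'error',
          message: 'Failed to load preferences',
          line: 42,
          sourceFile: 'browser.log',
        },
      ],
    },
  ],
  stateFiles: [{ fileName: 'settings.json', content: '{}' }],
};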
9 changes: 9 additions & 0 deletions src/ipc-events.ts
@@ -32,4 +32,13 @@ export const enum IpcEvents {
READ_ANY_FILE = 'READ_ANY_FILE',
TRACE_CHECK_SOURCEMAP = 'TRACE_CHECK_SOURCEMAP',
OPEN_LINE_IN_SOURCE = 'OPEN_LINE_IN_SOURCE',
AI_SEND_MESSAGE = 'AI_SEND_MESSAGE',
AI_STREAM_CHUNK = 'AI_STREAM_CHUNK',
AI_STREAM_DONE = 'AI_STREAM_DONE',
AI_STREAM_ERROR = 'AI_STREAM_ERROR',
AI_ABORT = 'AI_ABORT',
AI_SHOW_DIRECTORY_PICKER = 'AI_SHOW_DIRECTORY_PICKER',
AI_SSO_LOGIN = 'AI_SSO_LOGIN',
AI_CHECK_AVAILABLE = 'AI_CHECK_AVAILABLE',
TOGGLE_AI_SIDEBAR = 'TOGGLE_AI_SIDEBAR',
}
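
A minimal renderer-side sketch of consuming the new streaming events, assuming the renderer can subscribe via Electron's ipcRenderer (the preload/renderer wiring and the import paths are assumptions, not part of this diff):

import { ipcRenderer } from 'electron';
import { IpcEvents } from '../ipc-events';
import type {
  AiStreamChunkData,
  AiStreamDoneData,
  AiStreamErrorData,
} from '../ai-interfaces';

// Accumulate streamed text per request so the UI can render partial answers.
const buffers = new Map<string, string>();

ipcRenderer.on(IpcEvents.AI_STREAM_CHUNK, (_event, data: AiStreamChunkData) => {
  buffers.set(data.requestId, (buffers.get(data.requestId) ?? '') + data.chunk);
});

ipcRenderer.on(IpcEvents.AI_STREAM_DONE, (_event, data: AiStreamDoneData) => {
  console.log('AI response complete:', buffers.get(data.requestId));
  buffers.delete(data.requestId);
});

ipcRenderer.on(IpcEvents.AI_STREAM_ERROR, (_event, data: AiStreamErrorData) => {
  console.error('AI request failed:', data.requestId, data.error);
  buffers.delete(data.requestId);
});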
62 changes: 62 additions & 0 deletions src/main/ai/ai-config.ts
@@ -0,0 +1,62 @@
import fs from 'node:fs';
import path from 'node:path';
import os from 'node:os';

const CONFIG_PATH = path.join(os.homedir(), '.config', 'sleuth', 'ai.json');

interface AiConfig {
fmaRole: string;
model: string;
}

let loadedConfig: AiConfig | null = null;

/**
* Read AI configuration from ~/.config/sleuth/ai.json.
*
* The config file is expected to contain:
* { "fmaRole": "<account>/<role>/<session>", "model": "<bedrock-model-id>" }
*
* Values are cached after first read. Environment variables take precedence
* when set, allowing developers to override without editing the file.
*/
function loadConfig(): AiConfig {
if (loadedConfig) return loadedConfig;

try {
const raw = fs.readFileSync(CONFIG_PATH, 'utf-8');
const parsed: unknown = JSON.parse(raw);

if (
parsed &&
typeof parsed === 'object' &&
'fmaRole' in parsed &&
'model' in parsed &&
typeof (parsed as AiConfig).fmaRole === 'string' &&
typeof (parsed as AiConfig).model === 'string'
) {
loadedConfig = parsed as AiConfig;
return loadedConfig;
}
} catch {
// Config file missing or malformed — fall through to empty defaults
}

loadedConfig = { fmaRole: '', model: '' };
return loadedConfig;
}

/** FMA role ARN, sourced from env var or config file. */
export function getFmaRole(): string {
return process.env.SLEUTH_AI_FMA_ROLE ?? loadConfig().fmaRole;
}

/** Bedrock model ID, sourced from env var or config file. */
export function getModel(): string {
return process.env.SLEUTH_AI_MODEL ?? loadConfig().model;
}

/** AWS region override (env-only, defaults to us-east-1). */
export function getAwsRegion(): string {
return process.env.SLEUTH_AI_AWS_REGION ?? 'us-east-1';
}
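
For reference, a ~/.config/sleuth/ai.json matching the documented shape could look like this (placeholder values; substitute your own role and whichever Bedrock model ID your account can invoke, or override both at runtime via SLEUTH_AI_FMA_ROLE and SLEUTH_AI_MODEL):

{
  "fmaRole": "123456789012/SleuthAiRole/sleuth-session",
  "model": "anthropic.claude-3-5-sonnet-20241022-v2:0"
}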
191 changes: 191 additions & 0 deletions src/main/ai/ai-service.ts
@@ -0,0 +1,191 @@
import { AnthropicBedrock } from '@anthropic-ai/bedrock-sdk';
import type { BrowserWindow } from 'electron';
import type {
ContentBlockParam,
MessageParam,
ToolUseBlock,
} from '@anthropic-ai/sdk/resources';

import { IpcEvents } from '../../ipc-events';
import type { AiMessage, SerializedLogContext } from '../../ai-interfaces';
import { buildSystemPrompt } from './log-context-formatter';
import {
CODEBASE_TOOL_DEFINITIONS,
LOG_TOOL_DEFINITIONS,
REPO_CONTEXT_TOOL_DEFINITIONS,
executeTools,
} from './tools';
import { getAwsCredentials, clearCredentialCache } from './aws-credentials';
import { getModel, getAwsRegion } from './ai-config';

export class AiService {
private client: AnthropicBedrock | null = null;
private activeRequests = new Map<string, AbortController>();

private async getClient(): Promise<AnthropicBedrock> {
// Always get fresh credentials (cached internally for 10 min)
const creds = await getAwsCredentials();

// Recreate the client on each call so it always uses the latest credentials
this.client = new AnthropicBedrock({
awsRegion: getAwsRegion(),
awsAccessKey: creds.accessKeyId,
awsSecretKey: creds.secretAccessKey,
awsSessionToken: creds.sessionToken,
});

return this.client;
}

async sendMessage(
window: BrowserWindow,
requestId: string,
messages: AiMessage[],
logContext: SerializedLogContext,
codebasePaths: string[],
): Promise<void> {
let client: AnthropicBedrock;
try {
client = await this.getClient();
} catch (error) {
if (!window.isDestroyed()) {
window.webContents.send(IpcEvents.AI_STREAM_ERROR, {
requestId,
error: error instanceof Error ? error.message : String(error),
});
}
return;
}

const controller = new AbortController();
this.activeRequests.set(requestId, controller);

try {
const systemPrompt = buildSystemPrompt(logContext);

// Always include log tools and repo context tools;
// include codebase tools only if paths are configured
const tools = [
...LOG_TOOL_DEFINITIONS,
...REPO_CONTEXT_TOOL_DEFINITIONS,
...(codebasePaths.length > 0 ? CODEBASE_TOOL_DEFINITIONS : []),
];

let currentMessages: MessageParam[] = messages.map((m) => ({
role: m.role,
content: m.content,
}));

// Cap tool-use iterations so a model that keeps asking for tools
// cannot loop forever (bounds cost and latency on pathological inputs).
const MAX_TOOL_ITERATIONS = 20;

// Tool-use loop
let hitToolLimit = false;
for (let iteration = 0; iteration < MAX_TOOL_ITERATIONS; iteration++) {
const stream = client.messages.stream(
{
model: getModel(),
max_tokens: 8192,
system: systemPrompt,
messages: currentMessages,
tools,
},
{ signal: controller.signal },
);

stream.on('text', (text) => {
if (!window.isDestroyed()) {
window.webContents.send(IpcEvents.AI_STREAM_CHUNK, {
requestId,
chunk: text,
});
}
});

const finalMessage = await stream.finalMessage();

const toolUseBlocks = finalMessage.content.filter(
(b) => b.type === 'tool_use',
);

if (
finalMessage.stop_reason === 'tool_use' &&
toolUseBlocks.length > 0
) {
// Notify renderer about tool calls
for (const block of toolUseBlocks) {
if (block.type === 'tool_use' && !window.isDestroyed()) {
window.webContents.send(IpcEvents.AI_STREAM_CHUNK, {
requestId,
chunk: `\n\n> *Using tool: ${block.name}*\n\n`,
});
}
}

// Execute tools and continue conversation
currentMessages.push({
role: 'assistant',
content: finalMessage.content as ContentBlockParam[],
});
const toolResults = await executeTools(
toolUseBlocks as ToolUseBlock[],
codebasePaths,
logContext,
);
currentMessages.push({
role: 'user',
content: toolResults,
});
} else {
// Done - no more tool calls
break;
}

if (iteration === MAX_TOOL_ITERATIONS - 1) {
hitToolLimit = true;
}
}

if (hitToolLimit && !window.isDestroyed()) {
window.webContents.send(IpcEvents.AI_STREAM_CHUNK, {
requestId,
chunk: `\n\n> *Stopped after ${MAX_TOOL_ITERATIONS} tool iterations to avoid runaway loops. Ask a follow-up if you'd like me to continue.*\n\n`,
});
}

if (!window.isDestroyed()) {
window.webContents.send(IpcEvents.AI_STREAM_DONE, { requestId });
}
} catch (error) {
const msg = error instanceof Error ? error.message : String(error);

// If we get an auth error mid-stream, clear the credential cache
if (
msg.includes('ExpiredToken') ||
msg.includes('InvalidSignature') ||
msg.includes('UnrecognizedClient') ||
msg.includes('403')
) {
clearCredentialCache();
}

if (!window.isDestroyed()) {
window.webContents.send(IpcEvents.AI_STREAM_ERROR, {
requestId,
error: msg,
});
}
} finally {
this.activeRequests.delete(requestId);
}
}

abort(requestId: string): void {
const controller = this.activeRequests.get(requestId);
if (controller) {
controller.abort();
this.activeRequests.delete(requestId);
}
}
}
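
The IPC registration itself is not part of this file; a minimal sketch of how the main process might route the new events to AiService (argument order assumed to mirror sendMessage; file location and import paths are placeholders):

import { BrowserWindow, ipcMain } from 'electron';
import { IpcEvents } from '../../ipc-events';
import type { AiMessage, SerializedLogContext } from '../../ai-interfaces';
import { AiService } from './ai-service';

const aiService = new AiService();

ipcMain.on(
  IpcEvents.AI_SEND_MESSAGE,
  (
    event,
    requestId: string,
    messages: AiMessage[],
    logContext: SerializedLogContext,
    codebasePaths: string[],
  ) => {
    const window = BrowserWindow.fromWebContents(event.sender);
    if (window) {
      // Fire and forget: results stream back via the AI_STREAM_* events.
      void aiService.sendMessage(window, requestId, messages, logContext, codebasePaths);
    }
  },
);

ipcMain.on(IpcEvents.AI_ABORT, (_event, requestId: string) => {
  aiService.abort(requestId);
});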