mirror of
https://github.com/QwenLM/qwen-code.git
synced 2026-05-05 23:42:03 +00:00
Merge remote-tracking branch 'origin/main' into feat/channels-telegram
This commit is contained in:
commit
811ccdd02d
101 changed files with 10988 additions and 1091 deletions
|
|
@ -701,7 +701,13 @@ export class CoreToolScheduler {
|
|||
// This check should happen before registry lookup to provide a clear permission error
|
||||
const pm = this.config.getPermissionManager?.();
|
||||
if (pm && !pm.isToolEnabled(reqInfo.name)) {
|
||||
const permissionErrorMessage = `Qwen Code requires permission to use "${reqInfo.name}", but that permission was declined.`;
|
||||
const matchingRule = pm.findMatchingDenyRule({
|
||||
toolName: reqInfo.name,
|
||||
});
|
||||
const ruleInfo = matchingRule
|
||||
? ` Matching deny rule: "${matchingRule}".`
|
||||
: '';
|
||||
const permissionErrorMessage = `Qwen Code requires permission to use "${reqInfo.name}", but that permission was declined.${ruleInfo}`;
|
||||
return {
|
||||
status: 'error',
|
||||
request: reqInfo,
|
||||
|
|
@ -914,10 +920,16 @@ export class CoreToolScheduler {
|
|||
|
||||
if (finalPermission === 'deny') {
|
||||
// Hard deny: security violation or PM explicit deny
|
||||
const denyMessage =
|
||||
defaultPermission === 'deny'
|
||||
? `Tool "${reqInfo.name}" is denied: command substitution is not allowed for security reasons.`
|
||||
: `Tool "${reqInfo.name}" is denied by permission rules.`;
|
||||
let denyMessage: string;
|
||||
if (defaultPermission === 'deny') {
|
||||
denyMessage = `Tool "${reqInfo.name}" is denied: command substitution is not allowed for security reasons.`;
|
||||
} else {
|
||||
const matchingRule = pm?.findMatchingDenyRule(pmCtx);
|
||||
const ruleInfo = matchingRule
|
||||
? ` Matching deny rule: "${matchingRule}".`
|
||||
: '';
|
||||
denyMessage = `Tool "${reqInfo.name}" is denied by permission rules.${ruleInfo}`;
|
||||
}
|
||||
this.setStatusInternal(
|
||||
reqInfo.callId,
|
||||
'error',
|
||||
|
|
@ -1002,7 +1014,7 @@ export class CoreToolScheduler {
|
|||
this.config.getInputFormat() !== InputFormat.STREAM_JSON;
|
||||
|
||||
if (shouldAutoDeny) {
|
||||
const errorMessage = `Qwen Code requires permission to use "${reqInfo.name}", but that permission was declined.`;
|
||||
const errorMessage = `Qwen Code requires permission to use "${reqInfo.name}", but that permission was declined (non-interactive mode cannot prompt for confirmation).`;
|
||||
this.setStatusInternal(
|
||||
reqInfo.callId,
|
||||
'error',
|
||||
|
|
|
|||
|
|
@ -755,6 +755,20 @@ describe('extension tests', () => {
|
|||
const id = getExtensionId(config, metadata);
|
||||
expect(id).toBe(hashValue('https://github.com/owner/repo'));
|
||||
});
|
||||
|
||||
it('should use source as-is for non-GitHub git URLs (e.g., GitLab)', () => {
|
||||
// For non-GitHub git servers, fall back to using the source URL directly
|
||||
const config: ExtensionConfig = { name: 'test-ext', version: '1.0.0' };
|
||||
const metadata = {
|
||||
type: 'git' as const,
|
||||
source: 'https://gitlab.company.com/team/extension-repo',
|
||||
};
|
||||
|
||||
const id = getExtensionId(config, metadata);
|
||||
expect(id).toBe(
|
||||
hashValue('https://gitlab.company.com/team/extension-repo'),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
|||
|
|
@ -1373,12 +1373,18 @@ export function getExtensionId(
|
|||
installMetadata?: ExtensionInstallMetadata,
|
||||
): string {
|
||||
let idValue = config.name;
|
||||
const githubUrlParts =
|
||||
let githubUrlParts = null;
|
||||
if (
|
||||
installMetadata &&
|
||||
(installMetadata.type === 'git' ||
|
||||
installMetadata.type === 'github-release')
|
||||
? parseGitHubRepoForReleases(installMetadata.source)
|
||||
: null;
|
||||
) {
|
||||
try {
|
||||
githubUrlParts = parseGitHubRepoForReleases(installMetadata.source);
|
||||
} catch {
|
||||
// Non-GitHub URL (GitLab, Bitbucket, etc.) - use source as-is
|
||||
}
|
||||
}
|
||||
if (githubUrlParts) {
|
||||
idValue = `https://github.com/${githubUrlParts.owner}/${githubUrlParts.repo}`;
|
||||
} else {
|
||||
|
|
|
|||
|
|
@ -7,7 +7,8 @@
|
|||
import { type VariableSchema, VARIABLE_SCHEMA } from './variableSchema.js';
|
||||
import path from 'node:path';
|
||||
import { QWEN_DIR } from '../config/storage.js';
|
||||
import type { HookEventName, HookDefinition } from '../hooks/types.js';
|
||||
import type { HookDefinition } from '../hooks/types.js';
|
||||
import type { HookEventName } from '../hooks/types.js';
|
||||
import * as fs from 'node:fs';
|
||||
import { glob } from 'glob';
|
||||
import { createDebugLogger } from '../utils/debugLogger.js';
|
||||
|
|
@ -15,7 +16,7 @@ import { createDebugLogger } from '../utils/debugLogger.js';
|
|||
const debugLogger = createDebugLogger('Extension:variables');
|
||||
|
||||
// Re-export types for substituteHookVariables
|
||||
export type { HookEventName, HookDefinition };
|
||||
export type { HookDefinition };
|
||||
|
||||
export const EXTENSIONS_DIRECTORY_NAME = path.join(QWEN_DIR, 'extensions');
|
||||
export const EXTENSIONS_CONFIG_FILENAME = 'qwen-extension.json';
|
||||
|
|
|
|||
|
|
@ -25,6 +25,13 @@ import type {
|
|||
AggregatedHookResult,
|
||||
} from './index.js';
|
||||
import type { HookConfig, HookOutput, PermissionSuggestion } from './types.js';
|
||||
import type { HookExecutionResult } from './types.js';
|
||||
import { logHookCall } from '../telemetry/loggers.js';
|
||||
|
||||
// Mock the telemetry loggers module
|
||||
vi.mock('../telemetry/loggers.js', () => ({
|
||||
logHookCall: vi.fn(),
|
||||
}));
|
||||
|
||||
describe('HookEventHandler', () => {
|
||||
let mockConfig: Config;
|
||||
|
|
@ -2246,4 +2253,361 @@ describe('HookEventHandler', () => {
|
|||
expect(input.stop_hook_active).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('telemetry', () => {
|
||||
const createMockHookExecutionResult = (
|
||||
success: boolean,
|
||||
hookConfig: HookConfig,
|
||||
duration: number = 100,
|
||||
output?: HookOutput,
|
||||
error?: Error,
|
||||
): HookExecutionResult => ({
|
||||
hookConfig,
|
||||
eventName: HookEventName.PreToolUse,
|
||||
success,
|
||||
output,
|
||||
stdout: 'stdout',
|
||||
stderr: success ? undefined : 'stderr',
|
||||
exitCode: success ? 0 : 1,
|
||||
duration,
|
||||
error,
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
vi.mocked(logHookCall).mockClear();
|
||||
});
|
||||
|
||||
it('should call logHookCall for each hook execution', async () => {
|
||||
const hookConfig1: HookConfig = {
|
||||
type: HookType.Command,
|
||||
command: 'hook1.sh',
|
||||
name: 'first-hook',
|
||||
source: HooksConfigSource.Project,
|
||||
};
|
||||
const hookConfig2: HookConfig = {
|
||||
type: HookType.Command,
|
||||
command: 'hook2.sh',
|
||||
name: 'second-hook',
|
||||
source: HooksConfigSource.Project,
|
||||
};
|
||||
|
||||
const mockPlan = createMockExecutionPlan([hookConfig1, hookConfig2]);
|
||||
vi.mocked(mockHookPlanner.createExecutionPlan).mockReturnValue(mockPlan);
|
||||
|
||||
const result1 = createMockHookExecutionResult(true, hookConfig1, 50);
|
||||
const result2 = createMockHookExecutionResult(true, hookConfig2, 75);
|
||||
vi.mocked(mockHookRunner.executeHooksParallel).mockResolvedValue([
|
||||
result1,
|
||||
result2,
|
||||
]);
|
||||
vi.mocked(mockHookAggregator.aggregateResults).mockReturnValue(
|
||||
createMockAggregatedResult(true),
|
||||
);
|
||||
|
||||
await hookEventHandler.fireUserPromptSubmitEvent('test prompt');
|
||||
|
||||
expect(logHookCall).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it('should log hook call with correct event name', async () => {
|
||||
const hookConfig: HookConfig = {
|
||||
type: HookType.Command,
|
||||
command: 'test.sh',
|
||||
source: HooksConfigSource.Project,
|
||||
};
|
||||
|
||||
const mockPlan = createMockExecutionPlan([hookConfig]);
|
||||
vi.mocked(mockHookPlanner.createExecutionPlan).mockReturnValue(mockPlan);
|
||||
|
||||
const result = createMockHookExecutionResult(true, hookConfig);
|
||||
vi.mocked(mockHookRunner.executeHooksParallel).mockResolvedValue([
|
||||
result,
|
||||
]);
|
||||
vi.mocked(mockHookAggregator.aggregateResults).mockReturnValue(
|
||||
createMockAggregatedResult(true),
|
||||
);
|
||||
|
||||
await hookEventHandler.firePreToolUseEvent(
|
||||
'read_file',
|
||||
{ path: '/test' },
|
||||
'tool-123',
|
||||
PermissionMode.Default,
|
||||
);
|
||||
|
||||
expect(logHookCall).toHaveBeenCalledWith(
|
||||
mockConfig,
|
||||
expect.objectContaining({
|
||||
hook_event_name: HookEventName.PreToolUse,
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should log hook call with hook name from config', async () => {
|
||||
const hookConfig: HookConfig = {
|
||||
type: HookType.Command,
|
||||
command: '/path/to/my-hook.sh',
|
||||
name: 'my-custom-hook',
|
||||
source: HooksConfigSource.Project,
|
||||
};
|
||||
|
||||
const mockPlan = createMockExecutionPlan([hookConfig]);
|
||||
vi.mocked(mockHookPlanner.createExecutionPlan).mockReturnValue(mockPlan);
|
||||
|
||||
const result = createMockHookExecutionResult(true, hookConfig);
|
||||
vi.mocked(mockHookRunner.executeHooksParallel).mockResolvedValue([
|
||||
result,
|
||||
]);
|
||||
vi.mocked(mockHookAggregator.aggregateResults).mockReturnValue(
|
||||
createMockAggregatedResult(true),
|
||||
);
|
||||
|
||||
await hookEventHandler.fireUserPromptSubmitEvent('test');
|
||||
|
||||
expect(logHookCall).toHaveBeenCalledWith(
|
||||
mockConfig,
|
||||
expect.objectContaining({
|
||||
hook_name: 'my-custom-hook',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should log hook call with command as name when no name specified', async () => {
|
||||
const hookConfig: HookConfig = {
|
||||
type: HookType.Command,
|
||||
command: '/path/to/hook-script.sh',
|
||||
source: HooksConfigSource.Project,
|
||||
};
|
||||
|
||||
const mockPlan = createMockExecutionPlan([hookConfig]);
|
||||
vi.mocked(mockHookPlanner.createExecutionPlan).mockReturnValue(mockPlan);
|
||||
|
||||
const result = createMockHookExecutionResult(true, hookConfig);
|
||||
vi.mocked(mockHookRunner.executeHooksParallel).mockResolvedValue([
|
||||
result,
|
||||
]);
|
||||
vi.mocked(mockHookAggregator.aggregateResults).mockReturnValue(
|
||||
createMockAggregatedResult(true),
|
||||
);
|
||||
|
||||
await hookEventHandler.fireUserPromptSubmitEvent('test');
|
||||
|
||||
expect(logHookCall).toHaveBeenCalledWith(
|
||||
mockConfig,
|
||||
expect.objectContaining({
|
||||
hook_name: '/path/to/hook-script.sh',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should log hook call with duration', async () => {
|
||||
const hookConfig: HookConfig = {
|
||||
type: HookType.Command,
|
||||
command: 'test.sh',
|
||||
source: HooksConfigSource.Project,
|
||||
};
|
||||
|
||||
const mockPlan = createMockExecutionPlan([hookConfig]);
|
||||
vi.mocked(mockHookPlanner.createExecutionPlan).mockReturnValue(mockPlan);
|
||||
|
||||
const duration = 250;
|
||||
const result = createMockHookExecutionResult(true, hookConfig, duration);
|
||||
vi.mocked(mockHookRunner.executeHooksParallel).mockResolvedValue([
|
||||
result,
|
||||
]);
|
||||
vi.mocked(mockHookAggregator.aggregateResults).mockReturnValue(
|
||||
createMockAggregatedResult(true),
|
||||
);
|
||||
|
||||
await hookEventHandler.fireUserPromptSubmitEvent('test');
|
||||
|
||||
expect(logHookCall).toHaveBeenCalledWith(
|
||||
mockConfig,
|
||||
expect.objectContaining({
|
||||
duration_ms: duration,
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should log hook call with success status', async () => {
|
||||
const hookConfig: HookConfig = {
|
||||
type: HookType.Command,
|
||||
command: 'test.sh',
|
||||
source: HooksConfigSource.Project,
|
||||
};
|
||||
|
||||
const mockPlan = createMockExecutionPlan([hookConfig]);
|
||||
vi.mocked(mockHookPlanner.createExecutionPlan).mockReturnValue(mockPlan);
|
||||
|
||||
const result = createMockHookExecutionResult(true, hookConfig);
|
||||
vi.mocked(mockHookRunner.executeHooksParallel).mockResolvedValue([
|
||||
result,
|
||||
]);
|
||||
vi.mocked(mockHookAggregator.aggregateResults).mockReturnValue(
|
||||
createMockAggregatedResult(true),
|
||||
);
|
||||
|
||||
await hookEventHandler.fireUserPromptSubmitEvent('test');
|
||||
|
||||
expect(logHookCall).toHaveBeenCalledWith(
|
||||
mockConfig,
|
||||
expect.objectContaining({
|
||||
success: true,
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should log hook call with failure status', async () => {
|
||||
const hookConfig: HookConfig = {
|
||||
type: HookType.Command,
|
||||
command: 'failing-hook.sh',
|
||||
source: HooksConfigSource.Project,
|
||||
};
|
||||
|
||||
const mockPlan = createMockExecutionPlan([hookConfig]);
|
||||
vi.mocked(mockHookPlanner.createExecutionPlan).mockReturnValue(mockPlan);
|
||||
|
||||
const result = createMockHookExecutionResult(
|
||||
false,
|
||||
hookConfig,
|
||||
100,
|
||||
undefined,
|
||||
new Error('Hook failed'),
|
||||
);
|
||||
vi.mocked(mockHookRunner.executeHooksParallel).mockResolvedValue([
|
||||
result,
|
||||
]);
|
||||
vi.mocked(mockHookAggregator.aggregateResults).mockReturnValue(
|
||||
createMockAggregatedResult(false),
|
||||
);
|
||||
|
||||
await hookEventHandler.fireUserPromptSubmitEvent('test');
|
||||
|
||||
expect(logHookCall).toHaveBeenCalledWith(
|
||||
mockConfig,
|
||||
expect.objectContaining({
|
||||
success: false,
|
||||
error: 'Hook failed',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should log hook call with exit code', async () => {
|
||||
const hookConfig: HookConfig = {
|
||||
type: HookType.Command,
|
||||
command: 'test.sh',
|
||||
source: HooksConfigSource.Project,
|
||||
};
|
||||
|
||||
const mockPlan = createMockExecutionPlan([hookConfig]);
|
||||
vi.mocked(mockHookPlanner.createExecutionPlan).mockReturnValue(mockPlan);
|
||||
|
||||
const result = createMockHookExecutionResult(true, hookConfig);
|
||||
result.exitCode = 0;
|
||||
vi.mocked(mockHookRunner.executeHooksParallel).mockResolvedValue([
|
||||
result,
|
||||
]);
|
||||
vi.mocked(mockHookAggregator.aggregateResults).mockReturnValue(
|
||||
createMockAggregatedResult(true),
|
||||
);
|
||||
|
||||
await hookEventHandler.fireUserPromptSubmitEvent('test');
|
||||
|
||||
expect(logHookCall).toHaveBeenCalledWith(
|
||||
mockConfig,
|
||||
expect.objectContaining({
|
||||
exit_code: 0,
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should log hook call with hook type', async () => {
|
||||
const hookConfig: HookConfig = {
|
||||
type: HookType.Command,
|
||||
command: 'test.sh',
|
||||
source: HooksConfigSource.Project,
|
||||
};
|
||||
|
||||
const mockPlan = createMockExecutionPlan([hookConfig]);
|
||||
vi.mocked(mockHookPlanner.createExecutionPlan).mockReturnValue(mockPlan);
|
||||
|
||||
const result = createMockHookExecutionResult(true, hookConfig);
|
||||
vi.mocked(mockHookRunner.executeHooksParallel).mockResolvedValue([
|
||||
result,
|
||||
]);
|
||||
vi.mocked(mockHookAggregator.aggregateResults).mockReturnValue(
|
||||
createMockAggregatedResult(true),
|
||||
);
|
||||
|
||||
await hookEventHandler.fireUserPromptSubmitEvent('test');
|
||||
|
||||
expect(logHookCall).toHaveBeenCalledWith(
|
||||
mockConfig,
|
||||
expect.objectContaining({
|
||||
hook_type: 'command',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should not call logHookCall when no hooks are configured', async () => {
|
||||
const mockPlan = createMockExecutionPlan([]);
|
||||
vi.mocked(mockHookPlanner.createExecutionPlan).mockReturnValue(mockPlan);
|
||||
|
||||
await hookEventHandler.fireUserPromptSubmitEvent('test');
|
||||
|
||||
expect(logHookCall).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should log telemetry for different event types', async () => {
|
||||
const hookConfig: HookConfig = {
|
||||
type: HookType.Command,
|
||||
command: 'test.sh',
|
||||
source: HooksConfigSource.Project,
|
||||
};
|
||||
|
||||
const mockPlan = createMockExecutionPlan([hookConfig]);
|
||||
vi.mocked(mockHookPlanner.createExecutionPlan).mockReturnValue(mockPlan);
|
||||
|
||||
const result = createMockHookExecutionResult(true, hookConfig);
|
||||
vi.mocked(mockHookRunner.executeHooksParallel).mockResolvedValue([
|
||||
result,
|
||||
]);
|
||||
vi.mocked(mockHookAggregator.aggregateResults).mockReturnValue(
|
||||
createMockAggregatedResult(true),
|
||||
);
|
||||
|
||||
// Test SessionStart
|
||||
await hookEventHandler.fireSessionStartEvent(
|
||||
SessionStartSource.Startup,
|
||||
'test-model',
|
||||
);
|
||||
expect(logHookCall).toHaveBeenCalledWith(
|
||||
mockConfig,
|
||||
expect.objectContaining({
|
||||
hook_event_name: HookEventName.SessionStart,
|
||||
}),
|
||||
);
|
||||
|
||||
vi.mocked(logHookCall).mockClear();
|
||||
|
||||
// Test SessionEnd
|
||||
await hookEventHandler.fireSessionEndEvent(SessionEndReason.Clear);
|
||||
expect(logHookCall).toHaveBeenCalledWith(
|
||||
mockConfig,
|
||||
expect.objectContaining({
|
||||
hook_event_name: HookEventName.SessionEnd,
|
||||
}),
|
||||
);
|
||||
|
||||
vi.mocked(logHookCall).mockClear();
|
||||
|
||||
// Test Stop
|
||||
await hookEventHandler.fireStopEvent(true, 'last message');
|
||||
expect(logHookCall).toHaveBeenCalledWith(
|
||||
mockConfig,
|
||||
expect.objectContaining({
|
||||
hook_event_name: HookEventName.Stop,
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -34,6 +34,8 @@ import type {
|
|||
} from './types.js';
|
||||
import { PermissionMode } from './types.js';
|
||||
import { createDebugLogger } from '../utils/debugLogger.js';
|
||||
import { logHookCall } from '../telemetry/loggers.js';
|
||||
import { HookCallEvent } from '../telemetry/types.js';
|
||||
|
||||
const debugLogger = createDebugLogger('TRUSTED_HOOKS');
|
||||
|
||||
|
|
@ -415,12 +417,18 @@ export class HookEventHandler {
|
|||
};
|
||||
}
|
||||
|
||||
const onHookStart = (_config: HookConfig, _index: number) => {
|
||||
// Hook start event (telemetry removed)
|
||||
const onHookStart = (config: HookConfig, index: number) => {
|
||||
const hookName = this.getHookName(config);
|
||||
debugLogger.debug(
|
||||
`Hook ${hookName} started for event ${eventName} (${index + 1}/${plan.hookConfigs.length})`,
|
||||
);
|
||||
};
|
||||
|
||||
const onHookEnd = (_config: HookConfig, _result: HookExecutionResult) => {
|
||||
// Hook end event (telemetry removed)
|
||||
const onHookEnd = (config: HookConfig, result: HookExecutionResult) => {
|
||||
const hookName = this.getHookName(config);
|
||||
debugLogger.debug(
|
||||
`Hook ${hookName} ended for event ${eventName}: ${result.success ? 'success' : 'failed'}`,
|
||||
);
|
||||
};
|
||||
|
||||
// Execute hooks according to the plan's strategy
|
||||
|
|
@ -451,6 +459,9 @@ export class HookEventHandler {
|
|||
// Process common hook output fields centrally
|
||||
this.processCommonHookOutputFields(aggregated);
|
||||
|
||||
// Log hook execution for telemetry
|
||||
this.logHookExecution(eventName, input, results, aggregated);
|
||||
|
||||
return aggregated;
|
||||
} catch (error) {
|
||||
debugLogger.error(`Hook event bus error for ${eventName}: ${error}`);
|
||||
|
|
@ -496,8 +507,6 @@ export class HookEventHandler {
|
|||
debugLogger.warn(`Hook system message: ${systemMessage}`);
|
||||
}
|
||||
|
||||
// Handle suppressOutput - already handled by not logging above when true
|
||||
|
||||
// Handle continue=false - this should stop the entire agent execution
|
||||
if (aggregated.finalOutput.continue === false) {
|
||||
const stopReason =
|
||||
|
|
@ -505,10 +514,84 @@ export class HookEventHandler {
|
|||
aggregated.finalOutput.reason ||
|
||||
'No reason provided';
|
||||
debugLogger.debug(`Hook requested to stop execution: ${stopReason}`);
|
||||
|
||||
// Note: The actual stopping of execution must be handled by integration points
|
||||
// as they need to interpret this signal in the context of their specific workflow
|
||||
// This is just logging the request centrally
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Log hook execution for observability
|
||||
*/
|
||||
private logHookExecution(
|
||||
eventName: HookEventName,
|
||||
input: HookInput,
|
||||
results: HookExecutionResult[],
|
||||
aggregated: AggregatedHookResult,
|
||||
): void {
|
||||
const failedHooks = results.filter((r) => !r.success);
|
||||
const successCount = results.length - failedHooks.length;
|
||||
const errorCount = failedHooks.length;
|
||||
|
||||
if (errorCount > 0) {
|
||||
const failedNames = failedHooks
|
||||
.map((r) => this.getHookNameFromResult(r))
|
||||
.join(', ');
|
||||
|
||||
debugLogger.warn(
|
||||
`Hook(s) [${failedNames}] failed for event ${eventName}. Check debug logs for more details.`,
|
||||
);
|
||||
} else {
|
||||
debugLogger.debug(
|
||||
`Hook execution for ${eventName}: ${successCount} hooks executed successfully, ` +
|
||||
`total duration: ${aggregated.totalDuration}ms`,
|
||||
);
|
||||
}
|
||||
|
||||
for (const result of results) {
|
||||
const hookName = this.getHookNameFromResult(result);
|
||||
const hookType = this.getHookTypeFromResult(result);
|
||||
|
||||
const hookCallEvent = new HookCallEvent(
|
||||
eventName,
|
||||
hookType,
|
||||
hookName,
|
||||
{ ...input },
|
||||
result.duration,
|
||||
result.success,
|
||||
result.output ? { ...result.output } : undefined,
|
||||
result.exitCode,
|
||||
result.stdout,
|
||||
result.stderr,
|
||||
result.error?.message,
|
||||
);
|
||||
|
||||
logHookCall(this.config, hookCallEvent);
|
||||
}
|
||||
|
||||
for (const error of aggregated.errors) {
|
||||
debugLogger.warn(`Hook execution error: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get hook name from config for display or telemetry
|
||||
*/
|
||||
private getHookName(config: HookConfig): string {
|
||||
if (config.type === 'command') {
|
||||
return config.name || config.command || 'unknown-command';
|
||||
}
|
||||
return config.name || 'unknown-hook';
|
||||
}
|
||||
|
||||
/**
|
||||
* Get hook name from execution result for telemetry
|
||||
*/
|
||||
private getHookNameFromResult(result: HookExecutionResult): string {
|
||||
return this.getHookName(result.hookConfig);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get hook type from execution result for telemetry
|
||||
*/
|
||||
private getHookTypeFromResult(result: HookExecutionResult): 'command' {
|
||||
return result.hookConfig.type as 'command';
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -127,7 +127,6 @@ export * from './ide/types.js';
|
|||
export * from './lsp/constants.js';
|
||||
export * from './lsp/LspConfigLoader.js';
|
||||
export * from './lsp/LspConnectionFactory.js';
|
||||
export * from './lsp/LspLanguageDetector.js';
|
||||
export * from './lsp/LspResponseNormalizer.js';
|
||||
export * from './lsp/LspServerManager.js';
|
||||
export * from './lsp/NativeLspClient.js';
|
||||
|
|
|
|||
|
|
@ -9,6 +9,104 @@ import mock from 'mock-fs';
|
|||
import { LspConfigLoader } from './LspConfigLoader.js';
|
||||
import type { Extension } from '../extension/extensionManager.js';
|
||||
|
||||
describe('LspConfigLoader config-driven behavior', () => {
|
||||
const workspaceRoot = '/workspace';
|
||||
|
||||
it('does not generate any presets when no user or extension config provided', () => {
|
||||
const loader = new LspConfigLoader(workspaceRoot);
|
||||
// Even if languages are detected, no built-in presets should be generated
|
||||
const configs = loader.mergeConfigs(['java', 'cpp', 'typescript'], [], []);
|
||||
|
||||
expect(configs).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('respects user-provided configs via .lsp.json', () => {
|
||||
const loader = new LspConfigLoader(workspaceRoot);
|
||||
const userConfigs = [
|
||||
{
|
||||
name: 'jdtls',
|
||||
languages: ['java'],
|
||||
command: 'jdtls',
|
||||
args: [],
|
||||
transport: 'stdio' as const,
|
||||
initializationOptions: {},
|
||||
rootUri: 'file:///workspace',
|
||||
workspaceFolder: workspaceRoot,
|
||||
trustRequired: true,
|
||||
},
|
||||
];
|
||||
|
||||
const configs = loader.mergeConfigs(['java'], [], userConfigs);
|
||||
|
||||
expect(configs).toHaveLength(1);
|
||||
expect(configs[0]?.name).toBe('jdtls');
|
||||
expect(configs[0]?.languages).toEqual(['java']);
|
||||
});
|
||||
|
||||
it('respects extension-provided configs', () => {
|
||||
const loader = new LspConfigLoader(workspaceRoot);
|
||||
const extensionConfigs = [
|
||||
{
|
||||
name: 'clangd',
|
||||
languages: ['cpp', 'c'],
|
||||
command: 'clangd',
|
||||
args: ['--background-index'],
|
||||
transport: 'stdio' as const,
|
||||
initializationOptions: {},
|
||||
rootUri: 'file:///workspace',
|
||||
workspaceFolder: workspaceRoot,
|
||||
trustRequired: true,
|
||||
},
|
||||
];
|
||||
|
||||
const configs = loader.mergeConfigs(['cpp'], extensionConfigs, []);
|
||||
|
||||
expect(configs).toHaveLength(1);
|
||||
expect(configs[0]?.name).toBe('clangd');
|
||||
expect(configs[0]?.command).toBe('clangd');
|
||||
});
|
||||
|
||||
it('user configs override extension configs with same name', () => {
|
||||
const loader = new LspConfigLoader(workspaceRoot);
|
||||
const extensionConfigs = [
|
||||
{
|
||||
name: 'jdtls',
|
||||
languages: ['java'],
|
||||
command: 'jdtls',
|
||||
args: [],
|
||||
transport: 'stdio' as const,
|
||||
initializationOptions: {},
|
||||
rootUri: 'file:///workspace',
|
||||
workspaceFolder: workspaceRoot,
|
||||
trustRequired: true,
|
||||
},
|
||||
];
|
||||
const userConfigs = [
|
||||
{
|
||||
name: 'jdtls',
|
||||
languages: ['java'],
|
||||
command: '/custom/path/jdtls',
|
||||
args: ['--custom-flag'],
|
||||
transport: 'stdio' as const,
|
||||
initializationOptions: {},
|
||||
rootUri: 'file:///workspace',
|
||||
workspaceFolder: workspaceRoot,
|
||||
trustRequired: true,
|
||||
},
|
||||
];
|
||||
|
||||
const configs = loader.mergeConfigs(
|
||||
['java'],
|
||||
extensionConfigs,
|
||||
userConfigs,
|
||||
);
|
||||
|
||||
expect(configs).toHaveLength(1);
|
||||
expect(configs[0]?.command).toBe('/custom/path/jdtls');
|
||||
expect(configs[0]?.args).toEqual(['--custom-flag']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('LspConfigLoader extension configs', () => {
|
||||
const workspaceRoot = '/workspace';
|
||||
const extensionPath = '/extensions/ts-plugin';
|
||||
|
|
|
|||
|
|
@ -106,18 +106,17 @@ export class LspConfigLoader {
|
|||
}
|
||||
|
||||
/**
|
||||
* Merge configs: built-in presets + extension configs + user configs
|
||||
* Merge configs: extension configs + user configs
|
||||
* Note: Built-in presets are disabled. LSP servers must be explicitly configured
|
||||
* by the user via .lsp.json or through extensions.
|
||||
*/
|
||||
mergeConfigs(
|
||||
detectedLanguages: string[],
|
||||
_detectedLanguages: string[],
|
||||
extensionConfigs: LspServerConfig[],
|
||||
userConfigs: LspServerConfig[],
|
||||
): LspServerConfig[] {
|
||||
// Built-in preset configurations
|
||||
const presets = this.getBuiltInPresets(detectedLanguages);
|
||||
|
||||
// Merge configs, user configs take priority
|
||||
const mergedConfigs = [...presets];
|
||||
const mergedConfigs: LspServerConfig[] = [];
|
||||
|
||||
const applyConfigs = (configs: LspServerConfig[]) => {
|
||||
for (const config of configs) {
|
||||
|
|
@ -161,71 +160,6 @@ export class LspConfigLoader {
|
|||
return overrides;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get built-in preset configurations
|
||||
*/
|
||||
private getBuiltInPresets(detectedLanguages: string[]): LspServerConfig[] {
|
||||
const presets: LspServerConfig[] = [];
|
||||
|
||||
// Convert directory path to file URI format
|
||||
const rootUri = pathToFileURL(this.workspaceRoot).toString();
|
||||
|
||||
// Generate corresponding LSP server config based on detected languages
|
||||
if (
|
||||
detectedLanguages.includes('typescript') ||
|
||||
detectedLanguages.includes('javascript')
|
||||
) {
|
||||
presets.push({
|
||||
name: 'typescript-language-server',
|
||||
languages: [
|
||||
'typescript',
|
||||
'javascript',
|
||||
'typescriptreact',
|
||||
'javascriptreact',
|
||||
],
|
||||
command: 'typescript-language-server',
|
||||
args: ['--stdio'],
|
||||
transport: 'stdio',
|
||||
initializationOptions: {},
|
||||
rootUri,
|
||||
workspaceFolder: this.workspaceRoot,
|
||||
trustRequired: true,
|
||||
});
|
||||
}
|
||||
|
||||
if (detectedLanguages.includes('python')) {
|
||||
presets.push({
|
||||
name: 'pylsp',
|
||||
languages: ['python'],
|
||||
command: 'pylsp',
|
||||
args: [],
|
||||
transport: 'stdio',
|
||||
initializationOptions: {},
|
||||
rootUri,
|
||||
workspaceFolder: this.workspaceRoot,
|
||||
trustRequired: true,
|
||||
});
|
||||
}
|
||||
|
||||
if (detectedLanguages.includes('go')) {
|
||||
presets.push({
|
||||
name: 'gopls',
|
||||
languages: ['go'],
|
||||
command: 'gopls',
|
||||
args: [],
|
||||
transport: 'stdio',
|
||||
initializationOptions: {},
|
||||
rootUri,
|
||||
workspaceFolder: this.workspaceRoot,
|
||||
trustRequired: true,
|
||||
});
|
||||
}
|
||||
|
||||
// Additional language presets can be added as needed
|
||||
|
||||
return presets;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse configuration source and extract server configs.
|
||||
* Expects basic format keyed by language identifier.
|
||||
|
|
|
|||
|
|
@ -1,226 +0,0 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Qwen Team
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
/**
|
||||
* LSP Language Detector
|
||||
*
|
||||
* Detects programming languages in a workspace by analyzing file extensions
|
||||
* and root marker files (e.g., package.json, tsconfig.json).
|
||||
*/
|
||||
|
||||
import * as fs from 'node:fs';
|
||||
import * as path from 'path';
|
||||
import { globSync } from 'glob';
|
||||
import type { FileDiscoveryService } from '../services/fileDiscoveryService.js';
|
||||
import type { WorkspaceContext } from '../utils/workspaceContext.js';
|
||||
|
||||
/**
|
||||
* Extension to language ID mapping
|
||||
*/
|
||||
const DEFAULT_EXTENSION_TO_LANGUAGE: Record<string, string> = {
|
||||
js: 'javascript',
|
||||
ts: 'typescript',
|
||||
jsx: 'javascriptreact',
|
||||
tsx: 'typescriptreact',
|
||||
py: 'python',
|
||||
go: 'go',
|
||||
rs: 'rust',
|
||||
java: 'java',
|
||||
cpp: 'cpp',
|
||||
c: 'c',
|
||||
php: 'php',
|
||||
rb: 'ruby',
|
||||
cs: 'csharp',
|
||||
vue: 'vue',
|
||||
svelte: 'svelte',
|
||||
html: 'html',
|
||||
css: 'css',
|
||||
json: 'json',
|
||||
yaml: 'yaml',
|
||||
yml: 'yaml',
|
||||
};
|
||||
|
||||
/**
|
||||
* Root marker file to language ID mapping
|
||||
*/
|
||||
const MARKER_TO_LANGUAGE: Record<string, string> = {
|
||||
'package.json': 'javascript',
|
||||
'tsconfig.json': 'typescript',
|
||||
'pyproject.toml': 'python',
|
||||
'go.mod': 'go',
|
||||
'Cargo.toml': 'rust',
|
||||
'pom.xml': 'java',
|
||||
'build.gradle': 'java',
|
||||
'composer.json': 'php',
|
||||
Gemfile: 'ruby',
|
||||
'*.sln': 'csharp',
|
||||
'mix.exs': 'elixir',
|
||||
'deno.json': 'deno',
|
||||
};
|
||||
|
||||
/**
|
||||
* Common root marker files to look for
|
||||
*/
|
||||
const COMMON_MARKERS = [
|
||||
'package.json',
|
||||
'tsconfig.json',
|
||||
'pyproject.toml',
|
||||
'go.mod',
|
||||
'Cargo.toml',
|
||||
'pom.xml',
|
||||
'build.gradle',
|
||||
'composer.json',
|
||||
'Gemfile',
|
||||
'mix.exs',
|
||||
'deno.json',
|
||||
];
|
||||
|
||||
/**
|
||||
* Default exclude patterns for file search
|
||||
*/
|
||||
const DEFAULT_EXCLUDE_PATTERNS = [
|
||||
'**/node_modules/**',
|
||||
'**/.git/**',
|
||||
'**/dist/**',
|
||||
'**/build/**',
|
||||
];
|
||||
|
||||
/**
|
||||
* Detects programming languages in a workspace.
|
||||
*/
|
||||
export class LspLanguageDetector {
|
||||
constructor(
|
||||
private readonly workspaceContext: WorkspaceContext,
|
||||
private readonly fileDiscoveryService: FileDiscoveryService,
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Detect programming languages in workspace by analyzing files and markers.
|
||||
* Returns languages sorted by frequency (most common first).
|
||||
*
|
||||
* @param extensionOverrides - Custom extension to language mappings
|
||||
* @returns Array of detected language IDs
|
||||
*/
|
||||
async detectLanguages(
|
||||
extensionOverrides: Record<string, string> = {},
|
||||
): Promise<string[]> {
|
||||
const extensionMap = this.getExtensionToLanguageMap(extensionOverrides);
|
||||
const extensions = Object.keys(extensionMap);
|
||||
const patterns =
|
||||
extensions.length > 0 ? [`**/*.{${extensions.join(',')}}`] : ['**/*'];
|
||||
|
||||
const files = new Set<string>();
|
||||
const searchRoots = this.workspaceContext.getDirectories();
|
||||
|
||||
for (const root of searchRoots) {
|
||||
for (const pattern of patterns) {
|
||||
try {
|
||||
const matches = globSync(pattern, {
|
||||
cwd: root,
|
||||
ignore: DEFAULT_EXCLUDE_PATTERNS,
|
||||
absolute: true,
|
||||
nodir: true,
|
||||
});
|
||||
|
||||
for (const match of matches) {
|
||||
if (this.fileDiscoveryService.shouldIgnoreFile(match)) {
|
||||
continue;
|
||||
}
|
||||
files.add(match);
|
||||
}
|
||||
} catch {
|
||||
// Ignore glob errors for missing/invalid directories
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Count files per language
|
||||
const languageCounts = new Map<string, number>();
|
||||
for (const file of Array.from(files)) {
|
||||
const ext = path.extname(file).slice(1).toLowerCase();
|
||||
if (ext) {
|
||||
const lang = this.mapExtensionToLanguage(ext, extensionMap);
|
||||
if (lang) {
|
||||
languageCounts.set(lang, (languageCounts.get(lang) || 0) + 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Also detect languages via root marker files
|
||||
const rootMarkers = await this.detectRootMarkers();
|
||||
for (const marker of rootMarkers) {
|
||||
const lang = this.mapMarkerToLanguage(marker);
|
||||
if (lang) {
|
||||
// Give higher weight to config files
|
||||
const currentCount = languageCounts.get(lang) || 0;
|
||||
languageCounts.set(lang, currentCount + 100);
|
||||
}
|
||||
}
|
||||
|
||||
// Return languages sorted by count (descending)
|
||||
return Array.from(languageCounts.entries())
|
||||
.sort((a, b) => b[1] - a[1])
|
||||
.map(([lang]) => lang);
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect root marker files in workspace directories
|
||||
*/
|
||||
private async detectRootMarkers(): Promise<string[]> {
|
||||
const markers = new Set<string>();
|
||||
|
||||
for (const root of this.workspaceContext.getDirectories()) {
|
||||
for (const marker of COMMON_MARKERS) {
|
||||
try {
|
||||
const fullPath = path.join(root, marker);
|
||||
if (fs.existsSync(fullPath)) {
|
||||
markers.add(marker);
|
||||
}
|
||||
} catch {
|
||||
// ignore missing files
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(markers);
|
||||
}
|
||||
|
||||
/**
|
||||
* Map file extension to programming language ID
|
||||
*/
|
||||
private mapExtensionToLanguage(
|
||||
ext: string,
|
||||
extensionMap: Record<string, string>,
|
||||
): string | null {
|
||||
return extensionMap[ext] || null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get extension to language mapping with overrides applied
|
||||
*/
|
||||
private getExtensionToLanguageMap(
|
||||
extensionOverrides: Record<string, string> = {},
|
||||
): Record<string, string> {
|
||||
const extToLang = { ...DEFAULT_EXTENSION_TO_LANGUAGE };
|
||||
|
||||
for (const [key, value] of Object.entries(extensionOverrides)) {
|
||||
const normalized = key.startsWith('.') ? key.slice(1) : key;
|
||||
if (!normalized) {
|
||||
continue;
|
||||
}
|
||||
extToLang[normalized.toLowerCase()] = value;
|
||||
}
|
||||
|
||||
return extToLang;
|
||||
}
|
||||
|
||||
/**
|
||||
* Map root marker file to programming language ID
|
||||
*/
|
||||
private mapMarkerToLanguage(marker: string): string | null {
|
||||
return MARKER_TO_LANGUAGE[marker] || null;
|
||||
}
|
||||
}
|
||||
|
|
@ -522,12 +522,21 @@ export class LspResponseNormalizer {
|
|||
itemObj['range'] ??
|
||||
undefined) as { start?: unknown; end?: unknown } | undefined;
|
||||
|
||||
if (!locationObj['uri'] || !range?.start || !range?.end) {
|
||||
// Only require uri; range is optional per LSP 3.17 WorkspaceSymbol spec
|
||||
// where location may be { uri } without a range.
|
||||
if (!locationObj['uri']) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const start = range.start as { line?: number; character?: number };
|
||||
const end = range.end as { line?: number; character?: number };
|
||||
// LSP 3.17 WorkspaceSymbol format may have location with only uri (no range).
|
||||
// Servers like jdtls use this format, requiring a workspaceSymbol/resolve call
|
||||
// for the full range. Default to file start when range is absent.
|
||||
const start = (range?.start as
|
||||
| { line?: number; character?: number }
|
||||
| undefined) ?? { line: 0, character: 0 };
|
||||
const end = (range?.end as
|
||||
| { line?: number; character?: number }
|
||||
| undefined) ?? { line: 0, character: 0 };
|
||||
|
||||
return {
|
||||
name: (itemObj['name'] ?? itemObj['label'] ?? 'symbol') as string,
|
||||
|
|
|
|||
|
|
@ -94,20 +94,24 @@ export class LspServerManager {
|
|||
/**
|
||||
* Ensure tsserver has at least one file open so navto/navtree requests succeed.
|
||||
* Sets warmedUp flag only after successful warm-up to allow retry on failure.
|
||||
*
|
||||
* @param handle - The LSP server handle
|
||||
* @param force - Force re-warmup even if already warmed up
|
||||
* @returns The URI of the file opened during warmup, or undefined if no file was opened
|
||||
*/
|
||||
async warmupTypescriptServer(
|
||||
handle: LspServerHandle,
|
||||
force = false,
|
||||
): Promise<void> {
|
||||
): Promise<string | undefined> {
|
||||
if (!handle.connection || !this.isTypescriptServer(handle)) {
|
||||
return;
|
||||
return undefined;
|
||||
}
|
||||
if (handle.warmedUp && !force) {
|
||||
return;
|
||||
return undefined;
|
||||
}
|
||||
const tsFile = this.findFirstTypescriptFile();
|
||||
if (!tsFile) {
|
||||
return;
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const uri = pathToFileURL(tsFile).toString();
|
||||
|
|
@ -138,9 +142,11 @@ export class LspServerManager {
|
|||
);
|
||||
// Only mark as warmed up after successful completion
|
||||
handle.warmedUp = true;
|
||||
return uri;
|
||||
} catch (error) {
|
||||
// Do not set warmedUp to true on failure, allowing retry
|
||||
debugLogger.warn('TypeScript server warm-up failed:', error);
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -559,40 +565,22 @@ export class LspServerManager {
|
|||
});
|
||||
}
|
||||
|
||||
// Warm up TypeScript server by opening a workspace file so it can create a project.
|
||||
if (
|
||||
config.name.includes('typescript') ||
|
||||
(config.command?.includes('typescript') ?? false)
|
||||
) {
|
||||
try {
|
||||
const tsFile = this.findFirstTypescriptFile();
|
||||
if (tsFile) {
|
||||
const uri = pathToFileURL(tsFile).toString();
|
||||
const languageId = tsFile.endsWith('.tsx')
|
||||
? 'typescriptreact'
|
||||
: 'typescript';
|
||||
const text = fs.readFileSync(tsFile, 'utf-8');
|
||||
connection.connection.send({
|
||||
jsonrpc: '2.0',
|
||||
method: 'textDocument/didOpen',
|
||||
params: {
|
||||
textDocument: {
|
||||
uri,
|
||||
languageId,
|
||||
version: 1,
|
||||
text,
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
debugLogger.warn('TypeScript LSP warm-up failed:', error);
|
||||
}
|
||||
}
|
||||
// Note: TypeScript server warm-up is handled by warmupTypescriptServer()
|
||||
// which is called before every LSP request. This avoids duplicate
|
||||
// textDocument/didOpen notifications that aren't tracked in openedDocuments.
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if command exists
|
||||
* Check if command exists by spawning it with --version.
|
||||
* Only returns false when the spawn itself fails (e.g. ENOENT).
|
||||
* A timeout means the process started successfully (command exists)
|
||||
* but didn't exit in time — common for servers like jdtls that
|
||||
* don't support --version and start their full runtime instead.
|
||||
*
|
||||
* @param command - The command to check
|
||||
* @param env - Optional environment variables
|
||||
* @param cwd - Optional working directory
|
||||
* @returns true if the command can be spawned, false if not found
|
||||
*/
|
||||
private async commandExists(
|
||||
command: string,
|
||||
|
|
@ -616,16 +604,20 @@ export class LspServerManager {
|
|||
if (settled) {
|
||||
return;
|
||||
}
|
||||
// If command exists, it typically returns 0 or other non-error codes
|
||||
// Some commands with --version may return non-0, but won't throw error
|
||||
resolve(code !== 127); // 127 typically indicates command not found
|
||||
settled = true;
|
||||
// 127 typically indicates command not found in shell
|
||||
resolve(code !== 127);
|
||||
});
|
||||
|
||||
// Set timeout to avoid long waits
|
||||
// If the process is still running after the timeout, it means the
|
||||
// command was found and started — it just didn't finish in time.
|
||||
// This is expected for servers like jdtls that don't support --version.
|
||||
setTimeout(() => {
|
||||
settled = true;
|
||||
child.kill();
|
||||
resolve(false);
|
||||
if (!settled) {
|
||||
settled = true;
|
||||
child.kill();
|
||||
resolve(true);
|
||||
}
|
||||
}, DEFAULT_LSP_COMMAND_CHECK_TIMEOUT_MS);
|
||||
});
|
||||
}
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -27,8 +27,12 @@ import type {
|
|||
LspWorkspaceEdit,
|
||||
} from './types.js';
|
||||
import type { EventEmitter } from 'events';
|
||||
import {
|
||||
DEFAULT_LSP_DOCUMENT_OPEN_DELAY_MS,
|
||||
DEFAULT_LSP_DOCUMENT_RETRY_DELAY_MS,
|
||||
DEFAULT_LSP_WORKSPACE_SYMBOL_WARMUP_DELAY_MS,
|
||||
} from './constants.js';
|
||||
import { LspConfigLoader } from './LspConfigLoader.js';
|
||||
import { LspLanguageDetector } from './LspLanguageDetector.js';
|
||||
import { LspResponseNormalizer } from './LspResponseNormalizer.js';
|
||||
import { LspServerManager } from './LspServerManager.js';
|
||||
import type {
|
||||
|
|
@ -38,12 +42,36 @@ import type {
|
|||
NativeLspServiceOptions,
|
||||
} from './types.js';
|
||||
import * as path from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { fileURLToPath, pathToFileURL } from 'url';
|
||||
import * as fs from 'node:fs';
|
||||
import { createDebugLogger } from '../utils/debugLogger.js';
|
||||
import { globSync } from 'glob';
|
||||
|
||||
const debugLogger = createDebugLogger('LSP');
|
||||
|
||||
/**
|
||||
* Mapping from LSP language identifiers to file extensions, only for cases
|
||||
* where the language ID does NOT match the file extension directly.
|
||||
* Languages whose ID is already a valid extension (e.g. "cpp", "java", "go")
|
||||
* are handled by the fallback in getWorkspaceSymbolExtensions().
|
||||
*/
|
||||
const LANGUAGE_ID_TO_EXTENSIONS: Record<string, string[]> = {
|
||||
typescript: ['ts', 'tsx'],
|
||||
typescriptreact: ['tsx'],
|
||||
javascript: ['js', 'jsx'],
|
||||
javascriptreact: ['jsx'],
|
||||
python: ['py'],
|
||||
csharp: ['cs'],
|
||||
ruby: ['rb'],
|
||||
};
|
||||
|
||||
const DEFAULT_EXCLUDE_PATTERNS = [
|
||||
'**/node_modules/**',
|
||||
'**/.git/**',
|
||||
'**/dist/**',
|
||||
'**/build/**',
|
||||
];
|
||||
|
||||
export class NativeLspService {
|
||||
private config: CoreConfig;
|
||||
private workspaceContext: WorkspaceContext;
|
||||
|
|
@ -52,8 +80,9 @@ export class NativeLspService {
|
|||
private workspaceRoot: string;
|
||||
private configLoader: LspConfigLoader;
|
||||
private serverManager: LspServerManager;
|
||||
private languageDetector: LspLanguageDetector;
|
||||
private normalizer: LspResponseNormalizer;
|
||||
private openedDocuments = new Map<string, Set<string>>();
|
||||
private lastConnections = new Map<string, LspConnectionInterface>();
|
||||
|
||||
constructor(
|
||||
config: CoreConfig,
|
||||
|
|
@ -71,10 +100,6 @@ export class NativeLspService {
|
|||
options.workspaceRoot ??
|
||||
(config as { getProjectRoot: () => string }).getProjectRoot();
|
||||
this.configLoader = new LspConfigLoader(this.workspaceRoot);
|
||||
this.languageDetector = new LspLanguageDetector(
|
||||
this.workspaceContext,
|
||||
this.fileDiscoveryService,
|
||||
);
|
||||
this.normalizer = new LspResponseNormalizer();
|
||||
this.serverManager = new LspServerManager(
|
||||
this.config,
|
||||
|
|
@ -102,22 +127,14 @@ export class NativeLspService {
|
|||
return;
|
||||
}
|
||||
|
||||
// Detect languages in workspace
|
||||
// Load LSP configs
|
||||
const userConfigs = await this.configLoader.loadUserConfigs();
|
||||
const extensionConfigs = await this.configLoader.loadExtensionConfigs(
|
||||
this.getActiveExtensions(),
|
||||
);
|
||||
const extensionOverrides =
|
||||
this.configLoader.collectExtensionToLanguageOverrides([
|
||||
...extensionConfigs,
|
||||
...userConfigs,
|
||||
]);
|
||||
const detectedLanguages =
|
||||
await this.languageDetector.detectLanguages(extensionOverrides);
|
||||
|
||||
// Merge configs: built-in presets + extension LSP configs + user .lsp.json
|
||||
// Merge configs: extension LSP configs + user .lsp.json
|
||||
const serverConfigs = this.configLoader.mergeConfigs(
|
||||
detectedLanguages,
|
||||
[],
|
||||
extensionConfigs,
|
||||
userConfigs,
|
||||
);
|
||||
|
|
@ -177,6 +194,264 @@ export class NativeLspService {
|
|||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure a document is open on the given LSP server. Sends textDocument/didOpen
|
||||
* if not already tracked, then waits for the server to process the file before
|
||||
* returning. This delay prevents empty results when the server hasn't analyzed
|
||||
* the file yet.
|
||||
*
|
||||
* @param serverName - The name of the LSP server
|
||||
* @param handle - The server handle with an active connection
|
||||
* @param uri - The document URI to open
|
||||
* @returns true if a new didOpen was sent; false if already open or failed
|
||||
*/
|
||||
private async ensureDocumentOpen(
|
||||
serverName: string,
|
||||
handle: LspServerHandle & { connection: LspConnectionInterface },
|
||||
uri: string,
|
||||
): Promise<boolean> {
|
||||
const lastConnection = this.lastConnections.get(serverName);
|
||||
if (lastConnection && lastConnection !== handle.connection) {
|
||||
this.openedDocuments.delete(serverName);
|
||||
}
|
||||
this.lastConnections.set(serverName, handle.connection);
|
||||
|
||||
if (!uri.startsWith('file://')) {
|
||||
return false;
|
||||
}
|
||||
const openedForServer = this.openedDocuments.get(serverName);
|
||||
if (openedForServer?.has(uri)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
let filePath: string;
|
||||
try {
|
||||
filePath = fileURLToPath(uri);
|
||||
} catch (error) {
|
||||
debugLogger.warn(`Failed to resolve file path for ${uri}:`, error);
|
||||
return false;
|
||||
}
|
||||
|
||||
let text: string;
|
||||
try {
|
||||
text = fs.readFileSync(filePath, 'utf-8');
|
||||
} catch (error) {
|
||||
debugLogger.warn(
|
||||
`Failed to read file for LSP didOpen: ${filePath}`,
|
||||
error,
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
const languageId = this.resolveLanguageId(filePath, handle) ?? 'plaintext';
|
||||
|
||||
handle.connection.send({
|
||||
jsonrpc: '2.0',
|
||||
method: 'textDocument/didOpen',
|
||||
params: {
|
||||
textDocument: {
|
||||
uri,
|
||||
languageId,
|
||||
version: 1,
|
||||
text,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const nextOpened = openedForServer ?? new Set<string>();
|
||||
nextOpened.add(uri);
|
||||
this.openedDocuments.set(serverName, nextOpened);
|
||||
|
||||
// Wait for the LSP server to process the newly opened document.
|
||||
// Without this delay, requests sent immediately after didOpen may return
|
||||
// empty results because the server hasn't finished analyzing the file.
|
||||
await this.delay(DEFAULT_LSP_DOCUMENT_OPEN_DELAY_MS);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a URI that was opened externally (e.g. by warmupTypescriptServer)
|
||||
* so that ensureDocumentOpen does not send a duplicate textDocument/didOpen.
|
||||
*
|
||||
* @param serverName - The name of the LSP server
|
||||
* @param uri - The document URI to track as already opened
|
||||
*/
|
||||
private trackExternallyOpenedDocument(serverName: string, uri: string): void {
|
||||
const openedForServer =
|
||||
this.openedDocuments.get(serverName) ?? new Set<string>();
|
||||
openedForServer.add(uri);
|
||||
this.openedDocuments.set(serverName, openedForServer);
|
||||
}
|
||||
|
||||
private resolveLanguageId(
|
||||
filePath: string,
|
||||
handle: LspServerHandle,
|
||||
): string | undefined {
|
||||
const ext = path.extname(filePath).slice(1).toLowerCase();
|
||||
if (ext && handle.config.extensionToLanguage) {
|
||||
const mapping = handle.config.extensionToLanguage;
|
||||
return mapping[ext] ?? mapping['.' + ext];
|
||||
}
|
||||
if (handle.config.languages && handle.config.languages.length > 0) {
|
||||
return handle.config.languages[0];
|
||||
}
|
||||
return ext || undefined;
|
||||
}
|
||||
|
||||
private async warmupWorkspaceSymbols(
|
||||
serverName: string,
|
||||
handle: LspServerHandle,
|
||||
): Promise<boolean> {
|
||||
if (!handle.connection) {
|
||||
return false;
|
||||
}
|
||||
const openedForServer = this.openedDocuments.get(serverName);
|
||||
if (openedForServer && openedForServer.size > 0) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const filePath = this.findWorkspaceFileForServer(handle);
|
||||
if (!filePath) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const uri = pathToFileURL(filePath).toString();
|
||||
const didOpen = await this.ensureDocumentOpen(
|
||||
serverName,
|
||||
handle as LspServerHandle & { connection: LspConnectionInterface },
|
||||
uri,
|
||||
);
|
||||
if (!didOpen) {
|
||||
return false;
|
||||
}
|
||||
await this.delay(DEFAULT_LSP_WORKSPACE_SYMBOL_WARMUP_DELAY_MS);
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the first source file in the workspace that matches the server's
|
||||
* language extensions. Used to open a file for workspace symbol warmup.
|
||||
*
|
||||
* @param handle - The LSP server handle to determine target extensions
|
||||
* @returns Absolute path of the first matching file, or undefined
|
||||
*/
|
||||
private findWorkspaceFileForServer(
|
||||
handle: LspServerHandle,
|
||||
): string | undefined {
|
||||
const extensions = this.getWorkspaceSymbolExtensions(handle);
|
||||
if (extensions.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
// Brace expansion requires at least 2 items; use plain glob for a single ext
|
||||
const extGlob =
|
||||
extensions.length === 1 ? extensions[0]! : `{${extensions.join(',')}}`;
|
||||
const pattern = `**/*.${extGlob}`;
|
||||
const roots = this.workspaceContext.getDirectories();
|
||||
|
||||
for (const root of roots) {
|
||||
try {
|
||||
// Use maxDepth to avoid scanning deeply nested directories;
|
||||
// we only need one file to trigger server indexing.
|
||||
const matches = globSync(pattern, {
|
||||
cwd: root,
|
||||
ignore: DEFAULT_EXCLUDE_PATTERNS,
|
||||
absolute: true,
|
||||
nodir: true,
|
||||
maxDepth: 5,
|
||||
});
|
||||
for (const match of matches) {
|
||||
if (this.fileDiscoveryService.shouldIgnoreFile(match)) {
|
||||
continue;
|
||||
}
|
||||
return match;
|
||||
}
|
||||
} catch (_error) {
|
||||
// ignore glob errors
|
||||
}
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine file extensions this server can handle, used to find a workspace
|
||||
* file to open for warmup. Resolution order:
|
||||
* 1. Keys from config.extensionToLanguage (explicit user/extension mapping)
|
||||
* 2. Derived from config.languages via LANGUAGE_ID_TO_EXTENSIONS, falling
|
||||
* back to treating the language ID itself as a file extension
|
||||
*/
|
||||
private getWorkspaceSymbolExtensions(handle: LspServerHandle): string[] {
|
||||
const extensions = new Set<string>();
|
||||
|
||||
// Prefer explicit extension-to-language mapping from server config
|
||||
const extMapping = handle.config.extensionToLanguage;
|
||||
if (extMapping) {
|
||||
for (const key of Object.keys(extMapping)) {
|
||||
const normalized = key.startsWith('.') ? key.slice(1) : key;
|
||||
if (normalized) {
|
||||
extensions.add(normalized.toLowerCase());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Fall back to deriving extensions from language identifiers
|
||||
if (extensions.size === 0) {
|
||||
for (const language of handle.config.languages) {
|
||||
const mapped = LANGUAGE_ID_TO_EXTENSIONS[language];
|
||||
if (mapped) {
|
||||
for (const ext of mapped) {
|
||||
extensions.add(ext);
|
||||
}
|
||||
} else {
|
||||
// For languages like "cpp", "java", "go", "rust" etc.,
|
||||
// the language ID itself is a valid file extension
|
||||
extensions.add(language.toLowerCase());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(extensions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Run TypeScript server warmup and track the opened URI to prevent
|
||||
* duplicate didOpen notifications.
|
||||
*
|
||||
* @param serverName - The name of the LSP server
|
||||
* @param handle - The server handle
|
||||
* @param force - Force re-warmup even if already warmed up
|
||||
*/
|
||||
private async warmupAndTrack(
|
||||
serverName: string,
|
||||
handle: LspServerHandle,
|
||||
force = false,
|
||||
): Promise<void> {
|
||||
const warmupUri = await this.serverManager.warmupTypescriptServer(
|
||||
handle,
|
||||
force,
|
||||
);
|
||||
if (warmupUri) {
|
||||
this.trackExternallyOpenedDocument(serverName, warmupUri);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Whether we should retry a document-level operation that returned empty
|
||||
* results. We retry when a textDocument/didOpen was just sent (the server
|
||||
* may still be indexing) AND the server is not a fast TypeScript server.
|
||||
*/
|
||||
private shouldRetryAfterOpen(
|
||||
justOpened: boolean,
|
||||
handle: LspServerHandle,
|
||||
): boolean {
|
||||
return justOpened && !this.serverManager.isTypescriptServer(handle);
|
||||
}
|
||||
|
||||
private async delay(ms: number): Promise<void> {
|
||||
await new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
/**
|
||||
* Workspace symbol search across all ready LSP servers.
|
||||
*/
|
||||
|
|
@ -193,15 +468,29 @@ export class NativeLspService {
|
|||
continue;
|
||||
}
|
||||
try {
|
||||
await this.serverManager.warmupTypescriptServer(handle);
|
||||
await this.warmupAndTrack(serverName, handle);
|
||||
const warmedUp = this.serverManager.isTypescriptServer(handle)
|
||||
? false
|
||||
: await this.warmupWorkspaceSymbols(serverName, handle);
|
||||
let response = await handle.connection.request('workspace/symbol', {
|
||||
query,
|
||||
});
|
||||
if (
|
||||
!this.serverManager.isTypescriptServer(handle) &&
|
||||
Array.isArray(response) &&
|
||||
response.length === 0 &&
|
||||
warmedUp
|
||||
) {
|
||||
await this.delay(DEFAULT_LSP_WORKSPACE_SYMBOL_WARMUP_DELAY_MS);
|
||||
response = await handle.connection.request('workspace/symbol', {
|
||||
query,
|
||||
});
|
||||
}
|
||||
if (
|
||||
this.serverManager.isTypescriptServer(handle) &&
|
||||
this.isNoProjectErrorResponse(response)
|
||||
) {
|
||||
await this.serverManager.warmupTypescriptServer(handle, true);
|
||||
await this.warmupAndTrack(serverName, handle, true);
|
||||
response = await handle.connection.request('workspace/symbol', {
|
||||
query,
|
||||
});
|
||||
|
|
@ -241,17 +530,36 @@ export class NativeLspService {
|
|||
limit = 50,
|
||||
): Promise<LspDefinition[]> {
|
||||
const handles = this.getReadyHandles(serverName);
|
||||
const requestParams = {
|
||||
textDocument: { uri: location.uri },
|
||||
position: location.range.start,
|
||||
};
|
||||
|
||||
for (const [name, handle] of handles) {
|
||||
try {
|
||||
await this.serverManager.warmupTypescriptServer(handle);
|
||||
const response = await handle.connection.request(
|
||||
'textDocument/definition',
|
||||
{
|
||||
textDocument: { uri: location.uri },
|
||||
position: location.range.start,
|
||||
},
|
||||
const justOpened = await this.ensureDocumentOpen(
|
||||
name,
|
||||
handle,
|
||||
location.uri,
|
||||
);
|
||||
await this.warmupAndTrack(name, handle);
|
||||
|
||||
let response = await handle.connection.request(
|
||||
'textDocument/definition',
|
||||
requestParams,
|
||||
);
|
||||
|
||||
if (
|
||||
this.isEmptyResponse(response) &&
|
||||
this.shouldRetryAfterOpen(justOpened, handle)
|
||||
) {
|
||||
await this.delay(DEFAULT_LSP_DOCUMENT_RETRY_DELAY_MS);
|
||||
response = await handle.connection.request(
|
||||
'textDocument/definition',
|
||||
requestParams,
|
||||
);
|
||||
}
|
||||
|
||||
const candidates = Array.isArray(response)
|
||||
? response
|
||||
: response
|
||||
|
|
@ -291,18 +599,37 @@ export class NativeLspService {
|
|||
limit = 200,
|
||||
): Promise<LspReference[]> {
|
||||
const handles = this.getReadyHandles(serverName);
|
||||
const requestParams = {
|
||||
textDocument: { uri: location.uri },
|
||||
position: location.range.start,
|
||||
context: { includeDeclaration },
|
||||
};
|
||||
|
||||
for (const [name, handle] of handles) {
|
||||
try {
|
||||
await this.serverManager.warmupTypescriptServer(handle);
|
||||
const response = await handle.connection.request(
|
||||
'textDocument/references',
|
||||
{
|
||||
textDocument: { uri: location.uri },
|
||||
position: location.range.start,
|
||||
context: { includeDeclaration },
|
||||
},
|
||||
const justOpened = await this.ensureDocumentOpen(
|
||||
name,
|
||||
handle,
|
||||
location.uri,
|
||||
);
|
||||
await this.warmupAndTrack(name, handle);
|
||||
|
||||
let response = await handle.connection.request(
|
||||
'textDocument/references',
|
||||
requestParams,
|
||||
);
|
||||
|
||||
if (
|
||||
this.isEmptyResponse(response) &&
|
||||
this.shouldRetryAfterOpen(justOpened, handle)
|
||||
) {
|
||||
await this.delay(DEFAULT_LSP_DOCUMENT_RETRY_DELAY_MS);
|
||||
response = await handle.connection.request(
|
||||
'textDocument/references',
|
||||
requestParams,
|
||||
);
|
||||
}
|
||||
|
||||
if (!Array.isArray(response)) {
|
||||
continue;
|
||||
}
|
||||
|
|
@ -338,14 +665,36 @@ export class NativeLspService {
|
|||
serverName?: string,
|
||||
): Promise<LspHoverResult | null> {
|
||||
const handles = this.getReadyHandles(serverName);
|
||||
const requestParams = {
|
||||
textDocument: { uri: location.uri },
|
||||
position: location.range.start,
|
||||
};
|
||||
|
||||
for (const [name, handle] of handles) {
|
||||
try {
|
||||
await this.serverManager.warmupTypescriptServer(handle);
|
||||
const response = await handle.connection.request('textDocument/hover', {
|
||||
textDocument: { uri: location.uri },
|
||||
position: location.range.start,
|
||||
});
|
||||
const justOpened = await this.ensureDocumentOpen(
|
||||
name,
|
||||
handle,
|
||||
location.uri,
|
||||
);
|
||||
await this.warmupAndTrack(name, handle);
|
||||
|
||||
let response = await handle.connection.request(
|
||||
'textDocument/hover',
|
||||
requestParams,
|
||||
);
|
||||
|
||||
if (
|
||||
this.isEmptyResponse(response) &&
|
||||
this.shouldRetryAfterOpen(justOpened, handle)
|
||||
) {
|
||||
await this.delay(DEFAULT_LSP_DOCUMENT_RETRY_DELAY_MS);
|
||||
response = await handle.connection.request(
|
||||
'textDocument/hover',
|
||||
requestParams,
|
||||
);
|
||||
}
|
||||
|
||||
const normalized = this.normalizer.normalizeHoverResult(response, name);
|
||||
if (normalized) {
|
||||
return normalized;
|
||||
|
|
@ -367,16 +716,29 @@ export class NativeLspService {
|
|||
limit = 200,
|
||||
): Promise<LspSymbolInformation[]> {
|
||||
const handles = this.getReadyHandles(serverName);
|
||||
const requestParams = { textDocument: { uri } };
|
||||
|
||||
for (const [name, handle] of handles) {
|
||||
try {
|
||||
await this.serverManager.warmupTypescriptServer(handle);
|
||||
const response = await handle.connection.request(
|
||||
const justOpened = await this.ensureDocumentOpen(name, handle, uri);
|
||||
await this.warmupAndTrack(name, handle);
|
||||
|
||||
let response = await handle.connection.request(
|
||||
'textDocument/documentSymbol',
|
||||
{
|
||||
textDocument: { uri },
|
||||
},
|
||||
requestParams,
|
||||
);
|
||||
|
||||
if (
|
||||
this.isEmptyResponse(response) &&
|
||||
this.shouldRetryAfterOpen(justOpened, handle)
|
||||
) {
|
||||
await this.delay(DEFAULT_LSP_DOCUMENT_RETRY_DELAY_MS);
|
||||
response = await handle.connection.request(
|
||||
'textDocument/documentSymbol',
|
||||
requestParams,
|
||||
);
|
||||
}
|
||||
|
||||
if (!Array.isArray(response)) {
|
||||
continue;
|
||||
}
|
||||
|
|
@ -430,17 +792,36 @@ export class NativeLspService {
|
|||
limit = 50,
|
||||
): Promise<LspDefinition[]> {
|
||||
const handles = this.getReadyHandles(serverName);
|
||||
const requestParams = {
|
||||
textDocument: { uri: location.uri },
|
||||
position: location.range.start,
|
||||
};
|
||||
|
||||
for (const [name, handle] of handles) {
|
||||
try {
|
||||
await this.serverManager.warmupTypescriptServer(handle);
|
||||
const response = await handle.connection.request(
|
||||
'textDocument/implementation',
|
||||
{
|
||||
textDocument: { uri: location.uri },
|
||||
position: location.range.start,
|
||||
},
|
||||
const justOpened = await this.ensureDocumentOpen(
|
||||
name,
|
||||
handle,
|
||||
location.uri,
|
||||
);
|
||||
await this.warmupAndTrack(name, handle);
|
||||
|
||||
let response = await handle.connection.request(
|
||||
'textDocument/implementation',
|
||||
requestParams,
|
||||
);
|
||||
|
||||
if (
|
||||
this.isEmptyResponse(response) &&
|
||||
this.shouldRetryAfterOpen(justOpened, handle)
|
||||
) {
|
||||
await this.delay(DEFAULT_LSP_DOCUMENT_RETRY_DELAY_MS);
|
||||
response = await handle.connection.request(
|
||||
'textDocument/implementation',
|
||||
requestParams,
|
||||
);
|
||||
}
|
||||
|
||||
const candidates = Array.isArray(response)
|
||||
? response
|
||||
: response
|
||||
|
|
@ -482,17 +863,36 @@ export class NativeLspService {
|
|||
limit = 50,
|
||||
): Promise<LspCallHierarchyItem[]> {
|
||||
const handles = this.getReadyHandles(serverName);
|
||||
const requestParams = {
|
||||
textDocument: { uri: location.uri },
|
||||
position: location.range.start,
|
||||
};
|
||||
|
||||
for (const [name, handle] of handles) {
|
||||
try {
|
||||
await this.serverManager.warmupTypescriptServer(handle);
|
||||
const response = await handle.connection.request(
|
||||
'textDocument/prepareCallHierarchy',
|
||||
{
|
||||
textDocument: { uri: location.uri },
|
||||
position: location.range.start,
|
||||
},
|
||||
const justOpened = await this.ensureDocumentOpen(
|
||||
name,
|
||||
handle,
|
||||
location.uri,
|
||||
);
|
||||
await this.warmupAndTrack(name, handle);
|
||||
|
||||
let response = await handle.connection.request(
|
||||
'textDocument/prepareCallHierarchy',
|
||||
requestParams,
|
||||
);
|
||||
|
||||
if (
|
||||
this.isEmptyResponse(response) &&
|
||||
this.shouldRetryAfterOpen(justOpened, handle)
|
||||
) {
|
||||
await this.delay(DEFAULT_LSP_DOCUMENT_RETRY_DELAY_MS);
|
||||
response = await handle.connection.request(
|
||||
'textDocument/prepareCallHierarchy',
|
||||
requestParams,
|
||||
);
|
||||
}
|
||||
|
||||
const candidates = Array.isArray(response)
|
||||
? response
|
||||
: response
|
||||
|
|
@ -538,7 +938,7 @@ export class NativeLspService {
|
|||
|
||||
for (const [name, handle] of handles) {
|
||||
try {
|
||||
await this.serverManager.warmupTypescriptServer(handle);
|
||||
await this.warmupAndTrack(name, handle);
|
||||
const response = await handle.connection.request(
|
||||
'callHierarchy/incomingCalls',
|
||||
{
|
||||
|
|
@ -585,7 +985,7 @@ export class NativeLspService {
|
|||
|
||||
for (const [name, handle] of handles) {
|
||||
try {
|
||||
await this.serverManager.warmupTypescriptServer(handle);
|
||||
await this.warmupAndTrack(name, handle);
|
||||
const response = await handle.connection.request(
|
||||
'callHierarchy/outgoingCalls',
|
||||
{
|
||||
|
|
@ -631,7 +1031,8 @@ export class NativeLspService {
|
|||
|
||||
for (const [name, handle] of handles) {
|
||||
try {
|
||||
await this.serverManager.warmupTypescriptServer(handle);
|
||||
await this.ensureDocumentOpen(name, handle, uri);
|
||||
await this.warmupAndTrack(name, handle);
|
||||
|
||||
// Request pull diagnostics if the server supports it
|
||||
const response = await handle.connection.request(
|
||||
|
|
@ -681,7 +1082,7 @@ export class NativeLspService {
|
|||
|
||||
for (const [name, handle] of handles) {
|
||||
try {
|
||||
await this.serverManager.warmupTypescriptServer(handle);
|
||||
await this.warmupAndTrack(name, handle);
|
||||
|
||||
// Request workspace diagnostics if supported
|
||||
const response = await handle.connection.request(
|
||||
|
|
@ -735,7 +1136,8 @@ export class NativeLspService {
|
|||
|
||||
for (const [name, handle] of handles) {
|
||||
try {
|
||||
await this.serverManager.warmupTypescriptServer(handle);
|
||||
await this.ensureDocumentOpen(name, handle, uri);
|
||||
await this.warmupAndTrack(name, handle);
|
||||
|
||||
// Convert context diagnostics to LSP format
|
||||
const lspDiagnostics = context.diagnostics.map((d: LspDiagnostic) =>
|
||||
|
|
@ -879,6 +1281,20 @@ export class NativeLspService {
|
|||
fs.writeFileSync(filePath, lines.join('\n'), 'utf-8');
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if an LSP response represents an empty/null result, used to decide
|
||||
* whether a retry is worthwhile after a freshly opened document.
|
||||
*/
|
||||
private isEmptyResponse(response: unknown): boolean {
|
||||
if (response === null || response === undefined) {
|
||||
return true;
|
||||
}
|
||||
if (Array.isArray(response) && response.length === 0) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
private isNoProjectErrorResponse(response: unknown): boolean {
|
||||
if (!response) {
|
||||
return false;
|
||||
|
|
|
|||
687
packages/core/src/lsp/__e2e__/lsp-e2e-test.ts
Normal file
687
packages/core/src/lsp/__e2e__/lsp-e2e-test.ts
Normal file
|
|
@ -0,0 +1,687 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2025 Qwen Team
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
/* eslint-disable no-console, @typescript-eslint/no-explicit-any */
|
||||
/**
|
||||
* LSP End-to-End Test Script
|
||||
*
|
||||
* Directly instantiates NativeLspService against real LSP servers
|
||||
* (typescript-language-server, clangd, jdtls) to verify all 12 LSP methods
|
||||
* return correct results after the ensureDocumentOpen delay fix.
|
||||
*
|
||||
* Key design decisions:
|
||||
* - Uses per-method cursor positions (different LSP methods need different
|
||||
* positions, e.g. implementations requires an interface, call hierarchy
|
||||
* requires a function with both callers and callees).
|
||||
* - Warms up the server by calling documentSymbols first (opens the file),
|
||||
* then waits for the server to index before testing timing-sensitive
|
||||
* methods like hover and definitions.
|
||||
*
|
||||
* Usage: npx tsx packages/core/src/lsp/__e2e__/lsp-e2e-test.ts
|
||||
*/
|
||||
|
||||
import { NativeLspService } from '../NativeLspService.js';
|
||||
import { EventEmitter } from 'events';
|
||||
import { pathToFileURL } from 'url';
|
||||
import * as path from 'path';
|
||||
|
||||
/* ------------------------------------------------------------------ */
|
||||
/* Helpers */
|
||||
/* ------------------------------------------------------------------ */
|
||||
const green = (s: string) => `\x1b[32m${s}\x1b[0m`;
|
||||
const red = (s: string) => `\x1b[31m${s}\x1b[0m`;
|
||||
const yellow = (s: string) => `\x1b[33m${s}\x1b[0m`;
|
||||
const bold = (s: string) => `\x1b[1m${s}\x1b[0m`;
|
||||
|
||||
interface TestResult {
|
||||
method: string;
|
||||
language: string;
|
||||
passed: boolean;
|
||||
detail: string;
|
||||
}
|
||||
|
||||
const results: TestResult[] = [];
|
||||
|
||||
function record(
|
||||
method: string,
|
||||
language: string,
|
||||
passed: boolean,
|
||||
detail: string,
|
||||
): void {
|
||||
results.push({ method, language, passed, detail });
|
||||
const icon = passed ? green('PASS') : red('FAIL');
|
||||
console.log(` [${icon}] ${language}/${method}: ${detail}`);
|
||||
}
|
||||
|
||||
function sleep(ms: number): Promise<void> {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
/** Build an LSP location object from file path + 0-indexed line/char. */
|
||||
function loc(filePath: string, line: number, char: number) {
|
||||
return {
|
||||
uri: pathToFileURL(filePath).toString(),
|
||||
range: {
|
||||
start: { line, character: char },
|
||||
end: { line, character: char },
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/* ------------------------------------------------------------------ */
|
||||
/* Per-method cursor position config */
|
||||
/* ------------------------------------------------------------------ */
|
||||
/**
 * Per-method cursor targets for one language fixture. Different LSP methods
 * need different cursor positions (e.g. implementations wants an interface,
 * call hierarchy wants a function with both callers and callees), so each
 * method gets its own file + 0-indexed line/char.
 */
interface MethodPositions {
  /** File + position for hover (on a type name or variable) */
  hover: { file: string; line: number; char: number };
  /** File + position for go-to-definition (on a function/method call) */
  definitions: { file: string; line: number; char: number };
  /** File + position for find-references (on a function/method name) */
  references: { file: string; line: number; char: number };
  /** File for documentSymbols (any file with multiple symbols) */
  documentSymbolsFile: string;
  /** Query string for workspaceSymbols */
  symbolQuery: string;
  /** File + position for implementations (on an interface/base class) */
  implementations: { file: string; line: number; char: number };
  /** File + position for call hierarchy (on a function that has callers AND callees) */
  callHierarchy: { file: string; line: number; char: number };
  /** File for diagnostics / codeActions */
  diagnosticsFile: string;
}

/** Everything testLanguage() needs to exercise one language's LSP server. */
interface LanguageTestConfig {
  /** Human-readable language name used in logs and the summary table. */
  langName: string;
  /** Absolute path of the fixture project handed to the LSP server. */
  workspaceRoot: string;
  /** Cursor targets for each LSP method under test. */
  positions: MethodPositions;
  /** Extra wait time (ms) after opening a file for server to index. */
  indexWaitMs: number;
  /**
   * Methods where empty results are acceptable due to known server
   * limitations (e.g. clangd doesn't implement callHierarchy/outgoingCalls).
   * These methods will pass with a "Server limitation" note instead of failing.
   */
  serverLimitedMethods?: Set<string>;
}
|
||||
|
||||
/* ------------------------------------------------------------------ */
|
||||
/* Service factory (lightweight mocks for config/workspace) */
|
||||
/* ------------------------------------------------------------------ */
|
||||
function createService(workspaceRoot: string): NativeLspService {
|
||||
const config = {
|
||||
isTrustedFolder: () => true,
|
||||
getProjectRoot: () => workspaceRoot,
|
||||
get: () => undefined,
|
||||
getActiveExtensions: () => [],
|
||||
};
|
||||
const workspaceContext = {
|
||||
getDirectories: () => [workspaceRoot],
|
||||
isPathWithinWorkspace: () => true,
|
||||
fileExists: async () => false,
|
||||
readFile: async () => '{}',
|
||||
resolvePath: (p: string) => path.resolve(workspaceRoot, p),
|
||||
};
|
||||
const fileDiscovery = {
|
||||
discoverFiles: async () => [],
|
||||
shouldIgnoreFile: () => false,
|
||||
};
|
||||
|
||||
return new NativeLspService(
|
||||
config as any,
|
||||
workspaceContext as any,
|
||||
new EventEmitter(),
|
||||
fileDiscovery as any,
|
||||
{} as any,
|
||||
{ workspaceRoot, requireTrustedWorkspace: false },
|
||||
);
|
||||
}
|
||||
|
||||
/* ------------------------------------------------------------------ */
|
||||
/* Per-language test runner */
|
||||
/* ------------------------------------------------------------------ */
|
||||
/**
 * Runs the full 12-method LSP matrix against one language's fixture
 * workspace: server startup, file warmup, then one recorded pass/fail per
 * method. Failures are recorded (never thrown) so every method is always
 * exercised; the service is stopped best-effort even on a fatal error.
 */
async function testLanguage(cfg: LanguageTestConfig): Promise<void> {
  const {
    langName,
    workspaceRoot,
    positions,
    indexWaitMs,
    serverLimitedMethods,
  } = cfg;
  // True when an empty result for `method` is a known server limitation
  // rather than a regression (see LanguageTestConfig.serverLimitedMethods).
  const isServerLimited = (method: string) =>
    serverLimitedMethods?.has(method) ?? false;

  console.log(bold(`\n=============== ${langName} ===============`));
  console.log(` workspace : ${workspaceRoot}`);

  const service = createService(workspaceRoot);

  try {
    /* ---------- startup ---------- */
    console.log(` Discovering and starting LSP server...`);
    await service.discoverAndPrepare();
    await service.start();

    // Recorded as its own 'startup' row so the summary table shows whether
    // the server even booted; bail out early when nothing reached READY.
    const status = service.getStatus();
    const serverStatuses = Array.from(status.entries());
    if (serverStatuses.length === 0) {
      record('startup', langName, false, 'No servers discovered');
      return;
    }
    let anyReady = false;
    for (const [name, s] of serverStatuses) {
      console.log(` Server "${name}": ${s}`);
      if (s === 'READY') anyReady = true;
    }
    if (!anyReady) {
      record('startup', langName, false, 'No server reached READY');
      return;
    }
    record('startup', langName, true, 'Server ready');

    /* ---------- warmup: open main files via documentSymbols ---------- */
    // This triggers ensureDocumentOpen for each file, so the server starts
    // indexing. We then wait for full indexing before timing-sensitive tests.
    const filesToWarmUp = new Set<string>();
    filesToWarmUp.add(positions.hover.file);
    filesToWarmUp.add(positions.definitions.file);
    filesToWarmUp.add(positions.references.file);
    filesToWarmUp.add(positions.documentSymbolsFile);
    filesToWarmUp.add(positions.implementations.file);
    filesToWarmUp.add(positions.callHierarchy.file);
    filesToWarmUp.add(positions.diagnosticsFile);

    console.log(` Warming up ${filesToWarmUp.size} file(s)...`);
    for (const file of filesToWarmUp) {
      const fileUri = pathToFileURL(file).toString();
      try {
        await service.documentSymbols(fileUri);
      } catch {
        // Ignore errors during warmup; files will be retried in actual tests
      }
    }

    console.log(` Waiting ${indexWaitMs}ms for server to index...`);
    await sleep(indexWaitMs);

    /* ---------- 1. hover ---------- */
    try {
      const hoverLoc = loc(
        positions.hover.file,
        positions.hover.line,
        positions.hover.char,
      );
      const hover = await service.hover(hoverLoc);
      if (hover?.contents) {
        record(
          'hover',
          langName,
          true,
          // Truncate hover text so long signatures don't flood the log.
          `"${hover.contents.substring(0, 100)}"`,
        );
      } else {
        record('hover', langName, false, 'Empty/null result');
      }
    } catch (e: any) {
      record('hover', langName, false, `Error: ${e.message}`);
    }

    /* ---------- 2. definitions ---------- */
    try {
      const defLoc = loc(
        positions.definitions.file,
        positions.definitions.line,
        positions.definitions.char,
      );
      const defs = await service.definitions(defLoc);
      record(
        'definitions',
        langName,
        defs.length > 0,
        defs.length > 0 ? `${defs.length} def(s)` : 'Empty result',
      );
    } catch (e: any) {
      record('definitions', langName, false, `Error: ${e.message}`);
    }

    /* ---------- 3. references ---------- */
    try {
      const refLoc = loc(
        positions.references.file,
        positions.references.line,
        positions.references.char,
      );
      // Third argument `true`: include the declaration among the references.
      const refs = await service.references(refLoc, undefined, true);
      record(
        'references',
        langName,
        refs.length > 0,
        refs.length > 0 ? `${refs.length} ref(s)` : 'Empty result',
      );
    } catch (e: any) {
      record('references', langName, false, `Error: ${e.message}`);
    }

    /* ---------- 4. documentSymbols ---------- */
    try {
      const docSymUri = pathToFileURL(positions.documentSymbolsFile).toString();
      const symbols = await service.documentSymbols(docSymUri);
      if (symbols.length > 0) {
        // Show at most the first five symbol names in the log line.
        const names = symbols
          .slice(0, 5)
          .map((s) => s.name)
          .join(', ');
        record(
          'documentSymbols',
          langName,
          true,
          `${symbols.length} symbol(s): ${names}`,
        );
      } else {
        record('documentSymbols', langName, false, 'Empty result');
      }
    } catch (e: any) {
      record('documentSymbols', langName, false, `Error: ${e.message}`);
    }

    /* ---------- 5. workspaceSymbols ---------- */
    try {
      const wsSymbols = await service.workspaceSymbols(positions.symbolQuery);
      if (wsSymbols.length > 0) {
        const names = wsSymbols
          .slice(0, 5)
          .map((s) => s.name)
          .join(', ');
        record(
          'workspaceSymbols',
          langName,
          true,
          `${wsSymbols.length} symbol(s): ${names}`,
        );
      } else {
        record('workspaceSymbols', langName, false, 'Empty result');
      }
    } catch (e: any) {
      record('workspaceSymbols', langName, false, `Error: ${e.message}`);
    }

    /* ---------- 6. implementations ---------- */
    try {
      const implLoc = loc(
        positions.implementations.file,
        positions.implementations.line,
        positions.implementations.char,
      );
      const impls = await service.implementations(implLoc);
      record(
        'implementations',
        langName,
        impls.length > 0,
        impls.length > 0 ? `${impls.length} impl(s)` : 'Empty result',
      );
    } catch (e: any) {
      record('implementations', langName, false, `Error: ${e.message}`);
    }

    /* ---------- 7/8/9. call hierarchy ---------- */
    // prepareCallHierarchy is a prerequisite: incoming/outgoing calls take
    // the first prepared item as input, so both are skipped if it fails.
    try {
      const callLoc = loc(
        positions.callHierarchy.file,
        positions.callHierarchy.line,
        positions.callHierarchy.char,
      );
      const callItems = await service.prepareCallHierarchy(callLoc);
      if (callItems.length > 0) {
        record(
          'prepareCallHierarchy',
          langName,
          true,
          `${callItems.length} item(s): ${callItems[0]!.name}`,
        );

        try {
          const incoming = await service.incomingCalls(callItems[0]!);
          record(
            'incomingCalls',
            langName,
            incoming.length > 0,
            incoming.length > 0
              ? `${incoming.length} caller(s)`
              : 'Empty (no callers found)',
          );
        } catch (e: any) {
          record('incomingCalls', langName, false, `Error: ${e.message}`);
        }

        try {
          const outgoing = await service.outgoingCalls(callItems[0]!);
          if (outgoing.length > 0) {
            record(
              'outgoingCalls',
              langName,
              true,
              `${outgoing.length} callee(s)`,
            );
          } else if (isServerLimited('outgoingCalls')) {
            // Known server gap (e.g. clangd): empty counts as a pass.
            record(
              'outgoingCalls',
              langName,
              true,
              'Empty (server does not implement this method)',
            );
          } else {
            record(
              'outgoingCalls',
              langName,
              false,
              'Empty (no callees found)',
            );
          }
        } catch (e: any) {
          record('outgoingCalls', langName, false, `Error: ${e.message}`);
        }
      } else {
        record('prepareCallHierarchy', langName, false, 'Empty result');
        record('incomingCalls', langName, false, 'Skipped');
        record('outgoingCalls', langName, false, 'Skipped');
      }
    } catch (e: any) {
      record('prepareCallHierarchy', langName, false, `Error: ${e.message}`);
      record('incomingCalls', langName, false, 'Skipped');
      record('outgoingCalls', langName, false, 'Skipped');
    }

    /* ---------- 10. diagnostics ---------- */
    try {
      const diagUri = pathToFileURL(positions.diagnosticsFile).toString();
      const diags = await service.diagnostics(diagUri);
      // 0 diagnostics is fine for clean code
      record('diagnostics', langName, true, `${diags.length} diagnostic(s)`);
    } catch (e: any) {
      record('diagnostics', langName, false, `Error: ${e.message}`);
    }

    /* ---------- 11. codeActions ---------- */
    try {
      const caUri = pathToFileURL(positions.diagnosticsFile).toString();
      const actions = await service.codeActions(
        caUri,
        // Arbitrary range at the top of the file; the call itself is the test.
        { start: { line: 0, character: 0 }, end: { line: 0, character: 10 } },
        { diagnostics: [], triggerKind: 'invoked' as const },
      );
      // 0 actions is fine when there are no diagnostics
      record('codeActions', langName, true, `${actions.length} action(s)`);
    } catch (e: any) {
      record('codeActions', langName, false, `Error: ${e.message}`);
    }

    /* ---------- 12. workspaceDiagnostics ---------- */
    try {
      const wsDiags = await service.workspaceDiagnostics();
      record(
        'workspaceDiagnostics',
        langName,
        true,
        `${wsDiags.length} file(s) with diagnostics`,
      );
    } catch (e: any) {
      record('workspaceDiagnostics', langName, false, `Error: ${e.message}`);
    }

    await service.stop();
  } catch (e: any) {
    console.log(red(` Fatal error: ${e.message}`));
    console.log(e.stack);
    try {
      await service.stop();
    } catch {
      // Best-effort cleanup; ignore errors during shutdown
    }
  }
}
|
||||
|
||||
/* ------------------------------------------------------------------ */
|
||||
/* Language configs */
|
||||
/* ------------------------------------------------------------------ */
|
||||
|
||||
// Fixture project roots. NOTE(review): the line/char positions below assume
// the fixture files under /tmp/lsp-e2e-test match the layouts documented in
// each config's comment — regenerate the fixtures if these drift.
const TS_ROOT = '/tmp/lsp-e2e-test/ts-project';
const CPP_ROOT = '/tmp/lsp-e2e-test/cpp-project';
const JAVA_ROOT = '/tmp/lsp-e2e-test/java-project';

/**
 * TypeScript positions (all in index.ts / math.ts):
 *
 * index.ts:
 *   L0: import { createCalculator, Calculator } from './math.js';
 *   L1: (empty)
 *   L2: const calc: Calculator = createCalculator();
 *   L3: console.log(calc.add(1, 2));
 *   L4: console.log(calc.subtract(5, 3));
 *
 * math.ts:
 *   L0: export interface Calculator {
 *   L5: export class SimpleCalculator implements Calculator {
 *   L15: export function createCalculator(): Calculator {
 */
const tsConfig: LanguageTestConfig = {
  langName: 'TypeScript',
  workspaceRoot: TS_ROOT,
  indexWaitMs: 3000,
  positions: {
    // hover on `createCalculator` call: L2 char 27
    hover: { file: `${TS_ROOT}/src/index.ts`, line: 2, char: 27 },
    // definitions on `createCalculator` call → math.ts definition
    definitions: { file: `${TS_ROOT}/src/index.ts`, line: 2, char: 27 },
    // references on `Calculator` → found in both files
    references: { file: `${TS_ROOT}/src/index.ts`, line: 2, char: 12 },
    // documentSymbols on math.ts (has interface, class, function)
    documentSymbolsFile: `${TS_ROOT}/src/math.ts`,
    symbolQuery: 'Calculator',
    // implementations on `Calculator` interface → SimpleCalculator
    implementations: { file: `${TS_ROOT}/src/math.ts`, line: 0, char: 17 },
    // call hierarchy on `createCalculator` (called by index.ts, calls SimpleCalculator)
    callHierarchy: { file: `${TS_ROOT}/src/math.ts`, line: 15, char: 16 },
    diagnosticsFile: `${TS_ROOT}/src/index.ts`,
  },
};

/**
 * C++ positions (main.cpp / calculator.h / calculator.cpp):
 *
 * main.cpp:
 *   L0: #include "calculator.h"
 *   L1: #include <iostream>
 *   L2: (empty)
 *   L3: int addValues(Calculator& calc, int a, int b) {
 *   L4:   return calc.add(a, b);
 *   L5: }
 *   ...
 *   L11: int computeSum(Calculator& calc) {
 *   L12:   return addValues(calc, 1, 2) + subtractValues(calc, 5, 3);
 *   L13: }
 *   ...
 *   L15: int main() {
 *   L16:   Calculator calc;
 *   L17:   int result = computeSum(calc);
 *   L18:   std::cout << result << std::endl;
 *   ...
 *
 * calculator.h:
 *   L0: #pragma once
 *   L1: (empty)
 *   L2: class Calculator {
 *   L3: public:
 *   L4:   int add(int a, int b);
 *   L5:   int subtract(int a, int b);
 *   ...
 *   L9: class AdvancedCalculator : public Calculator {
 *
 * calculator.cpp:
 *   L0: #include "calculator.h"
 *   L1: (empty)
 *   L2: int Calculator::add(int a, int b) {
 */
const cppConfig: LanguageTestConfig = {
  langName: 'C++',
  workspaceRoot: CPP_ROOT,
  indexWaitMs: 5000,
  // clangd v19.x does not implement callHierarchy/outgoingCalls (returns -32601)
  serverLimitedMethods: new Set(['outgoingCalls']),
  positions: {
    // hover on `Calculator` type at main.cpp L16:4 → class info
    hover: { file: `${CPP_ROOT}/src/main.cpp`, line: 16, char: 4 },
    // definitions on `computeSum` call at main.cpp L17:17 → L11 definition
    definitions: { file: `${CPP_ROOT}/src/main.cpp`, line: 17, char: 17 },
    // references on `add` method at calculator.h L4:8 → all usages
    references: { file: `${CPP_ROOT}/src/calculator.h`, line: 4, char: 8 },
    // documentSymbols on main.cpp → addValues, subtractValues, computeSum, main
    documentSymbolsFile: `${CPP_ROOT}/src/main.cpp`,
    symbolQuery: 'Calculator',
    // implementations on `Calculator` class at calculator.h L2:6
    // → should find AdvancedCalculator (derived class)
    implementations: { file: `${CPP_ROOT}/src/calculator.h`, line: 2, char: 6 },
    // call hierarchy on `computeSum` at main.cpp L11:4
    // → incomingCalls: main; outgoingCalls: addValues, subtractValues
    callHierarchy: { file: `${CPP_ROOT}/src/main.cpp`, line: 11, char: 4 },
    diagnosticsFile: `${CPP_ROOT}/src/main.cpp`,
  },
};

/**
 * Java positions (Main.java / Calculator.java / SimpleCalculator.java):
 *
 * Main.java:
 *   L0: package com.test;
 *   L1: (empty)
 *   L2: public class Main {
 *   L3:   public static int computeSum(Calculator calc) {
 *   L4:     return calc.add(1, 2) + calc.subtract(5, 3);
 *   L5:   }
 *   L6: (empty)
 *   L7:   public static void main(String[] args) {
 *   L8:     Calculator calc = new SimpleCalculator();
 *   L9:     int result = computeSum(calc);
 *   L10:     System.out.println(result);
 *   L11:   }
 *   L12: }
 *
 * Calculator.java:
 *   L0: package com.test;
 *   L1: (empty)
 *   L2: public interface Calculator {
 *   L3:   int add(int a, int b);
 *
 * SimpleCalculator.java:
 *   L2: public class SimpleCalculator implements Calculator {
 *   L4:   public int add(int a, int b) {
 */
const javaConfig: LanguageTestConfig = {
  langName: 'Java',
  workspaceRoot: JAVA_ROOT,
  // jdtls is by far the slowest to index; give it a generous head start.
  indexWaitMs: 20000,
  positions: {
    // hover on `Calculator` type at Main.java L8:8 → interface info
    hover: {
      file: `${JAVA_ROOT}/src/main/java/com/test/Main.java`,
      line: 8,
      char: 8,
    },
    // definitions on `computeSum` call at Main.java L9:21 → L3 definition
    definitions: {
      file: `${JAVA_ROOT}/src/main/java/com/test/Main.java`,
      line: 9,
      char: 21,
    },
    // references on `add` at Calculator.java L3:8 → all usages
    references: {
      file: `${JAVA_ROOT}/src/main/java/com/test/Calculator.java`,
      line: 3,
      char: 8,
    },
    // documentSymbols on Main.java → Main class, computeSum, main
    documentSymbolsFile: `${JAVA_ROOT}/src/main/java/com/test/Main.java`,
    symbolQuery: 'Calculator',
    // implementations on `Calculator` interface at Calculator.java L2:17
    implementations: {
      file: `${JAVA_ROOT}/src/main/java/com/test/Calculator.java`,
      line: 2,
      char: 17,
    },
    // call hierarchy on `computeSum` at Main.java L3:22
    // → incomingCalls: main; outgoingCalls: add, subtract
    callHierarchy: {
      file: `${JAVA_ROOT}/src/main/java/com/test/Main.java`,
      line: 3,
      char: 22,
    },
    diagnosticsFile: `${JAVA_ROOT}/src/main/java/com/test/Main.java`,
  },
};
|
||||
|
||||
/* ------------------------------------------------------------------ */
|
||||
/* Main */
|
||||
/* ------------------------------------------------------------------ */
|
||||
/**
 * Entry point: runs the full matrix for TypeScript, C++, and Java
 * sequentially, then prints overall, per-language, and per-method summary
 * tables from `results`, and exits 1 if any test failed (0 otherwise).
 */
async function main(): Promise<void> {
  console.log(bold('LSP End-to-End Test Suite'));
  console.log(
    'Verifying all 12 LSP methods with real servers (TS / C++ / Java)\n',
  );

  // Run sequentially — each language spawns its own real LSP server.
  await testLanguage(tsConfig);
  await testLanguage(cppConfig);
  await testLanguage(javaConfig);

  /* ---------- Summary ---------- */
  console.log(bold('\n================== Summary =================='));
  const passed = results.filter((r) => r.passed).length;
  const failed = results.filter((r) => !r.passed).length;
  console.log(
    `Total: ${results.length} | ${green(`Passed: ${passed}`)} | ${red(`Failed: ${failed}`)}`,
  );

  // Per-language rollup: all-green only when every recorded method passed.
  console.log(bold('\nPer Language:'));
  for (const lang of ['TypeScript', 'C++', 'Java']) {
    const lr = results.filter((r) => r.language === lang);
    const lp = lr.filter((r) => r.passed).length;
    const icon =
      lp === lr.length ? green('ALL PASS') : yellow(`${lp}/${lr.length}`);
    console.log(` ${lang}: ${icon}`);
  }

  // Per-method rollup: one row per method, languages colored by outcome.
  console.log(bold('\nPer Method:'));
  const methods = [
    'startup',
    'hover',
    'definitions',
    'references',
    'documentSymbols',
    'workspaceSymbols',
    'implementations',
    'prepareCallHierarchy',
    'incomingCalls',
    'outgoingCalls',
    'diagnostics',
    'codeActions',
    'workspaceDiagnostics',
  ];
  for (const m of methods) {
    const mr = results.filter((r) => r.method === m);
    const langs = mr
      .map((r) => (r.passed ? green(r.language) : red(r.language)))
      .join(' | ');
    console.log(` ${m}: ${langs}`);
  }

  if (failed > 0) {
    console.log(yellow('\nFailed tests:'));
    for (const r of results.filter((rr) => !rr.passed)) {
      console.log(red(` ${r.language}/${r.method}: ${r.detail}`));
    }
  }

  // Non-zero exit code so CI can gate on this script.
  process.exit(failed > 0 ? 1 : 0);
}

main();
|
||||
|
|
@ -19,9 +19,25 @@ export const DEFAULT_LSP_REQUEST_TIMEOUT_MS = 15000;
|
|||
/** Default delay for TypeScript server warm-up in milliseconds */
export const DEFAULT_LSP_WARMUP_DELAY_MS = 150;

/** Default delay after opening a document to allow the LSP server to process it */
export const DEFAULT_LSP_DOCUMENT_OPEN_DELAY_MS = 200;

/** Default timeout for command existence check in milliseconds */
export const DEFAULT_LSP_COMMAND_CHECK_TIMEOUT_MS = 2000;

/** Default delay for workspace symbol warmup after opening a file, in milliseconds */
export const DEFAULT_LSP_WORKSPACE_SYMBOL_WARMUP_DELAY_MS = 1500;

/**
 * Default delay (in milliseconds) before retrying a document-level operation
 * (definitions, references, hover, documentSymbols, etc.) when the first
 * attempt returns empty results right after we sent textDocument/didOpen.
 *
 * Slow servers like jdtls (Java) and clangd (C++) need significantly more
 * time than the initial 200ms didOpen delay to build their AST / index,
 * hence this is 10x DEFAULT_LSP_DOCUMENT_OPEN_DELAY_MS.
 */
export const DEFAULT_LSP_DOCUMENT_RETRY_DELAY_MS = 2000;
|
||||
|
||||
// ============================================================================
|
||||
// Retry Constants
|
||||
// ============================================================================
|
||||
|
|
|
|||
|
|
@ -20,6 +20,7 @@ import {
|
|||
splitCompoundCommand,
|
||||
buildPermissionRules,
|
||||
getRuleDisplayName,
|
||||
buildHumanReadableRuleLabel,
|
||||
} from './rule-parser.js';
|
||||
import { PermissionManager } from './permission-manager.js';
|
||||
import type { PermissionManagerConfig } from './permission-manager.js';
|
||||
|
|
@ -1519,3 +1520,174 @@ describe('buildPermissionRules', () => {
|
|||
});
|
||||
});
|
||||
});
|
||||
|
||||
// ─── buildHumanReadableRuleLabel ─────────────────────────────────────────────
|
||||
|
||||
// Verifies that buildHumanReadableRuleLabel turns permission rule strings
// (e.g. "Read(//path/**)", "Bash(git *)") into the short human-readable
// phrases shown in permission prompts, including round-trips through
// buildPermissionRules and graceful handling of unknown rule categories.
describe('buildHumanReadableRuleLabel', () => {
  it('returns empty string for empty rules array', () => {
    expect(buildHumanReadableRuleLabel([])).toBe('');
  });

  // Bare category names (no specifier) map to generic action phrases.
  it('converts bare Read rule to "read files"', () => {
    expect(buildHumanReadableRuleLabel(['Read'])).toBe('read files');
  });

  it('converts bare Bash rule to "run commands"', () => {
    expect(buildHumanReadableRuleLabel(['Bash'])).toBe('run commands');
  });

  it('converts bare WebSearch rule to "search the web"', () => {
    expect(buildHumanReadableRuleLabel(['WebSearch'])).toBe('search the web');
  });

  // Path specifiers: "//" prefix marks an absolute path; glob tails are
  // stripped so only the directory is shown.
  it('converts Read with absolute path specifier', () => {
    const label = buildHumanReadableRuleLabel(['Read(//Users/mochi/.qwen/**)']);
    expect(label).toBe('read files in /Users/mochi/.qwen/');
  });

  it('converts Read with relative path specifier', () => {
    const label = buildHumanReadableRuleLabel(['Read(/src/**)']);
    expect(label).toBe('read files in /src/');
  });

  it('converts Edit with path specifier', () => {
    const label = buildHumanReadableRuleLabel(['Edit(//tmp/**)']);
    expect(label).toBe('edit files in /tmp/');
  });

  it('converts Bash with command specifier', () => {
    const label = buildHumanReadableRuleLabel(['Bash(git *)']);
    expect(label).toBe("run 'git *' commands");
  });

  it('converts WebFetch with domain specifier', () => {
    const label = buildHumanReadableRuleLabel(['WebFetch(github.com)']);
    expect(label).toBe('fetch from github.com');
  });

  it('converts Skill with literal specifier', () => {
    const label = buildHumanReadableRuleLabel(['Skill(Explore)']);
    expect(label).toBe('use skill "Explore"');
  });

  it('converts Agent with literal specifier', () => {
    const label = buildHumanReadableRuleLabel(['Agent(research)']);
    expect(label).toBe('use agent "research"');
  });

  it('joins multiple rules with commas', () => {
    const label = buildHumanReadableRuleLabel([
      'Read(//Users/alice/**)',
      'Bash(npm *)',
    ]);
    expect(label).toBe("read files in /Users/alice/, run 'npm *' commands");
  });

  // Unknown categories fall back to the raw rule (or a lowercased
  // category + quoted specifier) instead of throwing.
  it('handles unknown display names gracefully', () => {
    const label = buildHumanReadableRuleLabel(['mcp__server__tool']);
    expect(label).toBe('mcp__server__tool');
  });

  it('handles unknown display name with specifier', () => {
    const label = buildHumanReadableRuleLabel(['UnknownCategory(someValue)']);
    expect(label).toBe('unknowncategory "someValue"');
  });

  it('cleans path with /* suffix', () => {
    const label = buildHumanReadableRuleLabel(['Read(//home/user/docs/*)']);
    expect(label).toBe('read files in /home/user/docs/');
  });

  // Round-trips: rules produced by buildPermissionRules must label cleanly.
  it('round-trips from buildPermissionRules for file tool', () => {
    const rules = buildPermissionRules({
      toolName: 'read_file',
      filePath: '/Users/alice/.secrets',
    });
    const label = buildHumanReadableRuleLabel(rules);
    expect(label).toBe('read files in /Users/alice/');
  });

  it('round-trips from buildPermissionRules for shell command', () => {
    const rules = buildPermissionRules({
      toolName: 'run_shell_command',
      command: 'git status',
    });
    const label = buildHumanReadableRuleLabel(rules);
    expect(label).toBe("run 'git status' commands");
  });

  it('round-trips from buildPermissionRules for web fetch', () => {
    const rules = buildPermissionRules({
      toolName: 'web_fetch',
      domain: 'example.com',
    });
    const label = buildHumanReadableRuleLabel(rules);
    expect(label).toBe('fetch from example.com');
  });
});
|
||||
|
||||
// ─── PermissionManager.findMatchingDenyRule ──────────────────────────────────
|
||||
|
||||
describe('PermissionManager.findMatchingDenyRule', () => {
|
||||
it('returns the raw deny rule string when context matches', () => {
|
||||
const pm = new PermissionManager(
|
||||
makeConfig({ permissionsDeny: ['Bash(rm *)'] }),
|
||||
);
|
||||
pm.initialize();
|
||||
|
||||
const result = pm.findMatchingDenyRule({
|
||||
toolName: 'run_shell_command',
|
||||
command: 'rm -rf /tmp/foo',
|
||||
});
|
||||
expect(result).toBe('Bash(rm *)');
|
||||
});
|
||||
|
||||
it('returns undefined when no deny rule matches', () => {
|
||||
const pm = new PermissionManager(
|
||||
makeConfig({ permissionsDeny: ['Bash(rm *)'] }),
|
||||
);
|
||||
pm.initialize();
|
||||
|
||||
const result = pm.findMatchingDenyRule({
|
||||
toolName: 'run_shell_command',
|
||||
command: 'git status',
|
||||
});
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
|
||||
it('matches session deny rules', () => {
|
||||
const pm = new PermissionManager(makeConfig());
|
||||
pm.initialize();
|
||||
pm.addSessionDenyRule('Read(//secret/**)');
|
||||
|
||||
const result = pm.findMatchingDenyRule({
|
||||
toolName: 'read_file',
|
||||
filePath: '/secret/key.pem',
|
||||
});
|
||||
expect(result).toBe('Read(//secret/**)');
|
||||
});
|
||||
|
||||
it('returns undefined for non-denied tool', () => {
|
||||
const pm = new PermissionManager(
|
||||
makeConfig({ permissionsDeny: ['ShellTool'] }),
|
||||
);
|
||||
pm.initialize();
|
||||
|
||||
const result = pm.findMatchingDenyRule({ toolName: 'read_file' });
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
|
||||
it('matches bare tool deny rule', () => {
|
||||
const pm = new PermissionManager(
|
||||
makeConfig({ permissionsDeny: ['ShellTool'] }),
|
||||
);
|
||||
pm.initialize();
|
||||
|
||||
const result = pm.findMatchingDenyRule({
|
||||
toolName: 'run_shell_command',
|
||||
command: 'echo hello',
|
||||
});
|
||||
// rule.raw preserves the original rule string as written in config
|
||||
expect(result).toBe('ShellTool');
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -365,6 +365,43 @@ export class PermissionManager {
|
|||
return decision !== 'deny';
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the first deny rule that matches the given context.
|
||||
* Returns the raw rule string if found, or undefined if no deny rule matches.
|
||||
*
|
||||
* Useful for providing user-visible feedback about which rule caused a denial.
|
||||
*/
|
||||
findMatchingDenyRule(ctx: PermissionCheckContext): string | undefined {
|
||||
const { toolName, command, filePath, domain, specifier } = ctx;
|
||||
|
||||
const pathCtx: PathMatchContext | undefined =
|
||||
this.config.getProjectRoot && this.config.getCwd
|
||||
? {
|
||||
projectRoot: this.config.getProjectRoot(),
|
||||
cwd: this.config.getCwd(),
|
||||
}
|
||||
: undefined;
|
||||
|
||||
const matchArgs = [
|
||||
toolName,
|
||||
command,
|
||||
filePath,
|
||||
domain,
|
||||
pathCtx,
|
||||
specifier,
|
||||
] as const;
|
||||
|
||||
for (const rule of [
|
||||
...this.sessionRules.deny,
|
||||
...this.persistentRules.deny,
|
||||
]) {
|
||||
if (matchesRule(rule, ...matchArgs)) {
|
||||
return rule.raw;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Shell command helper
|
||||
// ---------------------------------------------------------------------------
|
||||
|
|
|
|||
|
|
@ -405,6 +405,106 @@ export function buildPermissionRules(ctx: PermissionCheckContext): string[] {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Human-readable display names for permission rule categories.
|
||||
* Maps display name → verb phrase for use in "Always allow [verb phrase] in this project".
|
||||
*/
|
||||
const DISPLAY_NAME_TO_VERB: Readonly<Record<string, string>> = {
|
||||
Read: 'read files',
|
||||
Edit: 'edit files',
|
||||
Bash: 'run commands',
|
||||
WebFetch: 'fetch from',
|
||||
WebSearch: 'search the web',
|
||||
Agent: 'use agent',
|
||||
Skill: 'use skill',
|
||||
SaveMemory: 'save memory',
|
||||
TodoWrite: 'write todos',
|
||||
Lsp: 'use LSP',
|
||||
ExitPlanMode: 'exit plan mode',
|
||||
};
|
||||
|
||||
/**
|
||||
* Strip the glob suffix (e.g. `/**`) and the leading `//` from an absolute
|
||||
* path specifier so it reads cleanly in a UI label.
|
||||
*
|
||||
* `//Users/mochi/.qwen/**` → `/Users/mochi/.qwen/`
|
||||
* `/src/**` → `src/`
|
||||
*/
|
||||
function cleanPathSpecifier(specifier: string): string {
|
||||
let cleaned = specifier;
|
||||
// Remove trailing glob patterns like /** or /*
|
||||
cleaned = cleaned.replace(/\/\*\*$/, '/').replace(/\/\*$/, '/');
|
||||
// Convert rule grammar `//absolute` → `/absolute`
|
||||
if (cleaned.startsWith('//')) {
|
||||
cleaned = cleaned.substring(1);
|
||||
}
|
||||
// Ensure trailing slash for directories
|
||||
if (!cleaned.endsWith('/')) {
|
||||
cleaned += '/';
|
||||
}
|
||||
return cleaned;
|
||||
}
|
||||
|
||||
/**
|
||||
* Build a human-readable label describing what a set of permission rules allow.
|
||||
*
|
||||
* Used in "Always Allow" UI options to give users a clear, natural-language
|
||||
* description instead of raw rule syntax.
|
||||
*
|
||||
* Examples:
|
||||
* `["Read(//Users/mochi/.qwen/**)"]` → `"read files in /Users/mochi/.qwen/"`
|
||||
* `["Bash(git *)"]` → `"run 'git *' commands"`
|
||||
* `["WebFetch(github.com)"]` → `"fetch from github.com"`
|
||||
* `["Read"]` → `"read files"`
|
||||
*
|
||||
* @param rules - Array of rule strings from buildPermissionRules()
|
||||
* @returns A human-readable description string
|
||||
*/
|
||||
export function buildHumanReadableRuleLabel(rules: string[]): string {
|
||||
if (!rules.length) return '';
|
||||
|
||||
const parts: string[] = [];
|
||||
for (const rule of rules) {
|
||||
// Parse "DisplayName(specifier)" or bare "DisplayName"
|
||||
const parenIdx = rule.indexOf('(');
|
||||
if (parenIdx === -1) {
|
||||
// Bare rule like "Read" or "Bash"
|
||||
const verb = DISPLAY_NAME_TO_VERB[rule] ?? rule.toLowerCase();
|
||||
parts.push(verb);
|
||||
continue;
|
||||
}
|
||||
|
||||
const displayName = rule.substring(0, parenIdx);
|
||||
const specifier = rule.substring(parenIdx + 1, rule.length - 1); // strip parens
|
||||
const verb = DISPLAY_NAME_TO_VERB[displayName] ?? displayName.toLowerCase();
|
||||
|
||||
const canonicalName = Object.entries(CANONICAL_TO_RULE_DISPLAY).find(
|
||||
([, v]) => v === displayName,
|
||||
)?.[0];
|
||||
const kind = canonicalName ? getSpecifierKind(canonicalName) : 'literal';
|
||||
|
||||
switch (kind) {
|
||||
case 'path': {
|
||||
const cleanPath = cleanPathSpecifier(specifier);
|
||||
parts.push(`${verb} in ${cleanPath}`);
|
||||
break;
|
||||
}
|
||||
case 'command':
|
||||
parts.push(`run '${specifier}' commands`);
|
||||
break;
|
||||
case 'domain':
|
||||
parts.push(`${verb} ${specifier}`);
|
||||
break;
|
||||
case 'literal':
|
||||
default:
|
||||
parts.push(`${verb} "${specifier}"`);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return parts.join(', ');
|
||||
}
|
||||
|
||||
// ─────────────────────────────────────────────────────────────────────────────
|
||||
// Shell command matching
|
||||
// ─────────────────────────────────────────────────────────────────────────────
|
||||
|
|
|
|||
|
|
@ -413,6 +413,67 @@ describe('ShellExecutionService', () => {
|
|||
expect(mockHeadlessTerminal.resize).toHaveBeenCalledWith(100, 40);
|
||||
});
|
||||
|
||||
it('should ignore expected PTY read EIO errors on process exit', async () => {
|
||||
const { result } = await simulateExecution('ls -l', (pty) => {
|
||||
const eioError = Object.assign(new Error('read EIO'), { code: 'EIO' });
|
||||
pty.emit('error', eioError);
|
||||
pty.onExit.mock.calls[0][0]({ exitCode: 0, signal: null });
|
||||
});
|
||||
|
||||
expect(result.exitCode).toBe(0);
|
||||
});
|
||||
|
||||
it('should throw unexpected PTY errors from error event', async () => {
|
||||
const abortController = new AbortController();
|
||||
const handle = await ShellExecutionService.execute(
|
||||
'ls -l',
|
||||
'/test/dir',
|
||||
onOutputEventMock,
|
||||
abortController.signal,
|
||||
true,
|
||||
shellExecutionConfig,
|
||||
);
|
||||
await new Promise((resolve) => process.nextTick(resolve));
|
||||
|
||||
const unexpectedError = Object.assign(new Error('unexpected pty error'), {
|
||||
code: 'EPIPE',
|
||||
});
|
||||
expect(() => mockPtyProcess.emit('error', unexpectedError)).toThrow(
|
||||
'unexpected pty error',
|
||||
);
|
||||
|
||||
mockPtyProcess.onExit.mock.calls[0][0]({ exitCode: 0, signal: null });
|
||||
await handle.result;
|
||||
});
|
||||
|
||||
it('should ignore ioctl EBADF message-only resize race errors', async () => {
|
||||
mockPtyProcess.resize.mockImplementationOnce(() => {
|
||||
throw new Error('ioctl(2) failed, EBADF');
|
||||
});
|
||||
|
||||
await simulateExecution('ls -l', (pty) => {
|
||||
pty.onData.mock.calls[0][0]('file1.txt\n');
|
||||
expect(() =>
|
||||
ShellExecutionService.resizePty(pty.pid!, 100, 40),
|
||||
).not.toThrow();
|
||||
pty.onExit.mock.calls[0][0]({ exitCode: 0, signal: null });
|
||||
});
|
||||
});
|
||||
|
||||
it('should ignore exited-pty message-only resize race errors', async () => {
|
||||
mockPtyProcess.resize.mockImplementationOnce(() => {
|
||||
throw new Error('Cannot resize a pty that has already exited');
|
||||
});
|
||||
|
||||
await simulateExecution('ls -l', (pty) => {
|
||||
pty.onData.mock.calls[0][0]('file1.txt\n');
|
||||
expect(() =>
|
||||
ShellExecutionService.resizePty(pty.pid!, 100, 40),
|
||||
).not.toThrow();
|
||||
pty.onExit.mock.calls[0][0]({ exitCode: 0, signal: null });
|
||||
});
|
||||
});
|
||||
|
||||
it('should scroll the headless terminal', async () => {
|
||||
await simulateExecution('ls -l', (pty) => {
|
||||
pty.onData.mock.calls[0][0]('file1.txt\n');
|
||||
|
|
|
|||
|
|
@ -185,6 +185,40 @@ interface ActivePty {
|
|||
headlessTerminal: pkg.Terminal;
|
||||
}
|
||||
|
||||
const getErrnoCode = (error: unknown): string | undefined => {
|
||||
if (!error || typeof error !== 'object' || !('code' in error)) {
|
||||
return undefined;
|
||||
}
|
||||
const code = (error as { code?: unknown }).code;
|
||||
return typeof code === 'string' ? code : undefined;
|
||||
};
|
||||
|
||||
const getErrorMessage = (error: unknown): string =>
|
||||
error instanceof Error ? error.message : String(error);
|
||||
|
||||
const isExpectedPtyReadExitError = (error: unknown): boolean => {
|
||||
const code = getErrnoCode(error);
|
||||
if (code === 'EIO') {
|
||||
return true;
|
||||
}
|
||||
|
||||
const message = getErrorMessage(error);
|
||||
return message.includes('read EIO');
|
||||
};
|
||||
|
||||
const isExpectedPtyExitRaceError = (error: unknown): boolean => {
|
||||
const code = getErrnoCode(error);
|
||||
if (code === 'ESRCH' || code === 'EBADF') {
|
||||
return true;
|
||||
}
|
||||
|
||||
const message = getErrorMessage(error);
|
||||
return (
|
||||
message.includes('ioctl(2) failed, EBADF') ||
|
||||
message.includes('Cannot resize a pty that has already exited')
|
||||
);
|
||||
};
|
||||
|
||||
const getFullBufferText = (terminal: pkg.Terminal): string => {
|
||||
const buffer = terminal.buffer.active;
|
||||
const lines: string[] = [];
|
||||
|
|
@ -768,6 +802,20 @@ export class ShellExecutionService {
|
|||
handleOutput(bufferData);
|
||||
});
|
||||
|
||||
// Handle PTY errors - EIO is expected when the PTY process exits
|
||||
// due to race conditions between the exit event and read operations.
|
||||
// This is a normal behavior on macOS/Linux and should not crash the app.
|
||||
// See: https://github.com/microsoft/node-pty/issues/178
|
||||
ptyProcess.on('error', (err: NodeJS.ErrnoException) => {
|
||||
if (isExpectedPtyReadExitError(err)) {
|
||||
// EIO is expected when the PTY process exits - ignore it
|
||||
return;
|
||||
}
|
||||
|
||||
// Surface unexpected PTY errors to preserve existing crash behavior.
|
||||
throw err;
|
||||
});
|
||||
|
||||
ptyProcess.onExit(
|
||||
({ exitCode, signal }: { exitCode: number; signal?: number }) => {
|
||||
exited = true;
|
||||
|
|
@ -938,7 +986,9 @@ export class ShellExecutionService {
|
|||
} catch (e) {
|
||||
// Ignore errors if the pty has already exited, which can happen
|
||||
// due to a race condition between the exit event and this call.
|
||||
if (e instanceof Error && 'code' in e && e.code === 'ESRCH') {
|
||||
// - ESRCH: No such process (process no longer exists)
|
||||
// - EBADF: Bad file descriptor (PTY fd closed, e.g., "ioctl(2) failed, EBADF")
|
||||
if (isExpectedPtyExitRaceError(e)) {
|
||||
// ignore
|
||||
} else {
|
||||
throw e;
|
||||
|
|
@ -968,7 +1018,9 @@ export class ShellExecutionService {
|
|||
} catch (e) {
|
||||
// Ignore errors if the pty has already exited, which can happen
|
||||
// due to a race condition between the exit event and this call.
|
||||
if (e instanceof Error && 'code' in e && e.code === 'ESRCH') {
|
||||
// - ESRCH: No such process (process no longer exists)
|
||||
// - EBADF: Bad file descriptor (PTY fd closed, e.g., "ioctl(2) failed, EBADF")
|
||||
if (isExpectedPtyExitRaceError(e)) {
|
||||
// ignore
|
||||
} else {
|
||||
throw e;
|
||||
|
|
|
|||
|
|
@ -125,3 +125,4 @@ export {
|
|||
FileOperation,
|
||||
} from './metrics.js';
|
||||
export { QwenLogger } from './qwen-logger/qwen-logger.js';
|
||||
export { sanitizeHookName } from './sanitize.js';
|
||||
|
|
|
|||
|
|
@ -54,6 +54,7 @@ import {
|
|||
logExtensionDisable,
|
||||
logExtensionInstallEvent,
|
||||
logExtensionUninstall,
|
||||
logHookCall,
|
||||
} from './loggers.js';
|
||||
import * as metrics from './metrics.js';
|
||||
import { QwenLogger } from './qwen-logger/qwen-logger.js';
|
||||
|
|
@ -75,6 +76,7 @@ import {
|
|||
ExtensionDisableEvent,
|
||||
ExtensionInstallEvent,
|
||||
ExtensionUninstallEvent,
|
||||
HookCallEvent,
|
||||
} from './types.js';
|
||||
import { FileOperation } from './metrics.js';
|
||||
import type {
|
||||
|
|
@ -1281,4 +1283,230 @@ describe('loggers', () => {
|
|||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('logHookCall', () => {
|
||||
const mockConfig = {
|
||||
getSessionId: () => 'test-session-id',
|
||||
getTargetDir: () => 'target-dir',
|
||||
getUsageStatisticsEnabled: () => true,
|
||||
getTelemetryEnabled: () => true,
|
||||
getTelemetryLogPromptsEnabled: () => true,
|
||||
} as unknown as Config;
|
||||
|
||||
const mockQwenLogger = {
|
||||
logHookCallEvent: vi.fn(),
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
vi.spyOn(QwenLogger, 'getInstance').mockReturnValue(
|
||||
mockQwenLogger as unknown as QwenLogger,
|
||||
);
|
||||
mockQwenLogger.logHookCallEvent.mockClear();
|
||||
});
|
||||
|
||||
it('should log a successful hook call to QwenLogger', () => {
|
||||
const event = new HookCallEvent(
|
||||
'UserPromptSubmit',
|
||||
'command',
|
||||
'check-secrets.sh',
|
||||
{ prompt: 'test prompt' },
|
||||
150,
|
||||
true,
|
||||
{ output: 'success' },
|
||||
0,
|
||||
'stdout message',
|
||||
'stderr message',
|
||||
undefined,
|
||||
);
|
||||
|
||||
logHookCall(mockConfig, event);
|
||||
|
||||
// Should call QwenLogger
|
||||
expect(mockQwenLogger.logHookCallEvent).toHaveBeenCalledWith(event);
|
||||
});
|
||||
|
||||
it('should log a failed hook call with error', () => {
|
||||
const event = new HookCallEvent(
|
||||
'Stop',
|
||||
'command',
|
||||
'cleanup.sh',
|
||||
{ last_assistant_message: 'final message' },
|
||||
200,
|
||||
false,
|
||||
undefined,
|
||||
1,
|
||||
'stdout message',
|
||||
'stderr message',
|
||||
'Error occurred',
|
||||
);
|
||||
|
||||
logHookCall(mockConfig, event);
|
||||
|
||||
// Should call QwenLogger
|
||||
expect(mockQwenLogger.logHookCallEvent).toHaveBeenCalledWith(event);
|
||||
});
|
||||
|
||||
it('should handle when QwenLogger is not available', () => {
|
||||
vi.spyOn(QwenLogger, 'getInstance').mockReturnValue(undefined);
|
||||
|
||||
const event = new HookCallEvent(
|
||||
'UserPromptSubmit',
|
||||
'command',
|
||||
'test-hook.sh',
|
||||
{ prompt: 'test' },
|
||||
100,
|
||||
true,
|
||||
);
|
||||
|
||||
// Should not throw when QwenLogger is not available
|
||||
expect(() => logHookCall(mockConfig, event)).not.toThrow();
|
||||
});
|
||||
|
||||
it('should log hook call with all optional fields', () => {
|
||||
const event = new HookCallEvent(
|
||||
'PreToolUse',
|
||||
'command',
|
||||
'validator.sh',
|
||||
{ tool_name: 'read_file', path: '/test/file.txt' },
|
||||
250,
|
||||
true,
|
||||
{ decision: 'allow', reason: 'validated' },
|
||||
0,
|
||||
'validation passed',
|
||||
'',
|
||||
undefined,
|
||||
);
|
||||
|
||||
logHookCall(mockConfig, event);
|
||||
|
||||
expect(mockQwenLogger.logHookCallEvent).toHaveBeenCalledWith(event);
|
||||
});
|
||||
|
||||
it('should log hook call with minimal fields', () => {
|
||||
const event = new HookCallEvent(
|
||||
'SessionStart',
|
||||
'command',
|
||||
'init.sh',
|
||||
{},
|
||||
10,
|
||||
true,
|
||||
);
|
||||
|
||||
logHookCall(mockConfig, event);
|
||||
|
||||
expect(mockQwenLogger.logHookCallEvent).toHaveBeenCalledWith(event);
|
||||
});
|
||||
|
||||
it('should log hook call with exit code', () => {
|
||||
const event = new HookCallEvent(
|
||||
'PostToolUseFailure',
|
||||
'command',
|
||||
'error-handler.sh',
|
||||
{ tool_name: 'shell' },
|
||||
50,
|
||||
false,
|
||||
undefined,
|
||||
1,
|
||||
'',
|
||||
'error output',
|
||||
'Command failed with exit code 1',
|
||||
);
|
||||
|
||||
logHookCall(mockConfig, event);
|
||||
|
||||
expect(mockQwenLogger.logHookCallEvent).toHaveBeenCalledWith(event);
|
||||
});
|
||||
|
||||
it('should log hook call with zero exit code on success', () => {
|
||||
const event = new HookCallEvent(
|
||||
'PostToolUse',
|
||||
'command',
|
||||
'success-handler.sh',
|
||||
{ tool_name: 'write_file' },
|
||||
100,
|
||||
true,
|
||||
{ result: 'ok' },
|
||||
0,
|
||||
'done',
|
||||
'',
|
||||
undefined,
|
||||
);
|
||||
|
||||
logHookCall(mockConfig, event);
|
||||
|
||||
expect(mockQwenLogger.logHookCallEvent).toHaveBeenCalledWith(event);
|
||||
});
|
||||
|
||||
it('should log hook call with non-zero exit code on failure', () => {
|
||||
const event = new HookCallEvent(
|
||||
'PostToolUseFailure',
|
||||
'command',
|
||||
'failure-handler.sh',
|
||||
{ tool_name: 'shell' },
|
||||
75,
|
||||
false,
|
||||
undefined,
|
||||
127,
|
||||
'',
|
||||
'command not found',
|
||||
'Hook command not found',
|
||||
);
|
||||
|
||||
logHookCall(mockConfig, event);
|
||||
|
||||
expect(mockQwenLogger.logHookCallEvent).toHaveBeenCalledWith(event);
|
||||
});
|
||||
|
||||
it('should log all hook event types', () => {
|
||||
const eventTypes = [
|
||||
'PreToolUse',
|
||||
'PostToolUse',
|
||||
'PostToolUseFailure',
|
||||
'Notification',
|
||||
'UserPromptSubmit',
|
||||
'SessionStart',
|
||||
'SessionEnd',
|
||||
'Stop',
|
||||
'SubagentStart',
|
||||
'SubagentStop',
|
||||
'PreCompact',
|
||||
'PermissionRequest',
|
||||
];
|
||||
|
||||
for (const eventType of eventTypes) {
|
||||
mockQwenLogger.logHookCallEvent.mockClear();
|
||||
|
||||
const event = new HookCallEvent(
|
||||
eventType,
|
||||
'command',
|
||||
'test-hook.sh',
|
||||
{},
|
||||
100,
|
||||
true,
|
||||
);
|
||||
|
||||
logHookCall(mockConfig, event);
|
||||
|
||||
expect(mockQwenLogger.logHookCallEvent).toHaveBeenCalledWith(event);
|
||||
}
|
||||
});
|
||||
|
||||
it('should pass the exact event object to QwenLogger', () => {
|
||||
const event = new HookCallEvent(
|
||||
'PreToolUse',
|
||||
'command',
|
||||
'test-hook.sh',
|
||||
{ tool_name: 'read_file' },
|
||||
100,
|
||||
true,
|
||||
);
|
||||
|
||||
logHookCall(mockConfig, event);
|
||||
|
||||
// Verify the exact event object is passed
|
||||
expect(mockQwenLogger.logHookCallEvent).toHaveBeenCalledTimes(1);
|
||||
const passedEvent = mockQwenLogger.logHookCallEvent.mock.calls[0][0];
|
||||
expect(passedEvent).toBe(event);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -102,6 +102,7 @@ import type {
|
|||
ArenaAgentCompletedEvent,
|
||||
ArenaSessionEndedEvent,
|
||||
} from './types.js';
|
||||
import type { HookCallEvent } from './types.js';
|
||||
import type { UiEvent } from './uiTelemetry.js';
|
||||
import { uiTelemetryService } from './uiTelemetry.js';
|
||||
|
||||
|
|
@ -114,6 +115,8 @@ function getCommonAttributes(config: Config): LogAttributes {
|
|||
};
|
||||
}
|
||||
|
||||
export { getCommonAttributes };
|
||||
|
||||
export function logStartSession(
|
||||
config: Config,
|
||||
event: StartSessionEvent,
|
||||
|
|
@ -787,6 +790,11 @@ export function logModelSlashCommand(
|
|||
recordModelSlashCommand(config, event);
|
||||
}
|
||||
|
||||
export function logHookCall(config: Config, event: HookCallEvent): void {
|
||||
// Log to QwenLogger for RUM telemetry only
|
||||
QwenLogger.getInstance(config)?.logHookCallEvent(event);
|
||||
}
|
||||
|
||||
export function logExtensionInstallEvent(
|
||||
config: Config,
|
||||
event: ExtensionInstallEvent,
|
||||
|
|
|
|||
|
|
@ -23,6 +23,7 @@ import {
|
|||
IdeConnectionEvent,
|
||||
KittySequenceOverflowEvent,
|
||||
IdeConnectionType,
|
||||
HookCallEvent,
|
||||
} from '../types.js';
|
||||
import type { RumEvent, RumPayload } from './event-types.js';
|
||||
|
||||
|
|
@ -517,4 +518,312 @@ describe('QwenLogger', () => {
|
|||
expect(TEST_ONLY.FLUSH_INTERVAL_MS).toBe(60000);
|
||||
});
|
||||
});
|
||||
|
||||
describe('logHookCallEvent', () => {
|
||||
it('should log a successful hook call event', () => {
|
||||
const logger = QwenLogger.getInstance(mockConfig)!;
|
||||
const enqueueSpy = vi.spyOn(logger, 'enqueueLogEvent');
|
||||
|
||||
const event = new HookCallEvent(
|
||||
'PreToolUse',
|
||||
'command',
|
||||
'check-secrets.sh',
|
||||
{ tool_name: 'read_file' },
|
||||
150,
|
||||
true,
|
||||
{ result: 'valid' },
|
||||
0,
|
||||
'stdout',
|
||||
'stderr',
|
||||
undefined,
|
||||
);
|
||||
|
||||
logger.logHookCallEvent(event);
|
||||
|
||||
expect(enqueueSpy).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
event_type: 'action',
|
||||
type: 'hook',
|
||||
name: 'hook_call#PreToolUse',
|
||||
properties: expect.objectContaining({
|
||||
hook_event_name: 'PreToolUse',
|
||||
hook_type: 'command',
|
||||
hook_name: 'check-secrets.sh',
|
||||
duration_ms: 150,
|
||||
success: 1,
|
||||
exit_code: 0,
|
||||
}),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should log a failed hook call event with error when telemetry log prompts enabled', () => {
|
||||
const configWithLogPrompts = makeFakeConfig({
|
||||
getTelemetryLogPromptsEnabled: () => true,
|
||||
});
|
||||
const logger = QwenLogger.getInstance(configWithLogPrompts)!;
|
||||
const enqueueSpy = vi.spyOn(logger, 'enqueueLogEvent');
|
||||
|
||||
const event = new HookCallEvent(
|
||||
'PostToolUse',
|
||||
'command',
|
||||
'cleanup.sh',
|
||||
{ tool_name: 'shell' },
|
||||
200,
|
||||
false,
|
||||
undefined,
|
||||
1,
|
||||
'',
|
||||
'error output',
|
||||
'Command failed',
|
||||
);
|
||||
|
||||
logger.logHookCallEvent(event);
|
||||
|
||||
expect(enqueueSpy).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
event_type: 'action',
|
||||
type: 'hook',
|
||||
name: 'hook_call#PostToolUse',
|
||||
properties: expect.objectContaining({
|
||||
hook_event_name: 'PostToolUse',
|
||||
hook_type: 'command',
|
||||
hook_name: 'cleanup.sh',
|
||||
duration_ms: 200,
|
||||
success: 0,
|
||||
exit_code: 1,
|
||||
error: 'Command failed',
|
||||
}),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should not include error when telemetry log prompts disabled', () => {
|
||||
const configWithoutLogPrompts = makeFakeConfig({
|
||||
getTelemetryLogPromptsEnabled: () => false,
|
||||
});
|
||||
// Clear singleton to create new instance with different config
|
||||
(QwenLogger as unknown as { instance: undefined }).instance = undefined;
|
||||
const logger = QwenLogger.getInstance(configWithoutLogPrompts)!;
|
||||
const enqueueSpy = vi.spyOn(logger, 'enqueueLogEvent');
|
||||
|
||||
const event = new HookCallEvent(
|
||||
'PostToolUse',
|
||||
'command',
|
||||
'cleanup.sh',
|
||||
{ tool_name: 'shell' },
|
||||
200,
|
||||
false,
|
||||
undefined,
|
||||
1,
|
||||
'',
|
||||
'error output',
|
||||
'Command failed with sensitive data',
|
||||
);
|
||||
|
||||
logger.logHookCallEvent(event);
|
||||
|
||||
expect(enqueueSpy).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
properties: expect.objectContaining({
|
||||
hook_event_name: 'PostToolUse',
|
||||
hook_type: 'command',
|
||||
hook_name: 'cleanup.sh',
|
||||
duration_ms: 200,
|
||||
success: 0,
|
||||
exit_code: 1,
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
// Error should NOT be in properties
|
||||
const callArgs = enqueueSpy.mock.calls[0][0];
|
||||
expect(callArgs.properties).not.toHaveProperty('error');
|
||||
});
|
||||
|
||||
it('should sanitize hook name to remove sensitive information', () => {
|
||||
const logger = QwenLogger.getInstance(mockConfig)!;
|
||||
const enqueueSpy = vi.spyOn(logger, 'enqueueLogEvent');
|
||||
|
||||
// Hook name with full path and sensitive arguments
|
||||
const event = new HookCallEvent(
|
||||
'PreToolUse',
|
||||
'command',
|
||||
'/home/user/.qwen/hooks/check-secrets.sh --api-key=secret123',
|
||||
{ tool_name: 'read_file' },
|
||||
100,
|
||||
true,
|
||||
);
|
||||
|
||||
logger.logHookCallEvent(event);
|
||||
|
||||
expect(enqueueSpy).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
properties: expect.objectContaining({
|
||||
// Should be sanitized to just the basename without arguments
|
||||
hook_name: 'check-secrets.sh',
|
||||
}),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should sanitize hook name with Windows path', () => {
|
||||
const logger = QwenLogger.getInstance(mockConfig)!;
|
||||
const enqueueSpy = vi.spyOn(logger, 'enqueueLogEvent');
|
||||
|
||||
const event = new HookCallEvent(
|
||||
'Stop',
|
||||
'command',
|
||||
'C:\\Users\\user\\hooks\\cleanup.bat --token=xyz',
|
||||
{},
|
||||
50,
|
||||
true,
|
||||
);
|
||||
|
||||
logger.logHookCallEvent(event);
|
||||
|
||||
expect(enqueueSpy).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
properties: expect.objectContaining({
|
||||
hook_name: 'cleanup.bat',
|
||||
}),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle empty hook name', () => {
|
||||
const logger = QwenLogger.getInstance(mockConfig)!;
|
||||
const enqueueSpy = vi.spyOn(logger, 'enqueueLogEvent');
|
||||
|
||||
const event = new HookCallEvent(
|
||||
'SessionStart',
|
||||
'command',
|
||||
'',
|
||||
{},
|
||||
10,
|
||||
true,
|
||||
);
|
||||
|
||||
logger.logHookCallEvent(event);
|
||||
|
||||
expect(enqueueSpy).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
properties: expect.objectContaining({
|
||||
hook_name: 'unknown-command',
|
||||
}),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle hook name with only whitespace', () => {
|
||||
const logger = QwenLogger.getInstance(mockConfig)!;
|
||||
const enqueueSpy = vi.spyOn(logger, 'enqueueLogEvent');
|
||||
|
||||
const event = new HookCallEvent(
|
||||
'SessionEnd',
|
||||
'command',
|
||||
' ',
|
||||
{},
|
||||
10,
|
||||
true,
|
||||
);
|
||||
|
||||
logger.logHookCallEvent(event);
|
||||
|
||||
expect(enqueueSpy).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
properties: expect.objectContaining({
|
||||
hook_name: 'unknown-command',
|
||||
}),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle hook name that is just a command without path', () => {
|
||||
const logger = QwenLogger.getInstance(mockConfig)!;
|
||||
const enqueueSpy = vi.spyOn(logger, 'enqueueLogEvent');
|
||||
|
||||
const event = new HookCallEvent(
|
||||
'Notification',
|
||||
'command',
|
||||
'python --arg=value',
|
||||
{},
|
||||
100,
|
||||
true,
|
||||
);
|
||||
|
||||
logger.logHookCallEvent(event);
|
||||
|
||||
expect(enqueueSpy).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
properties: expect.objectContaining({
|
||||
// Should be sanitized to just the command name
|
||||
hook_name: 'python',
|
||||
}),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should call flushIfNeeded after logging', () => {
|
||||
const logger = QwenLogger.getInstance(mockConfig)!;
|
||||
const flushSpy = vi.spyOn(logger, 'flushIfNeeded');
|
||||
|
||||
const event = new HookCallEvent(
|
||||
'PreToolUse',
|
||||
'command',
|
||||
'test-hook.sh',
|
||||
{},
|
||||
100,
|
||||
true,
|
||||
);
|
||||
|
||||
logger.logHookCallEvent(event);
|
||||
|
||||
expect(flushSpy).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle all hook event types', () => {
|
||||
const logger = QwenLogger.getInstance(mockConfig)!;
|
||||
const enqueueSpy = vi.spyOn(logger, 'enqueueLogEvent');
|
||||
|
||||
const eventTypes = [
|
||||
'PreToolUse',
|
||||
'PostToolUse',
|
||||
'PostToolUseFailure',
|
||||
'Notification',
|
||||
'UserPromptSubmit',
|
||||
'SessionStart',
|
||||
'SessionEnd',
|
||||
'Stop',
|
||||
'SubagentStart',
|
||||
'SubagentStop',
|
||||
'PreCompact',
|
||||
'PermissionRequest',
|
||||
];
|
||||
|
||||
for (const eventType of eventTypes) {
|
||||
enqueueSpy.mockClear();
|
||||
|
||||
const event = new HookCallEvent(
|
||||
eventType,
|
||||
'command',
|
||||
'test-hook.sh',
|
||||
{},
|
||||
100,
|
||||
true,
|
||||
);
|
||||
|
||||
logger.logHookCallEvent(event);
|
||||
|
||||
expect(enqueueSpy).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
name: `hook_call#${eventType}`,
|
||||
properties: expect.objectContaining({
|
||||
hook_event_name: eventType,
|
||||
}),
|
||||
}),
|
||||
);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -49,6 +49,7 @@ import type {
|
|||
ArenaSessionStartedEvent,
|
||||
ArenaAgentCompletedEvent,
|
||||
ArenaSessionEndedEvent,
|
||||
HookCallEvent,
|
||||
} from '../types.js';
|
||||
import type {
|
||||
RumEvent,
|
||||
|
|
@ -65,6 +66,7 @@ import {
|
|||
type DebugLogger,
|
||||
} from '../../utils/debugLogger.js';
|
||||
import { safeJsonStringify } from '../../utils/safeJsonStringify.js';
|
||||
import { sanitizeHookName } from '../sanitize.js';
|
||||
import { InstallationManager } from '../../utils/installationManager.js';
|
||||
import { FixedDeque } from 'mnemonist';
|
||||
import { AuthType } from '../../core/contentGenerator.js';
|
||||
|
|
@ -995,6 +997,37 @@ export class QwenLogger {
|
|||
this.flushIfNeeded();
|
||||
}
|
||||
|
||||
/**
|
||||
* Log a hook call event
|
||||
* Records hook execution telemetry for observability
|
||||
*/
|
||||
logHookCallEvent(event: HookCallEvent): void {
|
||||
// Sanitize hook name to remove potentially sensitive information
|
||||
const sanitizedHookName = sanitizeHookName(event.hook_name);
|
||||
|
||||
const properties: Record<string, unknown> = {
|
||||
hook_event_name: event.hook_event_name,
|
||||
hook_type: event.hook_type,
|
||||
hook_name: sanitizedHookName,
|
||||
duration_ms: event.duration_ms,
|
||||
success: event.success ? 1 : 0,
|
||||
exit_code: event.exit_code,
|
||||
};
|
||||
|
||||
if (event.error && this.config?.getTelemetryLogPromptsEnabled()) {
|
||||
properties['error'] = event.error;
|
||||
}
|
||||
|
||||
const rumEvent = this.createActionEvent(
|
||||
'hook',
|
||||
`hook_call#${event.hook_event_name}`,
|
||||
{ properties },
|
||||
);
|
||||
|
||||
this.enqueueLogEvent(rumEvent);
|
||||
this.flushIfNeeded();
|
||||
}
|
||||
|
||||
getProxyAgent() {
|
||||
const proxyUrl = this.config?.getProxy();
|
||||
if (!proxyUrl) return undefined;
|
||||
|
|
|
|||
75
packages/core/src/telemetry/sanitize.test.ts
Normal file
75
packages/core/src/telemetry/sanitize.test.ts
Normal file
|
|
@ -0,0 +1,75 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2026 Qwen Team
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, expect, it } from 'vitest';
|
||||
import { sanitizeHookName } from './sanitize.js';
|
||||
|
||||
describe('sanitizeHookName', () => {
|
||||
it('should return "unknown-command" for empty string', () => {
|
||||
expect(sanitizeHookName('')).toBe('unknown-command');
|
||||
});
|
||||
|
||||
it('should return "unknown-command" for whitespace-only string', () => {
|
||||
expect(sanitizeHookName(' ')).toBe('unknown-command');
|
||||
expect(sanitizeHookName('\t\n\r')).toBe('unknown-command');
|
||||
});
|
||||
|
||||
it('should return "unknown-command" for null/undefined values', () => {
|
||||
// Testing the function behavior with falsy inputs
|
||||
expect(sanitizeHookName('')).toBe('unknown-command');
|
||||
});
|
||||
|
||||
it('should extract command name from full path on Unix systems', () => {
|
||||
expect(sanitizeHookName('/usr/bin/git')).toBe('git');
|
||||
expect(sanitizeHookName('/path/to/.gemini/hooks/check-secrets.sh')).toBe(
|
||||
'check-secrets.sh',
|
||||
);
|
||||
expect(sanitizeHookName('/home/user/script.py --arg=value')).toBe(
|
||||
'script.py',
|
||||
);
|
||||
});
|
||||
|
||||
it('should extract command name from full path on Windows systems', () => {
|
||||
expect(sanitizeHookName('C:\\Windows\\System32\\cmd.exe')).toBe('cmd.exe');
|
||||
expect(sanitizeHookName('C:\\Users\\User\\Documents\\test.bat /c')).toBe(
|
||||
'test.bat',
|
||||
);
|
||||
});
|
||||
|
||||
it('should return the command name without arguments for simple commands', () => {
|
||||
expect(sanitizeHookName('git status')).toBe('git');
|
||||
expect(sanitizeHookName('node index.js')).toBe('node');
|
||||
expect(sanitizeHookName('python script.py --api-key=abc123')).toBe(
|
||||
'python',
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle relative paths correctly', () => {
|
||||
expect(sanitizeHookName('./my-script.sh')).toBe('my-script.sh');
|
||||
expect(sanitizeHookName('../tools/tool.exe')).toBe('tool.exe');
|
||||
});
|
||||
|
||||
it('should handle complex command lines', () => {
|
||||
expect(
|
||||
sanitizeHookName(
|
||||
'/path/to/.gemini/hooks/check-secrets.sh --api-key=abc123',
|
||||
),
|
||||
).toBe('check-secrets.sh');
|
||||
expect(
|
||||
sanitizeHookName('python /home/user/script.py --token=xyz --verbose'),
|
||||
).toBe('python');
|
||||
});
|
||||
|
||||
it('should handle edge cases', () => {
|
||||
expect(sanitizeHookName('simple-command')).toBe('simple-command');
|
||||
expect(sanitizeHookName('one-word')).toBe('one-word');
|
||||
});
|
||||
|
||||
it('should return "unknown-command" for malformed paths', () => {
|
||||
expect(sanitizeHookName('/')).toBe('unknown-command');
|
||||
expect(sanitizeHookName('\\')).toBe('unknown-command');
|
||||
});
|
||||
});
|
||||
52
packages/core/src/telemetry/sanitize.ts
Normal file
52
packages/core/src/telemetry/sanitize.ts
Normal file
|
|
@ -0,0 +1,52 @@
|
|||
/**
|
||||
* @license
|
||||
* Copyright 2026 Qwen Team
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
/**
|
||||
* Sanitize hook name to remove potentially sensitive information.
|
||||
* Extracts the base command name without arguments or full paths.
|
||||
*
|
||||
* This function protects PII by removing:
|
||||
* - Full file paths that may contain usernames
|
||||
* - Command arguments that may contain credentials, API keys, tokens
|
||||
* - Environment variables with sensitive values
|
||||
*
|
||||
* Examples:
|
||||
* - "/path/to/.gemini/hooks/check-secrets.sh --api-key=abc123" -> "check-secrets.sh"
|
||||
* - "python /home/user/script.py --token=xyz" -> "python"
|
||||
* - "node index.js" -> "node"
|
||||
* - "C:\\Windows\\System32\\cmd.exe /c secret.bat" -> "cmd.exe"
|
||||
* - "" or " " -> "unknown-command"
|
||||
*
|
||||
* @param hookName Full command string.
|
||||
* @returns Sanitized command name.
|
||||
*/
|
||||
export function sanitizeHookName(hookName: string): string {
|
||||
// Handle empty or whitespace-only strings
|
||||
if (!hookName || !hookName.trim()) {
|
||||
return 'unknown-command';
|
||||
}
|
||||
|
||||
// Split by spaces to get command parts
|
||||
const parts = hookName.trim().split(/\s+/);
|
||||
if (parts.length === 0) {
|
||||
return 'unknown-command';
|
||||
}
|
||||
|
||||
// Get the first part (the command)
|
||||
const command = parts[0];
|
||||
if (!command) {
|
||||
return 'unknown-command';
|
||||
}
|
||||
|
||||
// If it's a path, extract just the basename
|
||||
if (command.includes('/') || command.includes('\\')) {
|
||||
const pathParts = command.split(/[/\\]/);
|
||||
const basename = pathParts[pathParts.length - 1];
|
||||
return basename || 'unknown-command';
|
||||
}
|
||||
|
||||
return command;
|
||||
}
|
||||
|
|
@ -802,6 +802,53 @@ export class AuthEvent implements BaseTelemetryEvent {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook call telemetry event
|
||||
*/
|
||||
export class HookCallEvent implements BaseTelemetryEvent {
|
||||
'event.name': string;
|
||||
'event.timestamp': string;
|
||||
hook_event_name: string;
|
||||
hook_type: 'command';
|
||||
hook_name: string;
|
||||
hook_input: Record<string, unknown>;
|
||||
hook_output?: Record<string, unknown>;
|
||||
exit_code?: number;
|
||||
stdout?: string;
|
||||
stderr?: string;
|
||||
duration_ms: number;
|
||||
success: boolean;
|
||||
error?: string;
|
||||
|
||||
constructor(
|
||||
hookEventName: string,
|
||||
hookType: 'command',
|
||||
hookName: string,
|
||||
hookInput: Record<string, unknown>,
|
||||
durationMs: number,
|
||||
success: boolean,
|
||||
hookOutput?: Record<string, unknown>,
|
||||
exitCode?: number,
|
||||
stdout?: string,
|
||||
stderr?: string,
|
||||
error?: string,
|
||||
) {
|
||||
this['event.name'] = 'hook_call';
|
||||
this['event.timestamp'] = new Date().toISOString();
|
||||
this.hook_event_name = hookEventName;
|
||||
this.hook_type = hookType;
|
||||
this.hook_name = hookName;
|
||||
this.hook_input = hookInput;
|
||||
this.hook_output = hookOutput;
|
||||
this.exit_code = exitCode;
|
||||
this.stdout = stdout;
|
||||
this.stderr = stderr;
|
||||
this.duration_ms = durationMs;
|
||||
this.success = success;
|
||||
this.error = error;
|
||||
}
|
||||
}
|
||||
|
||||
export class SkillLaunchEvent implements BaseTelemetryEvent {
|
||||
'event.name': 'skill_launch';
|
||||
'event.timestamp': string;
|
||||
|
|
@ -877,6 +924,7 @@ export type TelemetryEvent =
|
|||
| ToolOutputTruncatedEvent
|
||||
| ModelSlashCommandEvent
|
||||
| AuthEvent
|
||||
| HookCallEvent
|
||||
| SkillLaunchEvent
|
||||
| UserFeedbackEvent
|
||||
| ArenaSessionStartedEvent
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@ import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
|
|||
import { AgentTool, type AgentParams } from './agent.js';
|
||||
import type { PartListUnion } from '@google/genai';
|
||||
import type { ToolResultDisplay, AgentResultDisplay } from './tools.js';
|
||||
import { ToolConfirmationOutcome } from './tools.js';
|
||||
import type { Config } from '../config/config.js';
|
||||
import { SubagentManager } from '../subagents/subagent-manager.js';
|
||||
import type { SubagentConfig } from '../subagents/types.js';
|
||||
|
|
@ -16,22 +17,32 @@ import {
|
|||
type AgentHeadless,
|
||||
ContextState,
|
||||
} from '../agents/runtime/agent-headless.js';
|
||||
import { AgentEventType } from '../agents/runtime/agent-events.js';
|
||||
import type {
|
||||
AgentToolCallEvent,
|
||||
AgentToolResultEvent,
|
||||
AgentApprovalRequestEvent,
|
||||
AgentEventEmitter,
|
||||
} from '../agents/runtime/agent-events.js';
|
||||
import { partToString } from '../utils/partUtils.js';
|
||||
import type { HookSystem } from '../hooks/hookSystem.js';
|
||||
import { PermissionMode } from '../hooks/types.js';
|
||||
|
||||
// Type for accessing protected methods in tests
|
||||
type AgentToolInvocation = {
|
||||
execute: (
|
||||
signal?: AbortSignal,
|
||||
updateOutput?: (output: ToolResultDisplay) => void,
|
||||
) => Promise<{
|
||||
llmContent: PartListUnion;
|
||||
returnDisplay: ToolResultDisplay;
|
||||
}>;
|
||||
getDescription: () => string;
|
||||
eventEmitter: AgentEventEmitter;
|
||||
};
|
||||
|
||||
type AgentToolWithProtectedMethods = AgentTool & {
|
||||
createInvocation: (params: AgentParams) => {
|
||||
execute: (
|
||||
signal?: AbortSignal,
|
||||
liveOutputCallback?: (chunk: string) => void,
|
||||
) => Promise<{
|
||||
llmContent: PartListUnion;
|
||||
returnDisplay: ToolResultDisplay;
|
||||
}>;
|
||||
getDescription: () => string;
|
||||
};
|
||||
createInvocation: (params: AgentParams) => AgentToolInvocation;
|
||||
};
|
||||
|
||||
// Mock dependencies
|
||||
|
|
@ -1001,4 +1012,295 @@ describe('AgentTool', () => {
|
|||
expect(startAgentId).toMatch(/^file-search-\d+$/);
|
||||
});
|
||||
});
|
||||
|
||||
describe('IDE diff-tab confirmation clears pendingConfirmation', () => {
|
||||
let mockAgent: AgentHeadless;
|
||||
let mockContextState: ContextState;
|
||||
|
||||
// We capture the eventEmitter from the invocation so we can simulate
|
||||
// events during subagent execution.
|
||||
let capturedInvocation: AgentToolInvocation;
|
||||
|
||||
beforeEach(() => {
|
||||
mockContextState = {
|
||||
set: vi.fn(),
|
||||
} as unknown as ContextState;
|
||||
|
||||
MockedContextState.mockImplementation(() => mockContextState);
|
||||
|
||||
vi.mocked(mockSubagentManager.loadSubagent).mockResolvedValue(
|
||||
mockSubagents[0],
|
||||
);
|
||||
});
|
||||
|
||||
function createInvocationWithEventDrivenAgent(
|
||||
emitDuringExecute: (emitter: AgentEventEmitter) => void,
|
||||
) {
|
||||
// Create a mock agent whose execute() emits events on the invocation's
|
||||
// eventEmitter, simulating a real subagent lifecycle.
|
||||
mockAgent = {
|
||||
execute: vi.fn(),
|
||||
result: 'Done',
|
||||
terminateMode: AgentTerminateMode.GOAL,
|
||||
getFinalText: vi.fn().mockReturnValue('Done'),
|
||||
formatCompactResult: vi.fn().mockReturnValue('✅ Success'),
|
||||
getExecutionSummary: vi.fn().mockReturnValue({
|
||||
rounds: 1,
|
||||
totalDurationMs: 100,
|
||||
totalToolCalls: 1,
|
||||
successfulToolCalls: 1,
|
||||
failedToolCalls: 0,
|
||||
successRate: 100,
|
||||
inputTokens: 10,
|
||||
outputTokens: 5,
|
||||
totalTokens: 15,
|
||||
toolUsage: [],
|
||||
}),
|
||||
getStatistics: vi.fn().mockReturnValue({
|
||||
rounds: 1,
|
||||
totalDurationMs: 100,
|
||||
totalToolCalls: 1,
|
||||
successfulToolCalls: 1,
|
||||
failedToolCalls: 0,
|
||||
}),
|
||||
getTerminateMode: vi.fn().mockReturnValue(AgentTerminateMode.GOAL),
|
||||
} as unknown as AgentHeadless;
|
||||
|
||||
vi.mocked(mockAgent.execute).mockImplementation(async () => {
|
||||
emitDuringExecute(capturedInvocation.eventEmitter);
|
||||
});
|
||||
|
||||
vi.mocked(mockSubagentManager.createAgentHeadless).mockResolvedValue(
|
||||
mockAgent,
|
||||
);
|
||||
|
||||
const params: AgentParams = {
|
||||
description: 'Edit files',
|
||||
prompt: 'Fix the bug',
|
||||
subagent_type: 'file-search',
|
||||
};
|
||||
|
||||
capturedInvocation = (
|
||||
agentTool as AgentToolWithProtectedMethods
|
||||
).createInvocation(params);
|
||||
|
||||
return capturedInvocation;
|
||||
}
|
||||
|
||||
it('should clear pendingConfirmation when TOOL_RESULT arrives for the pending tool (IDE accept path)', async () => {
|
||||
// Track whether pendingConfirmation was set then cleared, using
|
||||
// snapshots that safely handle function properties (structuredClone
|
||||
// can't serialize functions).
|
||||
const snapshots: Array<{
|
||||
hasPendingConfirmation: boolean;
|
||||
toolStatuses: Array<{ callId: string; status: string }>;
|
||||
}> = [];
|
||||
|
||||
const invocation = createInvocationWithEventDrivenAgent((emitter) => {
|
||||
emitter.emit(AgentEventType.TOOL_CALL, {
|
||||
subagentId: 'sub-1',
|
||||
round: 1,
|
||||
callId: 'call-edit-1',
|
||||
name: 'edit_file',
|
||||
args: { path: '/test.ts' },
|
||||
description: 'Editing test.ts',
|
||||
timestamp: Date.now(),
|
||||
} satisfies AgentToolCallEvent);
|
||||
|
||||
// Tool needs approval → pendingConfirmation is set
|
||||
emitter.emit(AgentEventType.TOOL_WAITING_APPROVAL, {
|
||||
subagentId: 'sub-1',
|
||||
round: 1,
|
||||
callId: 'call-edit-1',
|
||||
name: 'edit_file',
|
||||
description: 'Editing test.ts',
|
||||
timestamp: Date.now(),
|
||||
confirmationDetails: {
|
||||
type: 'edit' as const,
|
||||
title: 'Edit file',
|
||||
fileName: 'test.ts',
|
||||
filePath: '/test.ts',
|
||||
fileDiff: '',
|
||||
originalContent: 'old',
|
||||
newContent: 'new',
|
||||
},
|
||||
respond: vi.fn(),
|
||||
} as unknown as AgentApprovalRequestEvent);
|
||||
|
||||
// IDE diff-tab accepted → TOOL_RESULT arrives without onConfirm
|
||||
emitter.emit(AgentEventType.TOOL_RESULT, {
|
||||
subagentId: 'sub-1',
|
||||
round: 1,
|
||||
callId: 'call-edit-1',
|
||||
name: 'edit_file',
|
||||
success: true,
|
||||
timestamp: Date.now(),
|
||||
} satisfies AgentToolResultEvent);
|
||||
});
|
||||
|
||||
await invocation.execute(undefined, (output) => {
|
||||
const display = output as AgentResultDisplay;
|
||||
snapshots.push({
|
||||
hasPendingConfirmation: display.pendingConfirmation !== undefined,
|
||||
toolStatuses: (display.toolCalls ?? []).map((tc) => ({
|
||||
callId: tc.callId,
|
||||
status: tc.status,
|
||||
})),
|
||||
});
|
||||
});
|
||||
|
||||
// Should have at least one snapshot with pendingConfirmation set
|
||||
const hasApproval = snapshots.some((s) => s.hasPendingConfirmation);
|
||||
expect(hasApproval).toBe(true);
|
||||
|
||||
// The final snapshot after TOOL_RESULT should have cleared it
|
||||
const resultSnapshot = snapshots.find(
|
||||
(s) =>
|
||||
!s.hasPendingConfirmation &&
|
||||
s.toolStatuses.some(
|
||||
(tc) => tc.callId === 'call-edit-1' && tc.status === 'success',
|
||||
),
|
||||
);
|
||||
expect(resultSnapshot).toBeDefined();
|
||||
});
|
||||
|
||||
it('should NOT clear pendingConfirmation when TOOL_RESULT is for a different tool', async () => {
|
||||
const snapshots: Array<{
|
||||
hasPendingConfirmation: boolean;
|
||||
toolStatuses: Array<{ callId: string; status: string }>;
|
||||
}> = [];
|
||||
|
||||
const invocation = createInvocationWithEventDrivenAgent((emitter) => {
|
||||
// Tool A starts
|
||||
emitter.emit(AgentEventType.TOOL_CALL, {
|
||||
subagentId: 'sub-1',
|
||||
round: 1,
|
||||
callId: 'call-read-1',
|
||||
name: 'read_file',
|
||||
args: {},
|
||||
description: 'Reading',
|
||||
timestamp: Date.now(),
|
||||
} satisfies AgentToolCallEvent);
|
||||
|
||||
// Tool B starts
|
||||
emitter.emit(AgentEventType.TOOL_CALL, {
|
||||
subagentId: 'sub-1',
|
||||
round: 1,
|
||||
callId: 'call-edit-1',
|
||||
name: 'edit_file',
|
||||
args: {},
|
||||
description: 'Editing',
|
||||
timestamp: Date.now(),
|
||||
} satisfies AgentToolCallEvent);
|
||||
|
||||
// Tool B needs approval
|
||||
emitter.emit(AgentEventType.TOOL_WAITING_APPROVAL, {
|
||||
subagentId: 'sub-1',
|
||||
round: 1,
|
||||
callId: 'call-edit-1',
|
||||
name: 'edit_file',
|
||||
description: 'Editing',
|
||||
timestamp: Date.now(),
|
||||
confirmationDetails: {
|
||||
type: 'edit' as const,
|
||||
title: 'Edit',
|
||||
fileName: 'test.ts',
|
||||
filePath: '/test.ts',
|
||||
fileDiff: '',
|
||||
originalContent: '',
|
||||
newContent: 'new',
|
||||
},
|
||||
respond: vi.fn(),
|
||||
} as unknown as AgentApprovalRequestEvent);
|
||||
|
||||
// Tool A finishes (different callId)
|
||||
emitter.emit(AgentEventType.TOOL_RESULT, {
|
||||
subagentId: 'sub-1',
|
||||
round: 1,
|
||||
callId: 'call-read-1',
|
||||
name: 'read_file',
|
||||
success: true,
|
||||
timestamp: Date.now(),
|
||||
} satisfies AgentToolResultEvent);
|
||||
});
|
||||
|
||||
await invocation.execute(undefined, (output) => {
|
||||
const display = output as AgentResultDisplay;
|
||||
snapshots.push({
|
||||
hasPendingConfirmation: display.pendingConfirmation !== undefined,
|
||||
toolStatuses: (display.toolCalls ?? []).map((tc) => ({
|
||||
callId: tc.callId,
|
||||
status: tc.status,
|
||||
})),
|
||||
});
|
||||
});
|
||||
|
||||
// The snapshot for read_file's TOOL_RESULT should still have
|
||||
// pendingConfirmation because the result was for a different tool.
|
||||
const readResultSnapshot = snapshots.find((s) =>
|
||||
s.toolStatuses.some(
|
||||
(tc) => tc.callId === 'call-read-1' && tc.status === 'success',
|
||||
),
|
||||
);
|
||||
expect(readResultSnapshot).toBeDefined();
|
||||
expect(readResultSnapshot!.hasPendingConfirmation).toBe(true);
|
||||
});
|
||||
|
||||
it('should clear pendingConfirmation via onConfirm callback (terminal UI path)', async () => {
|
||||
let capturedOnConfirm:
|
||||
| ((outcome: ToolConfirmationOutcome) => Promise<void>)
|
||||
| undefined;
|
||||
const snapshots: Array<{ hasPendingConfirmation: boolean }> = [];
|
||||
|
||||
const invocation = createInvocationWithEventDrivenAgent((emitter) => {
|
||||
emitter.emit(AgentEventType.TOOL_CALL, {
|
||||
subagentId: 'sub-1',
|
||||
round: 1,
|
||||
callId: 'call-edit-1',
|
||||
name: 'edit_file',
|
||||
args: {},
|
||||
description: 'Editing',
|
||||
timestamp: Date.now(),
|
||||
} satisfies AgentToolCallEvent);
|
||||
|
||||
emitter.emit(AgentEventType.TOOL_WAITING_APPROVAL, {
|
||||
subagentId: 'sub-1',
|
||||
round: 1,
|
||||
callId: 'call-edit-1',
|
||||
name: 'edit_file',
|
||||
description: 'Editing',
|
||||
timestamp: Date.now(),
|
||||
confirmationDetails: {
|
||||
type: 'edit' as const,
|
||||
title: 'Edit',
|
||||
fileName: 'test.ts',
|
||||
filePath: '/test.ts',
|
||||
fileDiff: '',
|
||||
originalContent: '',
|
||||
newContent: 'new',
|
||||
},
|
||||
respond: vi.fn(),
|
||||
} as unknown as AgentApprovalRequestEvent);
|
||||
});
|
||||
|
||||
await invocation.execute(undefined, (output) => {
|
||||
const display = output as AgentResultDisplay;
|
||||
snapshots.push({
|
||||
hasPendingConfirmation: display.pendingConfirmation !== undefined,
|
||||
});
|
||||
if (display.pendingConfirmation?.onConfirm) {
|
||||
capturedOnConfirm = display.pendingConfirmation.onConfirm;
|
||||
}
|
||||
});
|
||||
|
||||
expect(capturedOnConfirm).toBeDefined();
|
||||
|
||||
// Call onConfirm as if the user pressed "accept" in the terminal UI
|
||||
snapshots.length = 0;
|
||||
await capturedOnConfirm!(ToolConfirmationOutcome.ProceedOnce);
|
||||
|
||||
// The onConfirm callback should have cleared pendingConfirmation
|
||||
expect(snapshots.some((s) => !s.hasPendingConfirmation)).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -308,6 +308,8 @@ class AgentToolInvocation extends BaseToolInvocation<AgentParams, ToolResult> {
|
|||
private setupEventListeners(
|
||||
updateOutput?: (output: ToolResultDisplay) => void,
|
||||
): void {
|
||||
let pendingConfirmationCallId: string | undefined;
|
||||
|
||||
this.eventEmitter.on(AgentEventType.START, () => {
|
||||
this.updateDisplay({ status: 'running' }, updateOutput);
|
||||
});
|
||||
|
|
@ -344,9 +346,22 @@ class AgentToolInvocation extends BaseToolInvocation<AgentParams, ToolResult> {
|
|||
responseParts: event.responseParts,
|
||||
};
|
||||
|
||||
// When a tool result arrives for the tool that had a pending
|
||||
// confirmation, clear the stale prompt. This handles the case where
|
||||
// the IDE diff-tab accept resolved the tool via CoreToolScheduler's
|
||||
// ideConfirmation.then path, which bypasses the UI's onConfirm wrapper.
|
||||
const clearPending =
|
||||
pendingConfirmationCallId === event.callId
|
||||
? { pendingConfirmation: undefined }
|
||||
: {};
|
||||
if (pendingConfirmationCallId === event.callId) {
|
||||
pendingConfirmationCallId = undefined;
|
||||
}
|
||||
|
||||
this.updateDisplay(
|
||||
{
|
||||
toolCalls: [...this.currentToolCalls!],
|
||||
...clearPending,
|
||||
},
|
||||
updateOutput,
|
||||
);
|
||||
|
|
@ -398,6 +413,7 @@ class AgentToolInvocation extends BaseToolInvocation<AgentParams, ToolResult> {
|
|||
}
|
||||
|
||||
// Bridge scheduler confirmation details to UI inline prompt
|
||||
pendingConfirmationCallId = event.callId;
|
||||
const details: ToolCallConfirmationDetails = {
|
||||
...(event.confirmationDetails as Omit<
|
||||
ToolCallConfirmationDetails,
|
||||
|
|
@ -409,6 +425,7 @@ class AgentToolInvocation extends BaseToolInvocation<AgentParams, ToolResult> {
|
|||
) => {
|
||||
// Clear the inline prompt immediately
|
||||
// and optimistically mark the tool as executing for proceed outcomes.
|
||||
pendingConfirmationCallId = undefined;
|
||||
const proceedOutcomes = new Set<ToolConfirmationOutcome>([
|
||||
ToolConfirmationOutcome.ProceedOnce,
|
||||
ToolConfirmationOutcome.ProceedAlways,
|
||||
|
|
|
|||
|
|
@ -366,6 +366,87 @@ describe('GlobTool', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('multi-directory workspace', () => {
|
||||
it('should search across all workspace directories when no path is specified', async () => {
|
||||
// Create a second workspace directory
|
||||
const secondDir = await fs.mkdtemp(
|
||||
path.join(os.tmpdir(), 'glob-tool-second-'),
|
||||
);
|
||||
await fs.writeFile(path.join(secondDir, '.git'), ''); // Fake git repo
|
||||
await fs.writeFile(path.join(secondDir, 'extra.txt'), 'extra content');
|
||||
await fs.writeFile(path.join(secondDir, 'bonus.txt'), 'bonus content');
|
||||
|
||||
const multiDirConfig = {
|
||||
...mockConfig,
|
||||
getWorkspaceContext: () =>
|
||||
createMockWorkspaceContext(tempRootDir, [secondDir]),
|
||||
} as unknown as Config;
|
||||
|
||||
const multiDirGlobTool = new GlobTool(multiDirConfig);
|
||||
const params: GlobToolParams = { pattern: '*.txt' };
|
||||
const invocation = multiDirGlobTool.build(params);
|
||||
const result = await invocation.execute(abortSignal);
|
||||
|
||||
// Should find files from both directories
|
||||
expect(result.llmContent).toContain(path.join(tempRootDir, 'fileA.txt'));
|
||||
expect(result.llmContent).toContain(path.join(secondDir, 'extra.txt'));
|
||||
expect(result.llmContent).toContain(path.join(secondDir, 'bonus.txt'));
|
||||
expect(result.llmContent).toContain('across 2 workspace directories');
|
||||
|
||||
await fs.rm(secondDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should deduplicate entries across overlapping directories', async () => {
|
||||
// Use the same directory twice to test deduplication
|
||||
const multiDirConfig = {
|
||||
...mockConfig,
|
||||
getWorkspaceContext: () =>
|
||||
createMockWorkspaceContext(tempRootDir, [tempRootDir]),
|
||||
} as unknown as Config;
|
||||
|
||||
const multiDirGlobTool = new GlobTool(multiDirConfig);
|
||||
const params: GlobToolParams = { pattern: '*.txt' };
|
||||
const invocation = multiDirGlobTool.build(params);
|
||||
const result = await invocation.execute(abortSignal);
|
||||
|
||||
// Should still only have 2 txt files (fileA.txt, FileB.TXT), not doubled
|
||||
expect(result.llmContent).toContain('Found 2 file(s)');
|
||||
});
|
||||
|
||||
it('should use single directory description when only one workspace dir', async () => {
|
||||
const params: GlobToolParams = { pattern: '*.txt' };
|
||||
const invocation = globTool.build(params);
|
||||
const result = await invocation.execute(abortSignal);
|
||||
|
||||
expect(result.llmContent).toContain('in the workspace directory');
|
||||
expect(result.llmContent).not.toContain('across');
|
||||
});
|
||||
|
||||
it('should search only the specified path when path is provided (ignoring multi-dir)', async () => {
|
||||
const secondDir = await fs.mkdtemp(
|
||||
path.join(os.tmpdir(), 'glob-tool-second-'),
|
||||
);
|
||||
await fs.writeFile(path.join(secondDir, '.git'), '');
|
||||
await fs.writeFile(path.join(secondDir, 'other.txt'), 'other');
|
||||
|
||||
const multiDirConfig = {
|
||||
...mockConfig,
|
||||
getWorkspaceContext: () =>
|
||||
createMockWorkspaceContext(tempRootDir, [secondDir]),
|
||||
} as unknown as Config;
|
||||
|
||||
const multiDirGlobTool = new GlobTool(multiDirConfig);
|
||||
const params: GlobToolParams = { pattern: '*.txt', path: 'sub' };
|
||||
const invocation = multiDirGlobTool.build(params);
|
||||
const result = await invocation.execute(abortSignal);
|
||||
|
||||
// Should NOT find files from secondDir
|
||||
expect(result.llmContent).not.toContain('other.txt');
|
||||
|
||||
await fs.rm(secondDir, { recursive: true, force: true });
|
||||
});
|
||||
});
|
||||
|
||||
describe('ignore file handling', () => {
|
||||
it('should respect .gitignore files by default', async () => {
|
||||
await fs.writeFile(path.join(tempRootDir, '.gitignore'), '*.ignored.txt');
|
||||
|
|
|
|||
|
|
@ -119,61 +119,104 @@ class GlobToolInvocation extends BaseToolInvocation<
|
|||
return 'ask';
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs glob search in a single directory and returns filtered entries.
|
||||
*/
|
||||
private async globInDirectory(
|
||||
searchDir: string,
|
||||
pattern: string,
|
||||
signal: AbortSignal,
|
||||
): Promise<GlobPath[]> {
|
||||
let effectivePattern = pattern;
|
||||
const fullPath = path.join(searchDir, effectivePattern);
|
||||
if (fs.existsSync(fullPath)) {
|
||||
effectivePattern = escape(effectivePattern);
|
||||
}
|
||||
|
||||
const entries = (await glob(effectivePattern, {
|
||||
cwd: searchDir,
|
||||
withFileTypes: true,
|
||||
nodir: true,
|
||||
stat: true,
|
||||
nocase: true,
|
||||
dot: true,
|
||||
follow: false,
|
||||
signal,
|
||||
})) as GlobPath[];
|
||||
|
||||
// Filter using paths relative to the search directory so that
|
||||
// .gitignore / .qwenignore patterns match correctly regardless of
|
||||
// which workspace directory the file belongs to.
|
||||
const relativePaths = entries.map((p) =>
|
||||
path.relative(searchDir, p.fullpath()),
|
||||
);
|
||||
|
||||
const { filteredPaths } = this.fileService.filterFilesWithReport(
|
||||
relativePaths,
|
||||
this.getFileFilteringOptions(),
|
||||
);
|
||||
|
||||
const normalizePathForComparison = (p: string) =>
|
||||
process.platform === 'win32' || process.platform === 'darwin'
|
||||
? p.toLowerCase()
|
||||
: p;
|
||||
|
||||
const filteredAbsolutePaths = new Set(
|
||||
filteredPaths.map((p) =>
|
||||
normalizePathForComparison(path.resolve(searchDir, p)),
|
||||
),
|
||||
);
|
||||
|
||||
return entries.filter((entry) =>
|
||||
filteredAbsolutePaths.has(normalizePathForComparison(entry.fullpath())),
|
||||
);
|
||||
}
|
||||
|
||||
async execute(signal: AbortSignal): Promise<ToolResult> {
|
||||
try {
|
||||
// Default to target directory if no path is provided
|
||||
const searchDirAbs = resolveAndValidatePath(
|
||||
this.config,
|
||||
this.params.path,
|
||||
{ allowExternalPaths: true },
|
||||
);
|
||||
const searchLocationDescription = this.params.path
|
||||
? `within ${searchDirAbs}`
|
||||
: `in the workspace directory`;
|
||||
// Determine which directories to search
|
||||
const searchDirs: string[] = [];
|
||||
let searchLocationDescription: string;
|
||||
|
||||
// Collect entries from the search directory
|
||||
let pattern = this.params.pattern;
|
||||
const fullPath = path.join(searchDirAbs, pattern);
|
||||
if (fs.existsSync(fullPath)) {
|
||||
pattern = escape(pattern);
|
||||
if (this.params.path) {
|
||||
// User specified a path — search only that directory
|
||||
const searchDirAbs = resolveAndValidatePath(
|
||||
this.config,
|
||||
this.params.path,
|
||||
{ allowExternalPaths: true },
|
||||
);
|
||||
searchDirs.push(searchDirAbs);
|
||||
searchLocationDescription = `within ${searchDirAbs}`;
|
||||
} else {
|
||||
// No path specified — search all workspace directories
|
||||
const workspaceDirs = this.config
|
||||
.getWorkspaceContext()
|
||||
.getDirectories();
|
||||
searchDirs.push(...workspaceDirs);
|
||||
searchLocationDescription =
|
||||
workspaceDirs.length > 1
|
||||
? `across ${workspaceDirs.length} workspace directories`
|
||||
: `in the workspace directory`;
|
||||
}
|
||||
|
||||
const allEntries = (await glob(pattern, {
|
||||
cwd: searchDirAbs,
|
||||
withFileTypes: true,
|
||||
nodir: true,
|
||||
stat: true,
|
||||
nocase: true,
|
||||
dot: true,
|
||||
follow: false,
|
||||
signal,
|
||||
})) as GlobPath[];
|
||||
// Collect entries from all search directories
|
||||
const pattern = this.params.pattern;
|
||||
const allFilteredEntries: GlobPath[] = [];
|
||||
const seenPaths = new Set<string>();
|
||||
|
||||
const relativePaths = allEntries.map((p) =>
|
||||
path.relative(this.config.getTargetDir(), p.fullpath()),
|
||||
);
|
||||
for (const searchDir of searchDirs) {
|
||||
const entries = await this.globInDirectory(searchDir, pattern, signal);
|
||||
for (const entry of entries) {
|
||||
// Deduplicate entries that might appear in overlapping directories
|
||||
const normalized = entry.fullpath();
|
||||
if (!seenPaths.has(normalized)) {
|
||||
seenPaths.add(normalized);
|
||||
allFilteredEntries.push(entry);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const { filteredPaths } = this.fileService.filterFilesWithReport(
|
||||
relativePaths,
|
||||
this.getFileFilteringOptions(),
|
||||
);
|
||||
|
||||
const normalizePathForComparison = (p: string) =>
|
||||
process.platform === 'win32' || process.platform === 'darwin'
|
||||
? p.toLowerCase()
|
||||
: p;
|
||||
|
||||
const filteredAbsolutePaths = new Set(
|
||||
filteredPaths.map((p) =>
|
||||
normalizePathForComparison(
|
||||
path.resolve(this.config.getTargetDir(), p),
|
||||
),
|
||||
),
|
||||
);
|
||||
|
||||
const filteredEntries = allEntries.filter((entry) =>
|
||||
filteredAbsolutePaths.has(normalizePathForComparison(entry.fullpath())),
|
||||
);
|
||||
const filteredEntries = allFilteredEntries;
|
||||
|
||||
if (!filteredEntries || filteredEntries.length === 0) {
|
||||
return {
|
||||
|
|
|
|||
|
|
@ -357,6 +357,48 @@ describe('GrepTool', () => {
|
|||
// Clean up
|
||||
await fs.rm(secondDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should convert relative paths to absolute when searching multiple directories', async () => {
|
||||
const secondDir = await fs.mkdtemp(
|
||||
path.join(os.tmpdir(), 'grep-tool-second-'),
|
||||
);
|
||||
await fs.writeFile(
|
||||
path.join(secondDir, 'extra.txt'),
|
||||
'world content in second dir',
|
||||
);
|
||||
|
||||
const multiDirConfig = {
|
||||
getTargetDir: () => tempRootDir,
|
||||
getWorkspaceContext: () =>
|
||||
createMockWorkspaceContext(tempRootDir, [secondDir]),
|
||||
getFileExclusions: () => ({
|
||||
getGlobExcludes: () => [],
|
||||
}),
|
||||
getTruncateToolOutputThreshold: () => 25000,
|
||||
getTruncateToolOutputLines: () => 1000,
|
||||
} as unknown as Config;
|
||||
|
||||
const multiDirGrepTool = new GrepTool(multiDirConfig);
|
||||
|
||||
const params: GrepToolParams = { pattern: 'world' };
|
||||
const invocation = multiDirGrepTool.build(params);
|
||||
const result = await invocation.execute(abortSignal);
|
||||
|
||||
// Should show "across N workspace directories"
|
||||
expect(result.llmContent).toContain('across 2 workspace directories');
|
||||
|
||||
// File paths from the second directory should be absolute
|
||||
expect(result.llmContent).toContain(
|
||||
`File: ${path.resolve(secondDir, 'extra.txt')}`,
|
||||
);
|
||||
|
||||
// File paths from the first directory should also be absolute
|
||||
expect(result.llmContent).toContain(
|
||||
`File: ${path.resolve(tempRootDir, 'fileA.txt')}`,
|
||||
);
|
||||
|
||||
await fs.rm(secondDir, { recursive: true, force: true });
|
||||
});
|
||||
});
|
||||
|
||||
describe('getDescription', () => {
|
||||
|
|
|
|||
|
|
@ -95,26 +95,52 @@ class GrepToolInvocation extends BaseToolInvocation<
|
|||
|
||||
async execute(signal: AbortSignal): Promise<ToolResult> {
|
||||
try {
|
||||
// Default to target directory if no path is provided
|
||||
const searchDirAbs = resolveAndValidatePath(
|
||||
this.config,
|
||||
this.params.path,
|
||||
{ allowExternalPaths: true },
|
||||
);
|
||||
const searchDirDisplay = this.params.path || '.';
|
||||
// Determine which directories to search
|
||||
const searchDirs: string[] = [];
|
||||
let searchLocationDescription: string;
|
||||
|
||||
// Perform grep search
|
||||
const rawMatches = await this.performGrepSearch({
|
||||
pattern: this.params.pattern,
|
||||
path: searchDirAbs,
|
||||
glob: this.params.glob,
|
||||
signal,
|
||||
});
|
||||
if (this.params.path) {
|
||||
// User specified a path — search only that directory
|
||||
const searchDirAbs = resolveAndValidatePath(
|
||||
this.config,
|
||||
this.params.path,
|
||||
{ allowExternalPaths: true },
|
||||
);
|
||||
searchDirs.push(searchDirAbs);
|
||||
searchLocationDescription = `in path "${this.params.path}"`;
|
||||
} else {
|
||||
// No path specified — search all workspace directories
|
||||
const workspaceDirs = this.config
|
||||
.getWorkspaceContext()
|
||||
.getDirectories();
|
||||
searchDirs.push(...workspaceDirs);
|
||||
searchLocationDescription =
|
||||
workspaceDirs.length > 1
|
||||
? `across ${workspaceDirs.length} workspace directories`
|
||||
: `in the workspace directory`;
|
||||
}
|
||||
|
||||
// Build search description
|
||||
const searchLocationDescription = this.params.path
|
||||
? `in path "${searchDirDisplay}"`
|
||||
: `in the workspace directory`;
|
||||
// Perform grep search across all directories
|
||||
const rawMatches: GrepMatch[] = [];
|
||||
for (const searchDir of searchDirs) {
|
||||
const matches = await this.performGrepSearch({
|
||||
pattern: this.params.pattern,
|
||||
path: searchDir,
|
||||
glob: this.params.glob,
|
||||
signal,
|
||||
});
|
||||
// When searching multiple directories, convert relative file paths
|
||||
// to absolute paths so results from different directories are
|
||||
// unambiguous.
|
||||
if (searchDirs.length > 1) {
|
||||
for (const match of matches) {
|
||||
if (!path.isAbsolute(match.filePath)) {
|
||||
match.filePath = path.resolve(searchDir, match.filePath);
|
||||
}
|
||||
}
|
||||
}
|
||||
rawMatches.push(...matches);
|
||||
}
|
||||
|
||||
const filterDescription = this.params.glob
|
||||
? ` (filter: "${this.params.glob}")`
|
||||
|
|
|
|||
|
|
@ -736,7 +736,7 @@ describe('DiscoveredMCPTool', () => {
|
|||
});
|
||||
|
||||
describe('getDefaultPermission and getConfirmationDetails', () => {
|
||||
it('should return ask even if trust is true and folder is trusted (trust logic moved to PM)', async () => {
|
||||
it('should return allow when trust is true', async () => {
|
||||
const trustedTool = new DiscoveredMCPTool(
|
||||
mockCallableToolInstance,
|
||||
serverName,
|
||||
|
|
@ -748,7 +748,7 @@ describe('DiscoveredMCPTool', () => {
|
|||
{ isTrustedFolder: () => true } as any,
|
||||
);
|
||||
const invocation = trustedTool.build({ param: 'mock' });
|
||||
expect(await invocation.getDefaultPermission()).toBe('ask');
|
||||
expect(await invocation.getDefaultPermission()).toBe('allow');
|
||||
});
|
||||
|
||||
it('should return ask if not trusted', async () => {
|
||||
|
|
@ -808,7 +808,7 @@ describe('DiscoveredMCPTool', () => {
|
|||
isTrustedFolder: () => isTrusted,
|
||||
});
|
||||
|
||||
it('should return ask even if trust is true and folder is trusted (trust logic moved to PM)', async () => {
|
||||
it('should return allow when trust is true and folder is trusted', async () => {
|
||||
const trustedTool = new DiscoveredMCPTool(
|
||||
mockCallableToolInstance,
|
||||
serverName,
|
||||
|
|
@ -820,7 +820,7 @@ describe('DiscoveredMCPTool', () => {
|
|||
mockConfig(true) as any, // isTrustedFolder = true
|
||||
);
|
||||
const invocation = trustedTool.build({ param: 'mock' });
|
||||
expect(await invocation.getDefaultPermission()).toBe('ask');
|
||||
expect(await invocation.getDefaultPermission()).toBe('allow');
|
||||
});
|
||||
|
||||
it('should return ask if trust is true but folder is not trusted', async () => {
|
||||
|
|
|
|||
|
|
@ -124,14 +124,17 @@ class DiscoveredMCPToolInvocation extends BaseToolInvocation<
|
|||
}
|
||||
|
||||
/**
|
||||
* MCP tool default permission based on annotations:
|
||||
* MCP tool default permission based on trust and annotations:
|
||||
* - trust: true in a trusted folder → 'allow' (server explicitly trusted by user config)
|
||||
* - readOnlyHint → 'allow'
|
||||
* - All other MCP tools → 'ask'
|
||||
*
|
||||
* Note: trust/isTrustedFolder logic is now handled by PM rules,
|
||||
* not by getDefaultPermission().
|
||||
*/
|
||||
override async getDefaultPermission(): Promise<PermissionDecision> {
|
||||
// MCP servers explicitly marked as trusted bypass confirmation,
|
||||
// but only when the workspace folder is also trusted (security gate).
|
||||
if (this.trust === true && this.cliConfig?.isTrustedFolder()) {
|
||||
return 'allow';
|
||||
}
|
||||
// MCP tools annotated with readOnlyHint: true are safe
|
||||
if (this.annotations?.readOnlyHint === true) {
|
||||
return 'allow';
|
||||
|
|
|
|||
|
|
@ -436,6 +436,116 @@ describe('RipGrepTool', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('multi-directory workspace', () => {
|
||||
it('should search across all workspace directories when no path is specified', async () => {
|
||||
const secondDir = await fs.mkdtemp(
|
||||
path.join(os.tmpdir(), 'grep-tool-second-'),
|
||||
);
|
||||
await fs.writeFile(
|
||||
path.join(secondDir, 'extra.txt'),
|
||||
'hello from second dir',
|
||||
);
|
||||
|
||||
const multiDirConfig = {
|
||||
...mockConfig,
|
||||
getWorkspaceContext: () =>
|
||||
createMockWorkspaceContext(tempRootDir, [secondDir]),
|
||||
} as unknown as Config;
|
||||
|
||||
const multiDirGrepTool = new RipGrepTool(multiDirConfig);
|
||||
|
||||
(runRipgrep as Mock).mockResolvedValue({
|
||||
stdout: `fileA.txt:1:hello world${EOL}${secondDir}/extra.txt:1:hello from second dir${EOL}`,
|
||||
truncated: false,
|
||||
error: undefined,
|
||||
});
|
||||
|
||||
const params: RipGrepToolParams = { pattern: 'hello' };
|
||||
const invocation = multiDirGrepTool.build(params);
|
||||
const result = await invocation.execute(abortSignal);
|
||||
|
||||
expect(result.llmContent).toContain('across 2 workspace directories');
|
||||
expect(result.llmContent).toContain('Found 2 matches');
|
||||
|
||||
// Verify both paths were passed to runRipgrep
|
||||
expect(runRipgrep).toHaveBeenCalledWith(
|
||||
expect.arrayContaining([tempRootDir, secondDir]),
|
||||
expect.anything(),
|
||||
);
|
||||
|
||||
await fs.rm(secondDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should search only specified path when path is given (ignoring multi-dir)', async () => {
|
||||
const secondDir = await fs.mkdtemp(
|
||||
path.join(os.tmpdir(), 'grep-tool-second-'),
|
||||
);
|
||||
await fs.writeFile(path.join(secondDir, 'other.txt'), 'other content');
|
||||
|
||||
const multiDirConfig = {
|
||||
...mockConfig,
|
||||
getWorkspaceContext: () =>
|
||||
createMockWorkspaceContext(tempRootDir, [secondDir]),
|
||||
} as unknown as Config;
|
||||
|
||||
const multiDirGrepTool = new RipGrepTool(multiDirConfig);
|
||||
|
||||
(runRipgrep as Mock).mockResolvedValue({
|
||||
stdout: `fileC.txt:1:another world in sub dir${EOL}`,
|
||||
truncated: false,
|
||||
error: undefined,
|
||||
});
|
||||
|
||||
const params: RipGrepToolParams = { pattern: 'world', path: 'sub' };
|
||||
const invocation = multiDirGrepTool.build(params);
|
||||
const result = await invocation.execute(abortSignal);
|
||||
|
||||
expect(result.llmContent).toContain('in path "sub"');
|
||||
expect(result.llmContent).not.toContain('across');
|
||||
|
||||
await fs.rm(secondDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should load .qwenignore from each workspace directory', async () => {
|
||||
const secondDir = await fs.mkdtemp(
|
||||
path.join(os.tmpdir(), 'grep-tool-second-'),
|
||||
);
|
||||
await fs.writeFile(path.join(secondDir, '.qwenignore'), 'ignored.txt\n');
|
||||
await fs.writeFile(
|
||||
path.join(tempRootDir, '.qwenignore'),
|
||||
'other-ignored.txt\n',
|
||||
);
|
||||
|
||||
const multiDirConfig = {
|
||||
...mockConfig,
|
||||
getWorkspaceContext: () =>
|
||||
createMockWorkspaceContext(tempRootDir, [secondDir]),
|
||||
} as unknown as Config;
|
||||
|
||||
const multiDirGrepTool = new RipGrepTool(multiDirConfig);
|
||||
|
||||
(runRipgrep as Mock).mockResolvedValue({
|
||||
stdout: '',
|
||||
truncated: false,
|
||||
error: undefined,
|
||||
});
|
||||
|
||||
const params: RipGrepToolParams = { pattern: 'test' };
|
||||
const invocation = multiDirGrepTool.build(params);
|
||||
await invocation.execute(abortSignal);
|
||||
|
||||
// Verify both .qwenignore files were passed
|
||||
const rgArgs = (runRipgrep as Mock).mock.calls[0][0] as string[];
|
||||
const ignoreFileArgs = rgArgs.filter(
|
||||
(a: string, i: number) => i > 0 && rgArgs[i - 1] === '--ignore-file',
|
||||
);
|
||||
expect(ignoreFileArgs).toContain(path.join(tempRootDir, '.qwenignore'));
|
||||
expect(ignoreFileArgs).toContain(path.join(secondDir, '.qwenignore'));
|
||||
|
||||
await fs.rm(secondDir, { recursive: true, force: true });
|
||||
});
|
||||
});
|
||||
|
||||
describe('abort signal handling', () => {
|
||||
it('should handle AbortSignal during search', async () => {
|
||||
const controller = new AbortController();
|
||||
|
|
|
|||
|
|
@ -58,17 +58,32 @@ class GrepToolInvocation extends BaseToolInvocation<
|
|||
|
||||
async execute(signal: AbortSignal): Promise<ToolResult> {
|
||||
try {
|
||||
const searchDirAbs = resolveAndValidatePath(
|
||||
this.config,
|
||||
this.params.path,
|
||||
{ allowFiles: true },
|
||||
);
|
||||
const searchDirDisplay = this.params.path || '.';
|
||||
// Determine which paths to search
|
||||
const searchPaths: string[] = [];
|
||||
let searchDirDisplay: string;
|
||||
|
||||
if (this.params.path) {
|
||||
// User specified a path — search only that path
|
||||
const searchDirAbs = resolveAndValidatePath(
|
||||
this.config,
|
||||
this.params.path,
|
||||
{ allowFiles: true },
|
||||
);
|
||||
searchPaths.push(searchDirAbs);
|
||||
searchDirDisplay = this.params.path;
|
||||
} else {
|
||||
// No path specified — search all workspace directories
|
||||
const workspaceDirs = this.config
|
||||
.getWorkspaceContext()
|
||||
.getDirectories();
|
||||
searchPaths.push(...workspaceDirs);
|
||||
searchDirDisplay = '.';
|
||||
}
|
||||
|
||||
// Get raw ripgrep output
|
||||
const rawOutput = await this.performRipgrepSearch({
|
||||
pattern: this.params.pattern,
|
||||
path: searchDirAbs,
|
||||
paths: searchPaths,
|
||||
glob: this.params.glob,
|
||||
signal,
|
||||
});
|
||||
|
|
@ -76,7 +91,9 @@ class GrepToolInvocation extends BaseToolInvocation<
|
|||
// Build search description
|
||||
const searchLocationDescription = this.params.path
|
||||
? `in path "${searchDirDisplay}"`
|
||||
: `in the workspace directory`;
|
||||
: searchPaths.length > 1
|
||||
? `across ${searchPaths.length} workspace directories`
|
||||
: `in the workspace directory`;
|
||||
|
||||
const filterDescription = this.params.glob
|
||||
? ` (filter: "${this.params.glob}")`
|
||||
|
|
@ -171,11 +188,11 @@ class GrepToolInvocation extends BaseToolInvocation<
|
|||
|
||||
private async performRipgrepSearch(options: {
|
||||
pattern: string;
|
||||
path: string; // Can be a file or directory
|
||||
paths: string[]; // Can be files or directories
|
||||
glob?: string;
|
||||
signal: AbortSignal;
|
||||
}): Promise<string> {
|
||||
const { pattern, path: absolutePath, glob } = options;
|
||||
const { pattern, paths, glob } = options;
|
||||
|
||||
const rgArgs: string[] = [
|
||||
'--line-number',
|
||||
|
|
@ -193,12 +210,21 @@ class GrepToolInvocation extends BaseToolInvocation<
|
|||
}
|
||||
|
||||
if (filteringOptions.respectQwenIgnore) {
|
||||
const qwenIgnorePath = path.join(
|
||||
this.config.getTargetDir(),
|
||||
'.qwenignore',
|
||||
);
|
||||
if (fs.existsSync(qwenIgnorePath)) {
|
||||
rgArgs.push('--ignore-file', qwenIgnorePath);
|
||||
// Load .qwenignore from each workspace directory, not just the primary one
|
||||
const seenIgnoreFiles = new Set<string>();
|
||||
for (const searchPath of paths) {
|
||||
const dir =
|
||||
fs.existsSync(searchPath) && fs.statSync(searchPath).isDirectory()
|
||||
? searchPath
|
||||
: path.dirname(searchPath);
|
||||
const qwenIgnorePath = path.join(dir, '.qwenignore');
|
||||
if (
|
||||
!seenIgnoreFiles.has(qwenIgnorePath) &&
|
||||
fs.existsSync(qwenIgnorePath)
|
||||
) {
|
||||
rgArgs.push('--ignore-file', qwenIgnorePath);
|
||||
seenIgnoreFiles.add(qwenIgnorePath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -208,7 +234,8 @@ class GrepToolInvocation extends BaseToolInvocation<
|
|||
}
|
||||
|
||||
rgArgs.push('--threads', '4');
|
||||
rgArgs.push(absolutePath);
|
||||
// Pass all search paths to ripgrep (it supports multiple paths natively)
|
||||
rgArgs.push(...paths);
|
||||
|
||||
const result = await runRipgrep(rgArgs, options.signal);
|
||||
if (result.error && !result.stdout) {
|
||||
|
|
|
|||
|
|
@ -556,12 +556,20 @@ describe('getShellConfiguration', () => {
|
|||
});
|
||||
|
||||
describe('on Windows', () => {
|
||||
const originalEnv = { ...process.env };
|
||||
|
||||
beforeEach(() => {
|
||||
mockPlatform.mockReturnValue('win32');
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.env = originalEnv;
|
||||
});
|
||||
|
||||
it('should return cmd.exe configuration by default', () => {
|
||||
delete process.env['ComSpec'];
|
||||
delete process.env['MSYSTEM'];
|
||||
delete process.env['TERM'];
|
||||
const config = getShellConfiguration();
|
||||
expect(config.executable).toBe('cmd.exe');
|
||||
expect(config.argsPrefix).toEqual(['/d', '/s', '/c']);
|
||||
|
|
@ -571,6 +579,8 @@ describe('getShellConfiguration', () => {
|
|||
it('should respect ComSpec for cmd.exe', () => {
|
||||
const cmdPath = 'C:\\WINDOWS\\system32\\cmd.exe';
|
||||
process.env['ComSpec'] = cmdPath;
|
||||
delete process.env['MSYSTEM'];
|
||||
delete process.env['TERM'];
|
||||
const config = getShellConfiguration();
|
||||
expect(config.executable).toBe(cmdPath);
|
||||
expect(config.argsPrefix).toEqual(['/d', '/s', '/c']);
|
||||
|
|
@ -581,6 +591,8 @@ describe('getShellConfiguration', () => {
|
|||
const psPath =
|
||||
'C:\\WINDOWS\\System32\\WindowsPowerShell\\v1.0\\powershell.exe';
|
||||
process.env['ComSpec'] = psPath;
|
||||
delete process.env['MSYSTEM'];
|
||||
delete process.env['TERM'];
|
||||
const config = getShellConfiguration();
|
||||
expect(config.executable).toBe(psPath);
|
||||
expect(config.argsPrefix).toEqual(['-NoProfile', '-Command']);
|
||||
|
|
@ -590,6 +602,8 @@ describe('getShellConfiguration', () => {
|
|||
it('should return PowerShell configuration if ComSpec points to pwsh.exe', () => {
|
||||
const pwshPath = 'C:\\Program Files\\PowerShell\\7\\pwsh.exe';
|
||||
process.env['ComSpec'] = pwshPath;
|
||||
delete process.env['MSYSTEM'];
|
||||
delete process.env['TERM'];
|
||||
const config = getShellConfiguration();
|
||||
expect(config.executable).toBe(pwshPath);
|
||||
expect(config.argsPrefix).toEqual(['-NoProfile', '-Command']);
|
||||
|
|
@ -598,11 +612,76 @@ describe('getShellConfiguration', () => {
|
|||
|
||||
it('should be case-insensitive when checking ComSpec', () => {
|
||||
process.env['ComSpec'] = 'C:\\Path\\To\\POWERSHELL.EXE';
|
||||
delete process.env['MSYSTEM'];
|
||||
delete process.env['TERM'];
|
||||
const config = getShellConfiguration();
|
||||
expect(config.executable).toBe('C:\\Path\\To\\POWERSHELL.EXE');
|
||||
expect(config.argsPrefix).toEqual(['-NoProfile', '-Command']);
|
||||
expect(config.shell).toBe('powershell');
|
||||
});
|
||||
|
||||
describe('Git Bash / MSYS2 / MinTTY detection', () => {
|
||||
it('should return bash configuration when MSYSTEM starts with MINGW', () => {
|
||||
process.env['MSYSTEM'] = 'MINGW64';
|
||||
const config = getShellConfiguration();
|
||||
expect(config.executable).toBe('bash');
|
||||
expect(config.argsPrefix).toEqual(['-c']);
|
||||
expect(config.shell).toBe('bash');
|
||||
});
|
||||
|
||||
it('should return bash configuration when MSYSTEM starts with MSYS', () => {
|
||||
process.env['MSYSTEM'] = 'MSYS';
|
||||
const config = getShellConfiguration();
|
||||
expect(config.executable).toBe('bash');
|
||||
expect(config.argsPrefix).toEqual(['-c']);
|
||||
expect(config.shell).toBe('bash');
|
||||
});
|
||||
|
||||
it('should return bash configuration when TERM includes msys', () => {
|
||||
delete process.env['MSYSTEM'];
|
||||
process.env['TERM'] = 'xterm-256color-msys';
|
||||
const config = getShellConfiguration();
|
||||
expect(config.executable).toBe('bash');
|
||||
expect(config.argsPrefix).toEqual(['-c']);
|
||||
expect(config.shell).toBe('bash');
|
||||
});
|
||||
|
||||
it('should return bash configuration when TERM includes cygwin', () => {
|
||||
delete process.env['MSYSTEM'];
|
||||
process.env['TERM'] = 'xterm-256color-cygwin';
|
||||
const config = getShellConfiguration();
|
||||
expect(config.executable).toBe('bash');
|
||||
expect(config.argsPrefix).toEqual(['-c']);
|
||||
expect(config.shell).toBe('bash');
|
||||
});
|
||||
|
||||
it('should prioritize MSYSTEM over TERM for Git Bash detection', () => {
|
||||
process.env['MSYSTEM'] = 'MINGW64';
|
||||
process.env['TERM'] = 'xterm';
|
||||
const config = getShellConfiguration();
|
||||
expect(config.executable).toBe('bash');
|
||||
expect(config.argsPrefix).toEqual(['-c']);
|
||||
expect(config.shell).toBe('bash');
|
||||
});
|
||||
|
||||
it('should return cmd.exe when MSYSTEM and TERM do not indicate Git Bash', () => {
|
||||
process.env['MSYSTEM'] = 'UNKNOWN';
|
||||
process.env['TERM'] = 'xterm';
|
||||
delete process.env['ComSpec'];
|
||||
const config = getShellConfiguration();
|
||||
expect(config.executable).toBe('cmd.exe');
|
||||
expect(config.argsPrefix).toEqual(['/d', '/s', '/c']);
|
||||
expect(config.shell).toBe('cmd');
|
||||
});
|
||||
|
||||
it('should return bash when MSYSTEM is MINGW32', () => {
|
||||
process.env['MSYSTEM'] = 'MINGW32';
|
||||
const config = getShellConfiguration();
|
||||
expect(config.executable).toBe('bash');
|
||||
expect(config.argsPrefix).toEqual(['-c']);
|
||||
expect(config.shell).toBe('bash');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
|||
|
|
@ -48,6 +48,24 @@ export interface ShellConfiguration {
|
|||
*/
|
||||
export function getShellConfiguration(): ShellConfiguration {
|
||||
if (isWindows()) {
|
||||
// Detect Git Bash / MSYS2 / MinTTY environments
|
||||
// These environments should use bash instead of cmd/PowerShell
|
||||
const msystem = process.env['MSYSTEM'];
|
||||
const term = process.env['TERM'] || '';
|
||||
const isGitBash =
|
||||
msystem?.startsWith('MINGW') ||
|
||||
msystem?.startsWith('MSYS') ||
|
||||
term.includes('msys') ||
|
||||
term.includes('cygwin');
|
||||
|
||||
if (isGitBash) {
|
||||
return {
|
||||
executable: 'bash',
|
||||
argsPrefix: ['-c'],
|
||||
shell: 'bash',
|
||||
};
|
||||
}
|
||||
|
||||
const comSpec = process.env['ComSpec'] || 'cmd.exe';
|
||||
const executable = comSpec.toLowerCase();
|
||||
|
||||
|
|
@ -892,14 +910,18 @@ export function execCommand(
|
|||
reject(error);
|
||||
} else {
|
||||
resolve({
|
||||
stdout: stdout ?? '',
|
||||
stderr: stderr ?? '',
|
||||
stdout: String(stdout ?? ''),
|
||||
stderr: String(stderr ?? ''),
|
||||
code: typeof error.code === 'number' ? error.code : 1,
|
||||
});
|
||||
}
|
||||
return;
|
||||
}
|
||||
resolve({ stdout: stdout ?? '', stderr: stderr ?? '', code: 0 });
|
||||
resolve({
|
||||
stdout: String(stdout ?? ''),
|
||||
stderr: String(stderr ?? ''),
|
||||
code: 0,
|
||||
});
|
||||
},
|
||||
);
|
||||
child.on('error', reject);
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue