
Commit d235cb4

fix: more tests

1 parent bb41c54

3 files changed: +9 -13 lines

extensions/cli/src/compaction.infiniteLoop.test.ts

Lines changed: 8 additions & 8 deletions
@@ -1,7 +1,7 @@
-import { ChatCompletionMessageParam } from "openai/resources.mjs";
-import { describe, it, expect, vi } from "vitest";
 import { ModelConfig } from "@continuedev/config-yaml";
 import { BaseLlmApi } from "@continuedev/openai-adapters";
+import { ChatCompletionMessageParam } from "openai/resources.mjs";
+import { describe, it, expect, vi } from "vitest";
 
 import { compactChatHistory } from "./compaction.js";
 import { streamChatResponse } from "./streamChatResponse.js";
@@ -43,7 +43,7 @@ describe("compaction infinite loop prevention", () => {
     mockStreamResponse.mockImplementation(
       async (history, model, api, controller, callbacks) => {
         callbacks?.onContent?.("Summary");
-        callbacks?.onContentComplete?.();
+        callbacks?.onContentComplete?.("Summary");
         return "Summary";
       },
     );
@@ -54,7 +54,7 @@ describe("compaction infinite loop prevention", () => {
     ];
 
     // This should not hang - it should break out of the loop
-    const result = await compactChatHistory(history, mockModel, mockLlmApi);
+    const result = await compactChatHistory(history, mockModel, mockLlmApi, {});
 
     // Should complete successfully even though token count is still too high
     expect(result.compactedHistory).toBeDefined();
@@ -75,7 +75,7 @@ describe("compaction infinite loop prevention", () => {
     mockStreamResponse.mockImplementation(
       async (history, model, api, controller, callbacks) => {
         callbacks?.onContent?.("Summary");
-        callbacks?.onContentComplete?.();
+        callbacks?.onContentComplete?.("Summary");
         return "Summary";
       },
     );
@@ -88,7 +88,7 @@ describe("compaction infinite loop prevention", () => {
     ];
 
     // This should not hang
-    const result = await compactChatHistory(history, mockModel, mockLlmApi);
+    const result = await compactChatHistory(history, mockModel, mockLlmApi, {});
 
     expect(result.compactedHistory).toBeDefined();
   });
@@ -116,7 +116,7 @@ describe("compaction infinite loop prevention", () => {
    mockStreamResponse.mockImplementation(
      async (history, model, api, controller, callbacks) => {
        callbacks?.onContent?.("Summary");
-        callbacks?.onContentComplete?.();
+        callbacks?.onContentComplete?.("Summary");
        return "Summary";
      },
    );
@@ -129,7 +129,7 @@ describe("compaction infinite loop prevention", () => {
       { role: "user", content: "Another question" },
     ];
 
-    const result = await compactChatHistory(history, mockModel, mockLlmApi);
+    const result = await compactChatHistory(history, mockModel, mockLlmApi, {});
 
     expect(result.compactedHistory).toBeDefined();
     // The function will call countTokens multiple times during the process
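Two signature changes are implied by the hunks above: onContentComplete now receives the completed content string, and compactChatHistory takes a fourth (options) argument, passed here as {}. A minimal vitest sketch of the mock wiring (the StreamCallbacks shape and the parameter names are inferred from this diff, not taken from the actual streamChatResponse source):

```typescript
import { vi } from "vitest";

// Assumed callback shape, inferred from the diff: onContentComplete is now
// called with the accumulated content instead of with no arguments.
interface StreamCallbacks {
  onContent?: (chunk: string) => void;
  onContentComplete?: (content: string) => void;
}

// Mirrors the test's mockImplementation: emit one chunk, signal completion
// with the same text, and resolve to it.
const mockStreamResponse = vi.fn(
  async (
    _history: unknown,
    _model: unknown,
    _api: unknown,
    _controller: AbortController,
    callbacks?: StreamCallbacks,
  ): Promise<string> => {
    callbacks?.onContent?.("Summary");
    callbacks?.onContentComplete?.("Summary");
    return "Summary";
  },
);
```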

extensions/cli/src/test-helpers/ui-test-context.ts

Lines changed: 1 addition & 1 deletion
@@ -72,7 +72,7 @@ export function createUITestContext(
     responseStartTime: null,
     inputMode: true,
     attachedFiles: [],
-    handleUserMessage: vi.fn(),
+    handleUserMessage: vi.fn().mockResolvedValue(undefined),
     handleInterrupt: vi.fn(),
     handleFileAttached: vi.fn(),
     resetChatHistory: vi.fn(),
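The switch from vi.fn() to vi.fn().mockResolvedValue(undefined) matters when the code under test treats handleUserMessage as async and chains onto its return value: a bare vi.fn() returns undefined, so calling .finally() or .catch() on the result would throw. A small sketch of the difference (the caller shown is hypothetical):

```typescript
import { vi } from "vitest";

// Returning a resolved Promise keeps the stub compatible with async callers.
// A bare vi.fn() would return undefined, and undefined has no .finally().
const handleUserMessage = vi.fn().mockResolvedValue(undefined);

// Hypothetical caller, standing in for how the UI might invoke the handler:
handleUserMessage("hello").finally(() => {
  // Safe: the mock returns a real Promise.
});
```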

extensions/cli/src/ui/hooks/useChat.helpers.ts

Lines changed: 0 additions & 4 deletions
@@ -291,10 +291,6 @@ export async function handleAutoCompaction({
   currentChatHistory: ChatCompletionMessageParam[];
   currentCompactionIndex: number | null;
 }> {
-  if (!llmApi) {
-    throw new Error("LLM API is not available for auto-compaction");
-  }
-
   const { handleAutoCompaction: coreAutoCompaction } = await import(
     "../../streamChatResponse.autoCompaction.js"
   );
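Dropping the early if (!llmApi) guard means a missing LLM API is no longer a hard error at this layer; presumably the core handleAutoCompaction imported from streamChatResponse.autoCompaction.js handles that case itself. A rough sketch of the helper's shape after the change, where everything except the import path and the names visible in the context lines is an assumption:

```typescript
// Sketch only: parameter and return types are assumptions based on the
// context lines above, not the real useChat.helpers.ts implementation.
export async function handleAutoCompaction(options: {
  llmApi: unknown; // may be absent; no longer rejected here
  currentChatHistory: unknown[];
  currentCompactionIndex: number | null;
}): Promise<{
  currentChatHistory: unknown[];
  currentCompactionIndex: number | null;
}> {
  // With the guard removed, the dynamically imported core function is
  // responsible for deciding what to do when no LLM API is available.
  const { handleAutoCompaction: coreAutoCompaction } = await import(
    "../../streamChatResponse.autoCompaction.js"
  );
  return coreAutoCompaction(options);
}
```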
