@@ -1,7 +1,7 @@
-import { ChatCompletionMessageParam } from "openai/resources.mjs";
-import { describe, it, expect, vi } from "vitest";
 import { ModelConfig } from "@continuedev/config-yaml";
 import { BaseLlmApi } from "@continuedev/openai-adapters";
+import { ChatCompletionMessageParam } from "openai/resources.mjs";
+import { describe, it, expect, vi } from "vitest";

 import { compactChatHistory } from "./compaction.js";
 import { streamChatResponse } from "./streamChatResponse.js";
@@ -43,7 +43,7 @@ describe("compaction infinite loop prevention", () => {
     mockStreamResponse.mockImplementation(
       async (history, model, api, controller, callbacks) => {
         callbacks?.onContent?.("Summary");
-        callbacks?.onContentComplete?.();
+        callbacks?.onContentComplete?.("Summary");
         return "Summary";
       },
     );
@@ -54,7 +54,7 @@ describe("compaction infinite loop prevention", () => {
     ];

     // This should not hang - it should break out of the loop
-    const result = await compactChatHistory(history, mockModel, mockLlmApi);
+    const result = await compactChatHistory(history, mockModel, mockLlmApi, {});

     // Should complete successfully even though token count is still too high
     expect(result.compactedHistory).toBeDefined();
@@ -75,7 +75,7 @@ describe("compaction infinite loop prevention", () => {
     mockStreamResponse.mockImplementation(
       async (history, model, api, controller, callbacks) => {
         callbacks?.onContent?.("Summary");
-        callbacks?.onContentComplete?.();
+        callbacks?.onContentComplete?.("Summary");
         return "Summary";
       },
     );
@@ -88,7 +88,7 @@ describe("compaction infinite loop prevention", () => {
     ];

     // This should not hang
-    const result = await compactChatHistory(history, mockModel, mockLlmApi);
+    const result = await compactChatHistory(history, mockModel, mockLlmApi, {});

     expect(result.compactedHistory).toBeDefined();
   });
@@ -116,7 +116,7 @@ describe("compaction infinite loop prevention", () => {
     mockStreamResponse.mockImplementation(
       async (history, model, api, controller, callbacks) => {
         callbacks?.onContent?.("Summary");
-        callbacks?.onContentComplete?.();
+        callbacks?.onContentComplete?.("Summary");
         return "Summary";
       },
     );
@@ -129,7 +129,7 @@ describe("compaction infinite loop prevention", () => {
       { role: "user", content: "Another question" },
     ];

-    const result = await compactChatHistory(history, mockModel, mockLlmApi);
+    const result = await compactChatHistory(history, mockModel, mockLlmApi, {});

     expect(result.compactedHistory).toBeDefined();
     // The function will call countTokens multiple times during the process
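
Read together, the hunks above imply two signature changes: onContentComplete is now called with the completed content, and compactChatHistory accepts a fourth options argument. Below is a minimal TypeScript sketch of the shapes these tests assume; the names are inferred from the call sites in this diff, not taken from the real compaction.ts / streamChatResponse.ts definitions.

import { ChatCompletionMessageParam } from "openai/resources.mjs";

// Inferred callback shape: onContentComplete now receives the finished
// content instead of being invoked with no arguments.
interface StreamCallbacks {
  onContent?: (chunk: string) => void;
  onContentComplete?: (content: string) => void;
}

// Inferred result shape, based on the result.compactedHistory assertions.
interface CompactionResult {
  compactedHistory: ChatCompletionMessageParam[];
}

// The tests now pass {} as a fourth argument, suggesting an options
// parameter was added to compactChatHistory; its fields are not visible
// in this diff, so it is modeled loosely here.
type CompactionOptions = Record<string, unknown>;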