11using BotSharp . Abstraction . Agents ;
22using BotSharp . Abstraction . Agents . Enums ;
3- using BotSharp . Abstraction . Conversations ;
43using BotSharp . Abstraction . Loggers ;
54using BotSharp . Abstraction . Functions . Models ;
65using BotSharp . Abstraction . Routing ;
76using BotSharp . Plugin . GoogleAI . Settings ;
87using LLMSharp . Google . Palm ;
98using Microsoft . Extensions . Logging ;
10- using System . Diagnostics . Metrics ;
11- using static System . Net . Mime . MediaTypeNames ;
9+ using LLMSharp . Google . Palm . DiscussService ;
1210
1311namespace BotSharp . Plugin . GoogleAI . Providers ;
1412
@@ -39,19 +37,25 @@ public RoleDialogModel GetChatCompletions(Agent agent, List<RoleDialogModel> con
3937
4038 var client = new GooglePalmClient ( apiKey : _settings . PaLM . ApiKey ) ;
4139
42- var ( prompt , messages ) = PrepareOptions ( agent , conversations ) ;
40+ var ( prompt , messages , hasFunctions ) = PrepareOptions ( agent , conversations ) ;
4341
4442 RoleDialogModel msg ;
4543
46- if ( messages == null )
44+ if ( hasFunctions )
4745 {
4846 // use chat completion (ChatAsync) to detect function calling; replaces the old GenerateTextAsync text-completion call
47- // var response = client.GenerateTextAsync(prompt, null).Result;
48+ var response = client . ChatAsync ( new PalmChatCompletionRequest
49+ {
50+ Context = prompt ,
51+ Messages = messages ,
52+ Temperature = 0.1f
53+ } ) . Result ;
5054
5155 var message = response . Candidates . First ( ) ;
5256
5357 // check if returns function calling
54- var llmResponse = message . Output . JsonContent < FunctionCallingResponse > ( ) ;
58+ var llmResponse = message . Content . JsonContent < FunctionCallingResponse > ( ) ;
5559
5660 msg = new RoleDialogModel ( llmResponse . Role , llmResponse . Content )
5761 {
@@ -79,13 +83,14 @@ public RoleDialogModel GetChatCompletions(Agent agent, List<RoleDialogModel> con
7983 Task . WaitAll ( hooks . Select ( hook =>
8084 hook . AfterGenerated ( msg , new TokenStatsModel
8185 {
86+ Prompt = prompt ,
8287 Model = _model
8388 } ) ) . ToArray ( ) ) ;
8489
8590 return msg ;
8691 }
8792
88- private ( string , List < PalmChatMessage > ) PrepareOptions ( Agent agent , List < RoleDialogModel > conversations )
93+ private ( string , List < PalmChatMessage > , bool ) PrepareOptions ( Agent agent , List < RoleDialogModel > conversations )
8994 {
9095 var prompt = "" ;
9196
@@ -99,6 +104,9 @@ public RoleDialogModel GetChatCompletions(Agent agent, List<RoleDialogModel> con
99104 var routing = _services . GetRequiredService < IRoutingService > ( ) ;
100105 var router = routing . Router ;
101106
107+ var messages = conversations . Select ( c => new PalmChatMessage ( c . Content , c . Role == AgentRole . User ? "user" : "AI" ) )
108+ . ToList ( ) ;
109+
102110 if ( agent . Functions != null && agent . Functions . Count > 0 )
103111 {
104112 prompt += "\r \n \r \n [Functions] defined in JSON Schema:\r \n " ;
@@ -118,13 +126,13 @@ public RoleDialogModel GetChatCompletions(Agent agent, List<RoleDialogModel> con
118126
119127 prompt += "\r \n \r \n " + router . Templates . FirstOrDefault ( x => x . Name == "response_with_function" ) . Content ;
120128
121- return ( prompt , null ) ;
129+ return ( prompt , new List < PalmChatMessage >
130+ {
131+ new PalmChatMessage ( "Which function should be used for the next step based on latest user or function response, output your response in JSON:" , AgentRole . User ) ,
132+ } , true ) ;
122133 }
123134
124- var messages = conversations . Select ( c => new PalmChatMessage ( c . Content , c . Role == AgentRole . User ? "user" : "AI" ) )
125- . ToList ( ) ;
126-
127- return ( prompt , messages ) ;
135+ return ( prompt , messages , false ) ;
128136 }
129137
130138 public Task < bool > GetChatCompletionsAsync ( Agent agent , List < RoleDialogModel > conversations , Func < RoleDialogModel , Task > onMessageReceived , Func < RoleDialogModel , Task > onFunctionExecuting )
0 commit comments