Commit 07d54af

Fix special str and value type for web-ui (#5632)
* fix special str display for chatbot
* fix value type for gr.Slider
1 parent 451612c commit 07d54af

File tree

2 files changed: +25 -4 lines changed

swift/ui/llm_infer/llm_infer.py

Lines changed: 20 additions & 4 deletions
@@ -344,6 +344,18 @@ def agent_type(cls, response):
             return 'toolbench'
         return None
 
+    @classmethod
+    def parse_text(cls, messages):
+        prepared_msgs = []
+        for message in messages:
+            if isinstance(message, tuple):
+                query = message[0].replace('<', '&lt;').replace('>', '&gt;').replace('*', '&ast;')
+                response = message[1].replace('<', '&lt;').replace('>', '&gt;').replace('*', '&ast;')
+                prepared_msgs.append((query, response))
+            else:
+                prepared_msgs.append(message)
+        return prepared_msgs
+
     @classmethod
     def send_message(cls, running_task, template_type, prompt: str, image, video, audio, infer_request: InferRequest,
                      infer_model_type, system, max_new_tokens, temperature, top_k, top_p, repetition_penalty):
@@ -367,8 +379,10 @@ def send_message(cls, running_task, template_type, prompt: str, image, video, au
                 infer_request.messages[-1]['medias'].append(media)
 
         if not prompt:
-            yield '', cls._replace_tag_with_media(infer_request), gr.update(value=None), gr.update(
-                value=None), gr.update(value=None), infer_request
+            chatbot_content = cls._replace_tag_with_media(infer_request)
+            chatbot_content = cls.parse_text(chatbot_content)
+            yield '', chatbot_content, gr.update(value=None), gr.update(value=None), gr.update(
+                value=None), infer_request
             return
         else:
             infer_request.messages[-1]['content'] = infer_request.messages[-1]['content'] + prompt
@@ -407,5 +421,7 @@ def send_message(cls, running_task, template_type, prompt: str, image, video, au
                 continue
             stream_resp_with_history += chunk.choices[0].delta.content if chat else chunk.choices[0].text
             infer_request.messages[-1]['content'] = stream_resp_with_history
-            yield '', cls._replace_tag_with_media(infer_request), gr.update(value=None), gr.update(
-                value=None), gr.update(value=None), infer_request
+            chatbot_content = cls._replace_tag_with_media(infer_request)
+            chatbot_content = cls.parse_text(chatbot_content)
+            yield '', chatbot_content, gr.update(value=None), gr.update(value=None), gr.update(
+                value=None), infer_request
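
The parse_text helper added above escapes characters that gr.Chatbot would otherwise render as Markdown/HTML, so model output such as <tag> or *stars* is displayed verbatim. Below is a minimal standalone sketch of the same idea, assuming a tuple-based (query, response) chat history; escape_for_chatbot and the sample history are hypothetical illustrations, not part of this commit.

# Minimal sketch of the escaping idea in parse_text (hypothetical helper, not the commit's code).
def escape_for_chatbot(text: str) -> str:
    """Replace characters that Gradio's chatbot would interpret as markup."""
    return text.replace('<', '&lt;').replace('>', '&gt;').replace('*', '&ast;')

# Assumed tuple-based history, as used by tuple-style gr.Chatbot messages.
history = [('Show <b>bold</b> and *stars* literally', 'Sure: <b>bold</b> and *stars*')]
escaped = [(escape_for_chatbot(q), escape_for_chatbot(r)) for q, r in history]
print(escaped[0][0])  # Show &lt;b&gt;bold&lt;/b&gt; and &ast;stars&ast; literally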

swift/ui/llm_infer/runtime.py

Lines changed: 5 additions & 0 deletions
@@ -1,6 +1,7 @@
 # Copyright (c) Alibaba, Inc. and its affiliates.
 import collections
 import os.path
+import re
 import subprocess
 import sys
 import time
@@ -274,6 +275,10 @@ def task_changed(cls, task, base_tab):
             if e.elem_id in all_args:
                 if isinstance(e, gr.Dropdown) and e.multiselect:
                     arg = all_args[e.elem_id].split(' ')
+                elif isinstance(e, gr.Slider) and re.fullmatch(cls.int_regex, all_args[e.elem_id]):
+                    arg = int(all_args[e.elem_id])
+                elif isinstance(e, gr.Slider) and re.fullmatch(cls.float_regex, all_args[e.elem_id]):
+                    arg = float(all_args[e.elem_id])
                 else:
                     if e.elem_id == 'model':
                         if is_custom_path:
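
The new branches above convert the string read back from a saved command line into the numeric type gr.Slider expects, instead of handing the component a raw str. A rough sketch of that dispatch follows; the regex patterns are assumptions, since cls.int_regex and cls.float_regex are defined elsewhere in the class and may differ.

# Rough sketch of the Slider value coercion; int_regex/float_regex are assumed patterns.
import re

int_regex = r'[-+]?\d+'         # assumption: plain integer literal
float_regex = r'[-+]?\d*\.\d+'  # assumption: simple float literal

def coerce_slider_value(raw: str):
    """Return an int or float for numeric strings so gr.Slider receives a number, not a str."""
    if re.fullmatch(int_regex, raw):
        return int(raw)
    if re.fullmatch(float_regex, raw):
        return float(raw)
    return raw  # leave anything non-numeric untouched

print(coerce_slider_value('20'))   # 20 (int)
print(coerce_slider_value('0.7'))  # 0.7 (float)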
