From 628e73a78f0cc6864acf90bf3436e6dfaf8f8302 Mon Sep 17 00:00:00 2001
From: yale <2927096163@qq.com>
Date: Mon, 15 May 2023 16:17:00 +0800
Subject: [PATCH] :bug: Fix multiline overwrite output

---
 models/chatglm/__init__.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/models/chatglm/__init__.py b/models/chatglm/__init__.py
index 0440751..3fa7661 100644
--- a/models/chatglm/__init__.py
+++ b/models/chatglm/__init__.py
@@ -33,8 +33,10 @@ def chat(self) -> str:
         history = []
         while True:
             text = input("用户输入:")
+            last_response = ""
             for response, history in self.model.stream_chat(self.tokenizer, text, history=history):
-                print(response, end='\r')
+                print(response[len(last_response):], end='')
+                last_response = response
             print(flush=True)
 
     def run_web_demo(self, input_text, history=[]):
@@ -45,4 +47,4 @@ def run(self, text, history=[]):
         return self.model.chat(self.tokenizer, text, history=history)
 
 def get_model(args):
-    return ChatGLMMdoel(args)
\ No newline at end of file
+    return ChatGLMMdoel(args)