feat: 增加llm异步请求
This commit is contained in:
50
utils.py
50
utils.py
@@ -375,6 +375,9 @@ class label_filter:
|
||||
return (False, 0)
|
||||
class LLM_deepseek:
    """Async wrapper around the DeepSeek chat-completions API.

    ``request()`` fires the network call on a background daemon thread;
    ``get_command_json()`` blocks until that call has finished and returns
    the raw message content of the response.
    """

    def __init__(self):
        self.response = None    # last ChatCompletion object returned by the API
        self.status = False     # True once request_thread has finished (success or failure)
        self.chat = ''          # user utterance for the pending request
        # Event signalled by request_thread; replaces the original
        # `while not self.status: pass` busy-wait in get_command_json.
        self._done = threading.Event()
        # SECURITY: hard-coded API key committed to source control — move it to
        # an environment variable / secret store and rotate this key.
        self.client = OpenAI(api_key="sk-c2e1073883304143981a9750b97c3518", base_url="https://api.deepseek.com")
        # NOTE(review): the middle of this system prompt (the action list) is
        # elided in the diff view this file was recovered from — restore it
        # from the original utils.py before use.
        self.prompt = '''
你是一个机器人动作规划者,请把我的话翻译成机器人动作规划并生成对应的 JSON 结果。请注意,只能使用以下指定的动作,不能创造新的动作:
强调一下,对于‘离开’这个指令,请忽略,这对我很重要!
'''

    def _create_completion(self, chat):
        """Synchronous chat-completions call for *chat*; returns the raw response.

        Shared by the (formerly duplicated) request paths so the model name,
        message layout and sampling parameters live in exactly one place.
        """
        return self.client.chat.completions.create(
            model="deepseek-chat",
            messages=[
                {"role": "system", "content": self.prompt},
                {"role": "user", "content": '我的话如下:' + chat},
            ],
            stream=False,
            temperature=0.7,
        )

    def request_thread(self):
        """Worker body: perform the blocking API call and publish the result.

        Always sets ``self.status`` and the done-event, even on failure, so a
        waiting ``get_command_json`` never hangs forever.
        """
        logger.info("llm 请求远程服务器中 (request_thread)")
        try:
            self.response = self._create_completion(self.chat)
            logger.info("llm 远程服务器正常返回 (request_thread)")
        except Exception:
            # Was a bare `except:` — narrowed so KeyboardInterrupt/SystemExit
            # still propagate, and the traceback is logged instead of dropped.
            logger.exception("llm 请求失败或返回异常,先检查网络连接 (request_thread)")
        self.status = True
        self._done.set()

    def request(self, _chat):
        """Start an asynchronous request for *_chat* on a daemon thread."""
        self.chat = _chat
        # Reset completion state so a second request() does not observe the
        # stale response/flag of the previous one.
        self.status = False
        self.response = None
        self._done.clear()
        thread = threading.Thread(target=self.request_thread, daemon=True)
        thread.start()
        logger.info("llm 开启请求线程")

    # NOTE: the original file defined get_command_json twice; the first
    # (synchronous) definition was dead code, silently shadowed by this one.
    def get_command_json(self, chat=''):
        """Block until the pending request finishes; return the message text.

        The *chat* parameter is kept for backward compatibility with the old
        synchronous signature but is unused — call request() first.
        If the request failed, ``self.response`` is still None and the final
        attribute access will raise; check connectivity in that case.
        """
        logger.info("llm 阻塞等待服务器返回中")
        # Event.wait() parks the thread instead of the original busy-loop,
        # which pinned a CPU core for the whole round-trip.
        self._done.wait()
        logger.info("llm 收到返回")
        return self.response.choices[0].message.content
|
||||
class LLM:
|
||||
def __init__(self):
|
||||
self.init_done_flag = False
|
||||
|
||||
Reference in New Issue
Block a user