# imetting_backend/app/services/llm_service.py
import json
import dashscope
from http import HTTPStatus
from typing import Optional, Dict, List, Generator
import app.core.config as config_module
from app.core.database import get_db_connection
class LLMService:
    """LLM service - focused on LLM API calls and prompt management."""

    def __init__(self):
        # Configure the dashscope API key once at construction time.
        dashscope.api_key = config_module.QWEN_API_KEY

    @property
    def model_name(self):
        """Dynamically read the model name from the live config."""
        return config_module.LLM_CONFIG["model_name"]

    @property
    def system_prompt(self):
        """Dynamically read the system prompt from the live config."""
        return config_module.LLM_CONFIG["system_prompt"]

    @property
    def time_out(self):
        """Dynamically read the request timeout from the live config."""
        return config_module.LLM_CONFIG["time_out"]

    @property
    def temperature(self):
        """Dynamically read the sampling temperature from the live config."""
        return config_module.LLM_CONFIG["temperature"]

    @property
    def top_p(self):
        """Dynamically read top_p from the live config."""
        return config_module.LLM_CONFIG["top_p"]

    def get_task_prompt(self, task_type: str, cursor=None, prompt_id: Optional[int] = None) -> str:
        """
        Unified prompt lookup.

        Args:
            task_type: Task type, e.g. 'MEETING_TASK' or 'KNOWLEDGE_TASK'.
            cursor: Optional database cursor. When given it is reused;
                otherwise a fresh connection is opened and closed here.
            prompt_id: Optional prompt id. When given, that specific prompt
                is fetched (still constrained to task_type and is_active);
                otherwise the default prompt for task_type is fetched.

        Returns:
            str: The prompt content. Falls back to a built-in default when
            no row is found or the stored content is NULL/empty, so the
            declared return type is always honored.
        """
        if prompt_id:
            # A specific prompt was requested; it must still belong to the
            # task type and be active.
            query = """
                SELECT content
                FROM prompts
                WHERE id = %s AND task_type = %s AND is_active = TRUE
                LIMIT 1
            """
            params = (prompt_id, task_type)
        else:
            # Fetch the default active prompt for this task type.
            query = """
                SELECT content
                FROM prompts
                WHERE task_type = %s
                AND is_default = TRUE
                AND is_active = TRUE
                LIMIT 1
            """
            params = (task_type,)

        content = None
        if cursor:
            cursor.execute(query, params)
            result = cursor.fetchone()
            if result:
                # The caller's cursor may produce dict or tuple rows.
                content = result['content'] if isinstance(result, dict) else result[0]
        else:
            with get_db_connection() as connection:
                db_cursor = connection.cursor(dictionary=True)
                try:
                    db_cursor.execute(query, params)
                    result = db_cursor.fetchone()
                    if result:
                        content = result['content']
                finally:
                    # Close explicitly instead of relying on connection teardown.
                    db_cursor.close()

        # Guard against NULL/empty content stored in the DB: fall back to the
        # built-in default so callers always receive a usable prompt string.
        if content:
            return content
        return self._get_default_prompt(task_type)

    def _get_default_prompt(self, task_name: str) -> str:
        """Return the built-in default prompt for a task type."""
        default_prompts = {
            'MEETING_TASK': self.system_prompt,  # system prompt from the config file
            'KNOWLEDGE_TASK': "请根据提供的信息生成知识库文章。",
        }
        return default_prompts.get(task_name, "请根据提供的内容进行总结和分析。")

    def _call_llm_api_stream(self, prompt: str) -> Generator[str, None, None]:
        """Stream a completion from the Qwen model (Alibaba dashscope).

        Yields:
            str: Incremental output chunks. On failure a single
            "error: ..." chunk is yielded and the stream stops.
        """
        try:
            responses = dashscope.Generation.call(
                model=self.model_name,
                prompt=prompt,
                stream=True,
                timeout=self.time_out,
                temperature=self.temperature,
                top_p=self.top_p,
                incremental_output=True  # each chunk carries only the new text
            )
            for response in responses:
                if response.status_code == HTTPStatus.OK:
                    new_content = response.output.get('text', '')
                    if new_content:
                        yield new_content
                else:
                    error_msg = f"Request failed with status code: {response.status_code}, Error: {response.message}"
                    print(error_msg)
                    yield f"error: {error_msg}"
                    break
        except Exception as e:
            error_msg = f"流式调用大模型API错误: {e}"
            print(error_msg)
            yield f"error: {error_msg}"

    def _call_llm_api(self, prompt: str) -> Optional[str]:
        """Call the Qwen model synchronously (non-streaming).

        Returns:
            The generated text on success, or None on any failure.
        """
        try:
            response = dashscope.Generation.call(
                model=self.model_name,
                prompt=prompt,
                timeout=self.time_out,
                temperature=self.temperature,
                top_p=self.top_p
            )
            if response.status_code == HTTPStatus.OK:
                return response.output.get('text', '')
            else:
                print(f"API调用失败: {response.status_code}, {response.message}")
                return None
        except Exception as e:
            print(f"调用大模型API错误: {e}")
            return None
# Manual smoke test: exercises prompt lookup for both task types and
# prints a 100-character preview of each prompt.
if __name__ == '__main__':
    print("--- 运行LLM服务测试 ---")
    service = LLMService()
    task_labels = {
        'MEETING_TASK': "会议任务提示词",
        'KNOWLEDGE_TASK': "知识库任务提示词",
    }
    for task, label in task_labels.items():
        preview = service.get_task_prompt(task)[:100]
        print(f"{label}: {preview}...")
    print("--- LLM服务测试完成 ---")