# backtrader/llm_manager.py

from typing import Optional

import requests

from config_manager import get_config_manager
from logger_manager import get_logger

config_manager = get_config_manager()
logger = get_logger()


class LLMManager:
    """LLM manager: loads the API configuration and provides helpers for talking to the LLM."""

    _instance = None  # singleton instance

    def __init__(self):
        # __new__ returns the cached singleton, but __init__ still runs on every
        # LLMManager() call; skip the reset once a config has been loaded so the
        # cached API settings are not wiped.
        if self._config is not None:
            return
        self._api_key = None
        self._api_base = None
        self._model = None
        self._temperature = None

    def __new__(cls):
        """Implement the singleton pattern."""
        if cls._instance is None:
            cls._instance = super(LLMManager, cls).__new__(cls)
            cls._instance._config = None
            cls._instance._api_key = None
            cls._instance._api_base = None
            cls._instance._model = None
            cls._instance._provider = None
        return cls._instance

    def initialize(self):
        """Load the LLM configuration."""
        llm_config = config_manager.get('llm', {})
        # Load the LLM settings, falling back to OpenAI-compatible defaults
        self._api_key = llm_config.get('api_key', '')
        self._api_base = llm_config.get('api_base', 'https://api.openai.com/v1')
        self._model = llm_config.get('model', 'gpt-3.5-turbo')
        self._temperature = llm_config.get('temperature', 0.7)
        # Cache the loaded config so chat() does not re-initialize on every call
        self._config = llm_config
        if not self._api_key:
            logger.warning("Warning: LLM API key is not configured; set it in config.yaml.")
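    # The `llm` section this manager expects in config.yaml looks roughly like the
    # sketch below (values are illustrative; the keys mirror the lookups in
    # initialize(), and the defaults above apply when a key is missing):
    #
    #   llm:
    #     api_key: "sk-..."
    #     api_base: "https://api.openai.com/v1"
    #     model: "gpt-3.5-turbo"
    #     temperature: 0.7
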
    def chat(self, content: str, prompt: Optional[str] = None) -> str:
        """
        Send a chat request to the LLM.

        Args:
            content: the user message.
            prompt: optional system prompt.

        Returns:
            The LLM's reply text.
        """
        if self._config is None:
            self.initialize()
        headers = {
            "Authorization": f"Bearer {self._api_key}",
            "Content-Type": "application/json",
        }
        messages = []
        # Add the system prompt (if provided)
        if prompt:
            messages.append({"role": "system", "content": prompt})
        # Add the user message
        messages.append({"role": "user", "content": content})
        payload = {
            "model": self._model,
            "messages": messages,
            "temperature": self._temperature,
        }
        logger.debug(f"Request payload: {payload}")
        response = requests.post(
            f"{self._api_base}/chat/completions",
            headers=headers,
            json=payload,
            timeout=60,  # avoid hanging indefinitely on a stalled request
        )
        if response.status_code != 200:
            logger.error(f"Request failed: {response.status_code}, {response.text}")
            return "Request failed, please try again later."
        return response.json()["choices"][0]["message"]["content"]
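
# For reference, chat() indexes into an OpenAI-compatible /chat/completions
# response; a successful reply looks roughly like this (fields trimmed, values
# illustrative):
#   {"choices": [{"message": {"role": "assistant", "content": "..."}}], ...}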


# Simple accessor function
def get_llm_manager() -> LLMManager:
    """Return the LLMManager singleton instance."""
    return LLMManager()
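

# Minimal usage sketch (illustrative only): assumes config.yaml provides a valid
# `llm` section and that config_manager / logger_manager are importable, as the
# imports above require.
if __name__ == "__main__":
    manager = get_llm_manager()
    manager.initialize()  # surfaces the missing-API-key warning early
    reply = manager.chat(
        "Introduce yourself in one sentence.",
        prompt="You are a helpful assistant.",
    )
    print(reply)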