diff --git a/config_manager.py b/config_manager.py
index 4dbc381..4479c80 100644
--- a/config_manager.py
+++ b/config_manager.py
@@ -47,12 +47,16 @@ class ConfigManager:
                 'level': 'INFO',
                 'store': True,
                 'path': 'logs'
+            },
+            'llm': {
+                'api_key': '',  # must be filled in by the user
+                'api_base': 'https://api.openai.com/v1',  # API base URL
+                'model': 'gpt-3.5-turbo',  # default model
+                'temperature': 0.7,  # sampling temperature
             }
         }
 
-        # 确保目录存在
         os.makedirs(os.path.dirname(config_path) or '.', exist_ok=True)
 
-        # 写入默认配置
         with open(config_path, 'w', encoding='utf-8') as f:
             yaml.dump(default_config, f, default_flow_style=False, allow_unicode=True)
diff --git a/database_manager.py b/database_manager.py
index 01808d5..c86b65c 100644
--- a/database_manager.py
+++ b/database_manager.py
@@ -58,6 +58,7 @@ class DatabaseManager:
         if conditions:
             query += f" WHERE {conditions}"
         try:
+            self.logger.debug(f"执行查询: {query}")
             return pd.read_sql(query, engine)
         except Exception as e:
             self.logger.error(f"从数据库加载数据时出错: {e}")
diff --git a/llm_manager.py b/llm_manager.py
new file mode 100644
index 0000000..698dbdc
--- /dev/null
+++ b/llm_manager.py
@@ -0,0 +1,95 @@
+import os
+import yaml
+import requests
+from typing import Dict, Any, Optional
+
+
+class LLMManager:
+    """LLM manager: loads the API configuration and provides chat access to the LLM."""
+
+    _instance = None  # singleton instance
+
+    def __new__(cls):
+        """Create the singleton on first call; later calls return the same object.
+
+        All instance state is initialised here rather than in __init__,
+        because __init__ would run on every LLMManager() call and reset
+        the already-configured singleton.
+        """
+        if cls._instance is None:
+            cls._instance = super(LLMManager, cls).__new__(cls)
+            cls._instance._config = None
+            cls._instance._api_key = None
+            cls._instance._api_base = None
+            cls._instance._model = None
+            cls._instance._temperature = None
+        return cls._instance
+
+    def initialize(self):
+        """Load the 'llm' section of the application config."""
+        # Imported lazily to avoid a circular import at module load time.
+        from config_manager import get_config_manager
+
+        config_manager = get_config_manager()
+        llm_config = config_manager.get('llm', {})
+
+        # Keep the loaded section so chat() can tell that initialisation ran.
+        self._config = llm_config
+        self._api_key = llm_config.get('api_key', '')
+        self._api_base = llm_config.get('api_base', 'https://api.openai.com/v1')
+        self._model = llm_config.get('model', 'gpt-3.5-turbo')
+        self._temperature = llm_config.get('temperature', 0.7)
+
+        if not self._api_key:
+            print("警告: LLM API密钥未配置,请在config.yaml中设置。")
+
+    def chat(self, content: str, prompt: Optional[str] = None) -> str:
+        """Send one message to the LLM and return its reply.
+
+        Args:
+            content: the user message.
+            prompt: optional system prompt.
+
+        Returns:
+            The LLM's reply text.
+
+        Raises:
+            Exception: if the API responds with a non-200 status.
+        """
+        # Lazily load the configuration on first use.
+        if self._config is None:
+            self.initialize()
+
+        headers = {
+            "Authorization": f"Bearer {self._api_key}",
+            "Content-Type": "application/json"
+        }
+
+        messages = []
+        if prompt:
+            messages.append({"role": "system", "content": prompt})
+        messages.append({"role": "user", "content": content})
+
+        payload = {
+            "model": self._model,
+            "messages": messages,
+            "temperature": self._temperature,
+        }
+
+        # Timeout so a stalled API endpoint cannot hang the caller forever.
+        response = requests.post(
+            f"{self._api_base}/chat/completions",
+            headers=headers,
+            json=payload,
+            timeout=60,
+        )
+
+        if response.status_code != 200:
+            raise Exception(f"API请求失败: {response.text}")
+
+        return response.json()["choices"][0]["message"]["content"]
+
+
+def get_llm_manager() -> LLMManager:
+    """Return the shared LLMManager singleton."""
+    return LLMManager()