llm-chat/backend/services/ChatService.js

const Message = require('../models/Message');
const ConversationService = require('./ConversationService');
const LLMApiClient = require('./LLMApiClient');
const cacheManager = require('../utils/cache');

class ChatService {
  constructor() {
    this.conversationService = new ConversationService();
    this.llmClient = new LLMApiClient();
    this.cachePrefix = 'chat_service';
  }

  /**
   * Process a user message and obtain the AI reply
   */
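  // Illustrative usage (a hedged sketch of assumed calling code, not part of this module;
  // the conversation id and options shown are made up):
  //   const chatService = new ChatService();
  //   const result = await chatService.processMessage(42, 'Hello!', { temperature: 0.7 });
  //   console.log(result.aiResponse.content, result.usage);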
  async processMessage(conversationId, userMessage, options = {}) {
    try {
      // Validate input
      if (!conversationId || isNaN(conversationId)) {
        throw new Error('Invalid conversation ID');
      }
      if (!userMessage || userMessage.trim().length === 0) {
        throw new Error('Message content cannot be empty');
      }
      const trimmedMessage = userMessage.trim();

      // Make sure the conversation exists
      const conversation = await this.conversationService.getConversationById(conversationId);
      if (!conversation) {
        throw new Error('Conversation not found');
      }
      console.log(`Processing message: conversation ${conversationId}, content: ${trimmedMessage.substring(0, 50)}...`);

      // 1. Save the user message
      const userMsg = await Message.create(conversationId, 'user', trimmedMessage);

      // 2. Load the conversation history as context
      const conversationHistory = await this.getConversationContext(conversationId);

      // 3. Check the cache
      const cacheKey = this.generateCacheKey(conversationHistory);
      let cachedResponse = cacheManager.getCachedApiResponse(conversationHistory);
      let aiResponse;
      if (cachedResponse && !options.skipCache) {
        console.log('Using cached AI reply');
        aiResponse = cachedResponse;
      } else {
        // 4. Call the LLM API for a reply
        aiResponse = await this.getLLMResponse(conversationHistory, options);
        // 5. Cache the API response
        if (aiResponse && !options.skipCache) {
          cacheManager.cacheApiResponse(conversationHistory, aiResponse);
        }
      }
      if (!aiResponse || !aiResponse.content) {
        throw new Error('The LLM service returned an invalid response');
      }

      // 6. Save the AI reply
      const aiMsg = await Message.create(conversationId, 'assistant', aiResponse.content);

      // 7. Update the conversation's last-activity time
      await this.conversationService.updateLastActivity(conversationId);

      // 8. If this is the conversation's first message (title is still the default '新对话',
      //    i.e. "New Conversation"), auto-generate a title; the literal is kept verbatim
      //    because it must match the default title assigned elsewhere
      if (conversation.title === '新对话') {
        await this.conversationService.generateConversationTitle(conversationId);
      }

      // 9. Refresh the context cache
      this.updateContextCache(conversationId);

      const result = {
        userMessage: {
          id: userMsg.id,
          content: userMsg.content,
          timestamp: userMsg.timestamp
        },
        aiResponse: {
          id: aiMsg.id,
          content: aiMsg.content,
          timestamp: aiMsg.timestamp
        },
        conversationId: conversationId,
        usage: aiResponse.usage
      };
      console.log(`Message processing complete: conversation ${conversationId}`);
      return result;
    } catch (error) {
      console.error('ChatService.processMessage error:', error);
      throw error;
    }
  }
  /**
   * Handle a streaming message response
   */
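  // Illustrative streaming flow (a hedged sketch; `res` stands for an HTTP response object and
  // the shape of the stream chunks depends on LLMApiClient.createStreamChatCompletion — both
  // are assumptions, not taken from this file):
  //   const { stream, conversationId } = await chatService.processStreamMessage(42, 'Hello!');
  //   let fullText = '';
  //   for await (const chunk of stream) {   // assumption: the stream is async-iterable
  //     fullText += chunk;                  // assumption: chunks are text fragments
  //     res.write(chunk);                   // forward each fragment to the client
  //   }
  //   await chatService.completeStreamMessage(conversationId, fullText);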
  async processStreamMessage(conversationId, userMessage, options = {}) {
    try {
      // Validate input
      if (!conversationId || isNaN(conversationId)) {
        throw new Error('Invalid conversation ID');
      }
      if (!userMessage || userMessage.trim().length === 0) {
        throw new Error('Message content cannot be empty');
      }
      const trimmedMessage = userMessage.trim();

      // Make sure the conversation exists
      const conversation = await this.conversationService.getConversationById(conversationId);
      if (!conversation) {
        throw new Error('Conversation not found');
      }
      console.log(`Processing stream message: conversation ${conversationId}`);

      // 1. Save the user message
      const userMsg = await Message.create(conversationId, 'user', trimmedMessage);

      // 2. Load the conversation history
      const conversationHistory = await this.getConversationContext(conversationId);

      // 3. Get the streaming response
      const stream = await this.llmClient.createStreamChatCompletion(conversationHistory, {
        ...options,
        stream: true
      });

      // Return the stream together with the saved user message
      return {
        userMessage: {
          id: userMsg.id,
          content: userMsg.content,
          timestamp: userMsg.timestamp
        },
        stream: stream,
        conversationId: conversationId
      };
    } catch (error) {
      console.error('ChatService.processStreamMessage error:', error);
      throw error;
    }
  }
  /**
   * Finalize a streamed message by saving the complete AI reply
   */
  async completeStreamMessage(conversationId, aiContent) {
    try {
      if (!aiContent || aiContent.trim().length === 0) {
        throw new Error('The AI reply content is empty');
      }
      // Save the AI reply
      const aiMsg = await Message.create(conversationId, 'assistant', aiContent.trim());

      // Update the conversation's last-activity time
      await this.conversationService.updateLastActivity(conversationId);

      // Refresh the context cache
      this.updateContextCache(conversationId);

      return {
        id: aiMsg.id,
        content: aiMsg.content,
        timestamp: aiMsg.timestamp
      };
    } catch (error) {
      console.error('ChatService.completeStreamMessage error:', error);
      throw error;
    }
  }
  /**
   * Get the conversation context
   */
  async getConversationContext(conversationId, maxTokens = 4000) {
    try {
      // Try the cache first
      let context = cacheManager.getConversationContext(conversationId);
      if (context) {
        console.log(`Conversation context served from cache: ${conversationId}`);
        return context;
      }
      // Fall back to the database
      context = await Message.getConversationHistory(conversationId, maxTokens);

      // Cache the context
      cacheManager.cacheConversationContext(conversationId, context);
      console.log(`Loaded conversation context: ${conversationId}, ${context.length} messages`);
      return context;
    } catch (error) {
      console.error('ChatService.getConversationContext error:', error);
      throw new Error('Failed to load conversation context');
    }
  }
  /**
   * Call the LLM API for a reply
   */
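  // Usage within this class (values are illustrative only; any of the generation parameters
  // below may be omitted, and undefined ones are stripped before the call):
  //   const reply = await this.getLLMResponse(messages, { temperature: 0.7, max_tokens: 1024, model: 'gpt-4o-mini' });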
  async getLLMResponse(messages, options = {}) {
    try {
      const llmOptions = {
        temperature: options.temperature,
        max_tokens: options.max_tokens,
        top_p: options.top_p,
        frequency_penalty: options.frequency_penalty,
        presence_penalty: options.presence_penalty,
        model: options.model
      };
      // Strip undefined values
      Object.keys(llmOptions).forEach(key => {
        if (llmOptions[key] === undefined) {
          delete llmOptions[key];
        }
      });
      console.log('Calling LLM API:', JSON.stringify(llmOptions, null, 2));
      const response = await this.llmClient.createChatCompletion(messages, llmOptions);
      console.log('LLM API response:', {
        model: response.model,
        usage: response.usage,
        contentLength: response.content?.length || 0
      });
      return response;
    } catch (error) {
      console.error('ChatService.getLLMResponse error:', error);
      // Map known failure types to friendlier error messages. The Chinese substrings below
      // ('API密钥' = API key, '请求频率' = request rate, '网络' = network) are matched against
      // the error messages raised upstream, so they are left untranslated.
      if (error.message.includes('API密钥')) {
        throw new Error('AI service configuration error, please contact the administrator');
      } else if (error.message.includes('请求频率')) {
        throw new Error('Too many requests, please try again later');
      } else if (error.message.includes('网络')) {
        throw new Error('Network error, please check your connection');
      } else {
        throw new Error('The AI service is temporarily unavailable, please try again later');
      }
    }
  }
  /**
   * Generate a cache key
   */
  generateCacheKey(messages) {
    const messageString = JSON.stringify(messages);
    return cacheManager.generateKey('chat_completion', messageString);
  }

  /**
   * Refresh the context cache
   */
  updateContextCache(conversationId) {
    try {
      // Drop the old context cache so the next read is forced to refetch
      cacheManager.deleteConversationContext(conversationId);
      // Clear the related message caches
      this.conversationService.clearConversationCache(conversationId);
    } catch (error) {
      console.error('Failed to refresh the context cache:', error);
    }
  }
  /**
   * Regenerate the AI reply
   */
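  // Illustrative usage (assumed calling code, not part of this module):
  //   const regenerated = await chatService.regenerateResponse(42, { temperature: 0.9 });
  //   console.log(regenerated.content);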
  async regenerateResponse(conversationId, options = {}) {
    try {
      // Find the most recent user message. Reverse a copy so `messages` keeps its original
      // (assumed chronological) order for the last-message check below.
      const messages = await Message.getRecentMessages(conversationId, 10);
      const lastUserMessage = [...messages].reverse().find(msg => msg.role === 'user');
      if (!lastUserMessage) {
        throw new Error('No user message found');
      }

      // Delete the last AI reply (if there is one)
      const lastMessage = messages[messages.length - 1];
      if (lastMessage && lastMessage.role === 'assistant') {
        await Message.delete(lastMessage.id);
        console.log('Deleted the previous AI reply');
        // Invalidate the cached context so the deleted reply cannot be served from cache
        this.updateContextCache(conversationId);
      }

      // Load the updated conversation history
      const conversationHistory = await this.getConversationContext(conversationId);

      // Bypass the cache and regenerate the reply
      const aiResponse = await this.getLLMResponse(conversationHistory, {
        ...options,
        skipCache: true
      });
      if (!aiResponse || !aiResponse.content) {
        throw new Error('Failed to regenerate the reply');
      }

      // Save the new AI reply
      const aiMsg = await Message.create(conversationId, 'assistant', aiResponse.content);

      // Update the conversation's last-activity time
      await this.conversationService.updateLastActivity(conversationId);

      // Refresh the context cache
      this.updateContextCache(conversationId);

      const result = {
        id: aiMsg.id,
        content: aiMsg.content,
        timestamp: aiMsg.timestamp,
        usage: aiResponse.usage
      };
      console.log(`Reply regenerated: conversation ${conversationId}`);
      return result;
    } catch (error) {
      console.error('ChatService.regenerateResponse error:', error);
      throw error;
    }
  }
  /**
   * Check the LLM service status
   */
  async checkLLMStatus() {
    try {
      const status = await this.llmClient.checkConnection();
      return status;
    } catch (error) {
      console.error('Failed to check LLM status:', error);
      return {
        status: 'error',
        message: 'Unable to connect to the LLM service'
      };
    }
  }

  /**
   * Get chat statistics
   */
  async getChatStats() {
    try {
      // More statistics can be added here
      const cacheStats = cacheManager.getStats();
      return {
        cache: cacheStats,
        llm: {
          connected: (await this.checkLLMStatus()).status === 'connected'
        }
      };
    } catch (error) {
      console.error('Failed to get chat statistics:', error);
      return {
        error: error.message
      };
    }
  }
}

module.exports = ChatService;
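
/*
 * Illustrative wiring (a hedged sketch only, not part of this module): how an Express route
 * handler might drive ChatService. Express, the route path, and the request body shape are
 * assumptions; this repository's actual route layer is not shown in this file.
 *
 *   const express = require('express');
 *   const ChatService = require('../services/ChatService');
 *
 *   const router = express.Router();
 *   const chatService = new ChatService();
 *
 *   router.post('/conversations/:id/messages', async (req, res) => {
 *     try {
 *       const result = await chatService.processMessage(
 *         Number(req.params.id),
 *         req.body.message,
 *         { model: req.body.model }   // optional generation overrides
 *       );
 *       res.json(result);
 *     } catch (error) {
 *       res.status(500).json({ error: error.message });
 *     }
 *   });
 *
 *   module.exports = router;
 */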