import * as api from './api_service.js';
import * as ui from './chat_ui.js';
import { showAlert, showConfirm } from './shared_functions.js';

let currentChatId = null;
let chatHistories = {}; // In-memory cache of chat histories { chatId: [messages] }
let availableModel = 'default-model-id'; // Fetched model ID
let systemPrompt = "You are a helpful and concise assistant."; // Default system prompt
let maxTokens = 4096; // Default max tokens, will be updated by tokenize
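/*
 * Usage sketch (illustrative only): how an entry-point script might wire this
 * module up. The module path './chat_service.js', the element ids and the
 * DOMContentLoaded wiring below are assumptions for illustration, not part of
 * this file or its callers.
 *
 *   import { initializeChat, handleSendMessage } from './chat_service.js';
 *
 *   document.addEventListener('DOMContentLoaded', async () => {
 *       await initializeChat();
 *       const input = document.querySelector('#chat-input');
 *       document.querySelector('#send-button')?.addEventListener('click', async () => {
 *           const text = input.value.trim();
 *           if (!text) return;
 *           input.value = '';
 *           await handleSendMessage(text);
 *       });
 *   });
 */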

// --- Initialization ---
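/**
 * Loads the available model, restores saved chat sessions from localStorage,
 * opens the most recent chat (or creates a new one), and initialises the
 * token-usage display. Call once on application start-up.
 */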
export async function initializeChat() {
    await loadAvailableModel();
    loadChatSessionsFromStorage();
    if (Object.keys(chatHistories).length > 0) {
        // Load the most recent chat (assuming keys are somewhat ordered or track last used)
        const lastChatId = Object.keys(chatHistories).sort().pop(); // Simple sort, might need better tracking
        await loadChat(lastChatId);
    } else {
        // Create a new chat if none exist
        await createNewChat();
    }
    // Initial token calculation for the loaded chat
    await updateTokenUsage();
}
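/**
 * Fetches the model identifier from the backend and stores it in
 * `availableModel`; falls back to the default ID on failure.
 * Note: the assignment assumes api.getAvailableModels() resolves to a single
 * usable model id rather than a list.
 */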
async function loadAvailableModel() {
    try {
        availableModel = await api.getAvailableModels();
        console.log("Using model:", availableModel);
    } catch (error) {
        console.error("Failed to load model, using default:", error);
        // Keep the default model ID
    }
}

// --- Chat Session Management ---
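/**
 * Rebuilds the in-memory cache from localStorage. Every key of the form
 * `chatHistory-<chatId>` is parsed; valid histories are added to the sidebar,
 * while invalid or corrupted entries are removed from storage.
 * Note: the `hasChats` flag is currently set but never read.
 */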
function loadChatSessionsFromStorage() {
    chatHistories = {}; // Clear cache first
    let hasChats = false;
    Object.keys(localStorage).forEach((key) => {
        if (key.startsWith('chatHistory-')) {
            const chatId = key.substring('chatHistory-'.length);
            try {
                const history = JSON.parse(localStorage.getItem(key));
                if (Array.isArray(history)) {
                    chatHistories[chatId] = history;
                    const chatName = generateChatName(history, chatId);
                    ui.addChatToList({ id: chatId, name: chatName }, false, loadChat, deleteChat); // Add to UI
                    hasChats = true;
                } else {
                    localStorage.removeItem(key); // Clean up invalid data
                }
            } catch (e) {
                console.error(`Error parsing chat history for ${key}:`, e);
                localStorage.removeItem(key); // Clean up corrupted data
            }
        }
    });
    console.log("Loaded sessions:", Object.keys(chatHistories).length);
}
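/**
 * Switches the UI to the chat with the given id: re-renders its messages,
 * marks it active in the sidebar, and refreshes the token usage. If the
 * history is missing, it falls back to the first available chat or creates
 * a new one.
 * @param {string} chatId
 */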
export async function loadChat(chatId) {
    if (!chatHistories[chatId]) {
        console.error(`Chat history for ${chatId} not found.`);
        showAlert(`Chat ${chatId} konnte nicht geladen werden.`, 'error');
        // Optionally load the first available chat or create new
        const firstChatId = Object.keys(chatHistories)[0];
        if (firstChatId) await loadChat(firstChatId);
        else await createNewChat();
        return;
    }
    if (currentChatId === chatId) return; // Already loaded

    currentChatId = chatId;
    console.log(`Loading chat: ${chatId}`);

    ui.clearChatBox();
    chatHistories[currentChatId].forEach(msg => {
        ui.displayMessage(msg.content, msg.role, { isMarkdown: msg.role === 'assistant' });
    });
    ui.setActiveChatInList(chatId);
    ui.enableInput();
    await updateTokenUsage(); // Update token count for the newly loaded chat
}
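/**
 * Creates an empty chat with a unique id, persists it immediately, adds it to
 * the sidebar, and makes it the active chat.
 */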
export async function createNewChat() {
    currentChatId = `chat_${Date.now()}_${Math.random().toString(16).slice(2, 8)}`;
    chatHistories[currentChatId] = []; // Initialize empty history
    saveChatHistoryToStorage(currentChatId); // Save empty chat immediately

    const chatName = generateChatName([], currentChatId);
    ui.addChatToList({ id: currentChatId, name: chatName }, true, loadChat, deleteChat); // Add to UI and mark active
    ui.clearChatBox();
    ui.displayMessage("Neuer Chat gestartet. Frag mich etwas!", "assistant");
    ui.setActiveChatInList(currentChatId);
    ui.enableInput();
    await updateTokenUsage(); // Reset token count
    console.log("Created new chat:", currentChatId);
}
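/**
 * Asks for confirmation, then removes the chat from memory, localStorage, and
 * the sidebar. If the deleted chat was the active one, another chat is loaded
 * or a new one is created.
 * @param {string} chatId
 */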
export async function deleteChat(chatId) {
    const confirmed = await showConfirm(`Möchten Sie den Chat "${generateChatName(chatHistories[chatId], chatId)}" wirklich löschen?`, 'error');
    if (confirmed) {
        console.log(`Deleting chat: ${chatId}`);
        delete chatHistories[chatId]; // Remove from memory
        localStorage.removeItem(`chatHistory-${chatId}`); // Remove from storage
        ui.removeChatFromList(chatId); // Remove from UI

        // If the deleted chat was the current one, load another or create new
        if (currentChatId === chatId) {
            const remainingChatIds = Object.keys(chatHistories);
            if (remainingChatIds.length > 0) {
                await loadChat(remainingChatIds[0]); // Load the first remaining
            } else {
                await createNewChat(); // Create a new one if none left
            }
        }
        showAlert('Chat gelöscht.', 'success');
    }
}

// --- Message Handling ---
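/**
 * Sends the user's message to the AI and streams the reply into the UI.
 * Flow: append the user message to the history, persist it, update the token
 * count, stream the completion (expects OpenAI-style SSE lines of the form
 * `data: {...}` terminated by `data: [DONE]`), then persist the full
 * assistant reply and update the token count again.
 * @param {string} messageText - The raw text entered by the user.
 */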
export async function handleSendMessage(messageText) {
    if (!currentChatId || !messageText) return;

    ui.disableInput(true); // Disable input and show loading

    // 1. Add user message to history and UI
    const userMessage = { role: 'user', content: messageText };
    chatHistories[currentChatId].push(userMessage);
    ui.displayMessage(messageText, 'user');
    saveChatHistoryToStorage(currentChatId); // Save after adding user message
    updateChatNameIfNeeded(currentChatId); // Update sidebar name if it's the first message

    // 2. Update token count *before* sending to AI (includes new user message)
    await updateTokenUsage();

    // 3. Get AI Response (Streaming)
    let fullResponse = '';
    try {
        const responseStream = await api.streamChatCompletion(
            chatHistories[currentChatId], // Send current history
            systemPrompt,
            availableModel
        );

        // Process the stream
        const reader = responseStream.body.getReader();
        const decoder = new TextDecoder('utf-8');
        let buffer = ''; // Holds a partial SSE line that was split across chunks
        ui.updateStreamingMessage('', true); // Initialize the streaming div

        while (true) {
            const { done, value } = await reader.read();
            if (done) break;

            buffer += decoder.decode(value, { stream: true });
            // A single read may contain several data lines; the last one may be
            // incomplete, so keep it in the buffer for the next iteration.
            const lines = buffer.split('\n');
            buffer = lines.pop() ?? '';
            for (const line of lines) {
                if (line.startsWith('data: ')) {
                    const jsonStr = line.substring(6).trim();
                    if (jsonStr === '[DONE]') {
                        break; // Stream finished signal; the outer loop ends when the reader reports done
                    }
                    try {
                        const json = JSON.parse(jsonStr);
                        const deltaContent = json.choices?.[0]?.delta?.content;
                        if (deltaContent) {
                            fullResponse += deltaContent;
                            ui.updateStreamingMessage(deltaContent, true); // Update UI incrementally
                        }
                    } catch (error) {
                        if (jsonStr) { // Avoid logging empty lines as errors
                            console.warn('Error parsing stream JSON:', error, 'Received:', jsonStr);
                        }
                    }
                }
            }
        }

        ui.finalizeStreamingMessage(); // Finalize the UI element

        // 4. Add complete AI response to history
        if (fullResponse) {
            const assistantMessage = { role: 'assistant', content: fullResponse };
            chatHistories[currentChatId].push(assistantMessage);
            saveChatHistoryToStorage(currentChatId); // Save after getting full response
        } else {
            // Handle case where stream ended with no content
            console.warn("Stream ended without content.");
            // Maybe display a generic "No response" message? Not added here.
        }

        // 5. Update token usage *after* receiving AI response
        await updateTokenUsage();

    } catch (error) {
        console.error('Error during chat completion:', error);
        showAlert(`Fehler bei der Kommunikation mit der AI: ${error.message}`, 'error');
        ui.finalizeStreamingMessage(); // Ensure UI is cleaned up on error
        // Optionally remove the empty assistant message div if created?
    } finally {
        ui.enableInput(); // Re-enable input regardless of success/error
    }
}

// --- History & Storage ---
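/**
 * Persists one chat history to localStorage under `chatHistory-<chatId>`.
 * Shows an alert if serialisation or storage fails (e.g. quota exceeded).
 */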
function saveChatHistoryToStorage(chatId) {
    if (chatHistories[chatId]) {
        try {
            localStorage.setItem(`chatHistory-${chatId}`, JSON.stringify(chatHistories[chatId]));
        } catch (e) {
            console.error("Error saving chat history to localStorage:", e);
            showAlert("Fehler beim Speichern des Chat-Verlaufs. Möglicherweise ist der Speicher voll.", "error");
        }
    }
}
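/**
 * Derives a display name from the first user message (truncated to 25
 * characters); falls back to the last four characters of the chat id.
 */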
function generateChatName(history, chatId) {
    // Use the first user message as the name, truncated
    const firstUserMessage = history?.find(msg => msg.role === 'user')?.content;
    if (firstUserMessage) {
        return firstUserMessage.substring(0, 25) + (firstUserMessage.length > 25 ? '...' : '');
    }
    // Fallback name
    return `Chat ${chatId.slice(-4)}`;
}
function updateChatNameIfNeeded(chatId) {
    // If history only has 1 message (the user one just added), update name
    if (chatHistories[chatId] && chatHistories[chatId].length === 1) {
        const newName = generateChatName(chatHistories[chatId], chatId);
        ui.updateChatNameInList(chatId, newName);
    }
}

// --- Tokenization & Progress ---
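/**
 * Recalculates token usage for the current chat via the tokenize endpoint and
 * updates the progress bar. Expects the response to carry `count` and
 * (optionally) `max_model_len`; resets the bar to 0 if no chat is active or
 * the call fails.
 */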
async function updateTokenUsage() {
    if (!currentChatId || !chatHistories[currentChatId]) {
        ui.updateProgressBar(0, maxTokens); // Reset if no chat
        return;
    }
    try {
        // Call tokenize API with current history
        const tokenInfo = await api.tokenizePrompt(chatHistories[currentChatId], systemPrompt, availableModel);
        maxTokens = tokenInfo.max_model_len || maxTokens; // Update max tokens if provided
        const currentTokens = tokenInfo.count || 0;
        ui.updateProgressBar(currentTokens, maxTokens);
    } catch (error) {
        console.error("Failed to update token usage:", error);
        // Keep previous progress bar state or reset? Resetting might be clearer.
        ui.updateProgressBar(0, maxTokens);
    }
}