feat: streaming AI responses with stop button

This commit is contained in:
Alexej Wolff
2026-02-11 01:30:56 +01:00
parent 715f2a9bcf
commit 161ecd661e
3 changed files with 200 additions and 15 deletions
+102
View File
@@ -82,6 +82,78 @@ export async function sendMessage(
return data.choices[0]?.message?.content || "";
}
/**
 * Streaming version of sendMessage — delivers the response text incrementally.
 *
 * @param messages - Conversation history to send to the model.
 * @param temperature - Sampling temperature (defaults to 0.8).
 * @param onChunk - Invoked with each text fragment as soon as it arrives.
 * @param signal - Optional AbortSignal to cancel the request mid-stream.
 * @returns The full concatenated response text.
 * @throws Error if the API key is missing, the HTTP status is not OK,
 *         or the response has no readable body.
 */
export async function sendMessageStream(
  messages: DeepSeekMessage[],
  temperature: number = 0.8,
  onChunk: (chunk: string) => void,
  signal?: AbortSignal,
): Promise<string> {
  const apiKey = getApiKey();
  if (!apiKey) {
    throw new Error(
      "DeepSeek API ключ не настроен. Добавьте VITE_DEEPSEEK_API_KEY в .env файл",
    );
  }

  const response = await fetch(DEEPSEEK_API_URL, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${apiKey}`,
    },
    body: JSON.stringify({
      model: "deepseek-chat",
      messages,
      temperature,
      max_tokens: 1000,
      stream: true,
    }),
    signal,
  });

  if (!response.ok) {
    throw new Error(`DeepSeek API error: ${response.status}`);
  }

  const reader = response.body?.getReader();
  if (!reader) throw new Error("No response body");

  const decoder = new TextDecoder();
  let fullContent = "";
  // SSE events can be split across network chunks; carry any incomplete
  // trailing line over to the next read so a JSON payload that straddles
  // two chunks is not silently dropped by JSON.parse.
  let buffer = "";

  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;

      buffer += decoder.decode(value, { stream: true });
      const lines = buffer.split("\n");
      // The last element may be a partial line — keep it for the next read.
      buffer = lines.pop() ?? "";

      for (const line of lines) {
        const trimmed = line.trim();
        if (!trimmed.startsWith("data: ")) continue;
        const data = trimmed.slice(6);
        if (data === "[DONE]") continue;
        try {
          const parsed = JSON.parse(data);
          const content = parsed.choices?.[0]?.delta?.content || "";
          if (content) {
            fullContent += content;
            onChunk(content);
          }
        } catch {
          // Ignore malformed SSE payloads (e.g. keep-alive comments).
        }
      }
    }
  } finally {
    // Release the stream lock even if the loop throws or the request aborts.
    reader.releaseLock();
  }

  return fullContent;
}
/**
 * Builds the base system prompt (style rules) — CACHED
 */
@@ -273,6 +345,36 @@ export async function generateStoryResponse(
return sendMessage(messages, story.temperature || 1.3);
}
/**
 * Streaming version of generateStoryResponse.
 *
 * Assembles the system prompt from style rules, world context, and (when a
 * session is provided) dynamic session context, then streams the model reply.
 *
 * @param story - Story configuration (style, world, temperature).
 * @param chatHistory - Prior messages; only the most recent are sent.
 * @param userMessage - The new user message to respond to.
 * @param onChunk - Invoked with each text fragment as soon as it arrives.
 * @param player - Optional player character used when building style rules.
 * @param session - Optional game session used for dynamic context.
 * @param signal - Optional AbortSignal to cancel the request mid-stream.
 * @returns The full concatenated response text.
 */
export async function generateStoryResponseStream(
  story: Story,
  chatHistory: ChatMessage[],
  userMessage: string,
  onChunk: (chunk: string) => void,
  player?: PlayerCharacter,
  session?: GameSession,
  signal?: AbortSignal,
): Promise<string> {
  const styleRules = buildStyleRules(story, player);
  const worldContext = buildWorldContext(story);
  const dynamicContext = session ? buildDynamicContext(session) : "";

  // Keep the prompt bounded: only the most recent messages are included.
  const recentMessages = chatHistory.slice(-RECENT_MESSAGES_COUNT);

  const systemPrompt = styleRules + "\n" + worldContext + "\n" + dynamicContext;

  const messages: DeepSeekMessage[] = [
    { role: "system", content: systemPrompt },
    ...recentMessages.map((msg) => ({
      role: msg.role as "user" | "assistant",
      content: msg.content,
    })),
    { role: "user", content: userMessage },
  ];

  // `??` (not `||`) so an explicitly configured temperature of 0 is respected
  // instead of being clobbered to the 1.3 default.
  return sendMessageStream(messages, story.temperature ?? 1.3, onChunk, signal);
}
/**
 * Generates a summary of the story so far (call periodically)
 */