feat: banner generation, improved memory system, streaming text animation

- Add banner/cover generation for stories with character reference support
- Improve summary system: generate every 8 messages or when context large
- Enhance summary prompt to preserve critical story info (promises, relationships)
- Add typewriter text animation during AI streaming
- Increase context to 20 messages, lower summary temperature to 0.1
- Server: auto-truncate long messages instead of rejecting
This commit is contained in:
Alexej Wolff
2026-05-09 06:39:10 +02:00
parent f6fffc1561
commit e8cd01c693
9 changed files with 1498 additions and 260 deletions
+49 -22
View File
@@ -198,18 +198,16 @@ function validateDeepSeekRequest(body) {
const errors = [];
const { messages, temperature, max_tokens } = body;
// Validate messages
// Validate messages - only check critical errors, truncation handled in sanitize
if (!Array.isArray(messages)) {
errors.push("messages must be an array");
} else {
if (messages.length === 0) {
errors.push("messages cannot be empty");
}
if (messages.length > DEEPSEEK_LIMITS.MAX_MESSAGES) {
errors.push(`too many messages (max ${DEEPSEEK_LIMITS.MAX_MESSAGES})`);
}
// Don't error on too many messages - we'll truncate them
// Don't error on too long messages - we'll truncate them
let totalLength = 0;
for (let i = 0; i < messages.length; i++) {
const msg = messages[i];
if (!msg || typeof msg !== "object") {
@@ -223,21 +221,8 @@ function validateDeepSeekRequest(body) {
}
if (typeof msg.content !== "string") {
errors.push(`messages[${i}].content must be a string`);
} else {
if (msg.content.length > DEEPSEEK_LIMITS.MAX_MESSAGE_LENGTH) {
errors.push(
`messages[${i}].content too long (max ${DEEPSEEK_LIMITS.MAX_MESSAGE_LENGTH} chars)`,
);
}
totalLength += msg.content.length;
}
}
if (totalLength > DEEPSEEK_LIMITS.MAX_TOTAL_LENGTH) {
errors.push(
`total message content too long (max ${DEEPSEEK_LIMITS.MAX_TOTAL_LENGTH} chars)`,
);
}
}
// Validate temperature
@@ -264,8 +249,37 @@ function validateDeepSeekRequest(body) {
return errors;
}
/**
 * Truncate ONLY individual messages that are too long.
 * Does NOT remove messages - that would lose context.
 * If total is too long, client should handle via summary generation.
 *
 * @param {Array<{role: string, content: string}>} messages - chat messages to scan
 * @param {number} [maxPerMsg=DEEPSEEK_LIMITS.MAX_MESSAGE_LENGTH] - per-message
 *   character budget; the default is resolved at call time, so existing callers
 *   keep the previous behavior
 * @returns {Array} new array; over-long messages are shallow-copied with their
 *   content cut to exactly maxPerMsg chars, all others are passed through as-is
 */
function truncateMessagesToFit(
  messages,
  maxPerMsg = DEEPSEEK_LIMITS.MAX_MESSAGE_LENGTH,
) {
  // Marker prepended to truncated content. The slice budget is derived from
  // its real length so the result never exceeds maxPerMsg (the previous code
  // subtracted a hard-coded 50, which did not match the marker's 32 chars and
  // would silently overflow the limit if the marker text ever grew).
  const TRUNCATION_MARKER = "[earlier content truncated]...\n\n";
  // Only truncate individual messages that are too long - keep the END (recent context)
  return messages.map((msg, idx) => {
    // Explicit string check: arrays also have .length/.slice and would
    // otherwise be coerced into a string here by the `+` concatenation.
    if (typeof msg.content === "string" && msg.content.length > maxPerMsg) {
      console.log(
        `Truncating message ${idx} (${msg.role}) from ${msg.content.length} to ${maxPerMsg} chars - keeping END`,
      );
      // Keep end of message (most recent context is more important)
      return {
        ...msg,
        content:
          TRUNCATION_MARKER +
          msg.content.slice(-(maxPerMsg - TRUNCATION_MARKER.length)),
      };
    }
    return msg;
  });
}
function sanitizeDeepSeekMessages(messages) {
return messages.map((msg) => ({
// First truncate to fit limits
const truncatedMessages = truncateMessagesToFit(messages);
return truncatedMessages.map((msg) => ({
role: msg.role,
content: String(msg.content),
}));
@@ -1059,7 +1073,7 @@ app.get("/api/admin/stats", requireAuth, async (req, res) => {
// Прокси для генерации изображений через Grok (обход CORS)
app.post("/api/generate-image", requireAuth, async (req, res) => {
try {
const { prompt } = req.body;
const { prompt, orientation = "portrait", refHistory } = req.body;
const apiKey = process.env.GEMINIGEN_API_KEY;
if (!apiKey) {
@@ -1068,14 +1082,27 @@ app.post("/api/generate-image", requireAuth, async (req, res) => {
.json({ error: "GeminiGen API key not configured" });
}
console.log("Generating image with Grok, prompt:", prompt);
console.log(
"Generating image with Grok, prompt:",
prompt,
"orientation:",
orientation,
"refHistory:",
refHistory,
);
// Используем FormData для multipart/form-data
const formData = new FormData();
formData.append("prompt", prompt);
formData.append("orientation", "portrait"); // 9:16
formData.append("orientation", orientation); // portrait (9:16) or landscape (16:9)
formData.append("num_result", "1");
// Добавляем референс истории если есть (для консистентности персонажей)
if (refHistory) {
formData.append("ref_history", refHistory);
console.log("Using ref_history:", refHistory);
}
const response = await fetch(
"https://api.geminigen.ai/uapi/v1/imagen/grok",
{