Major refactor: security, performance, and code organization

Security:
- DeepSeek API moved to server-side proxy with rate limiting (20 req/min)
- Whitelist validation for all POST/PUT routes
- Cookie security (secure, sameSite, httpOnly in production)
- Input validation for messages, tokens, temperature
- Sanitized hasOwnProperty usage to prevent prototype pollution

Performance:
- Lazy loading for chat messages (sliding window of 20)
- Streaming response throttling (50ms batches)
- Scroll optimization (only scroll on new messages)
- AbortController fix for the stop button

Code organization:
- GamePage refactored from ~1170 to ~750 lines
- New hooks: useGameSession, useStreamingResponse, useCharacterDetection, useLazyMessages
- New components: MessageList, ChatInput, SessionSelector, CharacterPanel
- Fixed ESLint errors

Features:
- OOC mode button for direct AI instructions
- Message versions (aiResponse) now persist to the DB
- playerId saved in sessions
This commit is contained in:
@@ -1,4 +1,4 @@
|
||||
// DeepSeek API service for story generation
|
||||
// DeepSeek API service for story generation (via backend proxy)
|
||||
|
||||
import type {
|
||||
Story,
|
||||
@@ -7,15 +7,12 @@ import type {
|
||||
GameSession,
|
||||
} from "../types";
|
||||
|
||||
const DEEPSEEK_API_URL = "https://api.deepseek.com/v1/chat/completions";
|
||||
const API_BASE = import.meta.env.VITE_API_URL || "http://localhost:3001";
|
||||
|
||||
// Context settings
|
||||
const RECENT_MESSAGES_COUNT = 6; // Last N messages for context
|
||||
const SUMMARY_THRESHOLD = 15; // After how many messages to generate summary
|
||||
|
||||
// API key should be stored in environment variables
|
||||
const getApiKey = () => import.meta.env.VITE_DEEPSEEK_API_KEY || "";
|
||||
|
||||
interface DeepSeekMessage {
|
||||
role: "system" | "user" | "assistant";
|
||||
content: string;
|
||||
@@ -39,23 +36,13 @@ export async function sendMessage(
|
||||
messages: DeepSeekMessage[],
|
||||
temperature: number = 0.8,
|
||||
): Promise<string> {
|
||||
const apiKey = getApiKey();
|
||||
|
||||
if (!apiKey) {
|
||||
throw new Error(
|
||||
"DeepSeek API key not configured. Add VITE_DEEPSEEK_API_KEY to your .env file",
|
||||
);
|
||||
}
|
||||
|
||||
const response = await fetch(DEEPSEEK_API_URL, {
|
||||
const response = await fetch(`${API_BASE}/api/deepseek/chat`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
},
|
||||
credentials: "include",
|
||||
body: JSON.stringify({
|
||||
// model: "deepseek-chat",
|
||||
model: "deepseek-chat",
|
||||
messages,
|
||||
temperature,
|
||||
max_tokens: 1000,
|
||||
@@ -79,26 +66,16 @@ export async function sendMessageStream(
|
||||
onChunk: (chunk: string) => void,
|
||||
signal?: AbortSignal,
|
||||
): Promise<string> {
|
||||
const apiKey = getApiKey();
|
||||
|
||||
if (!apiKey) {
|
||||
throw new Error(
|
||||
"DeepSeek API key not configured. Add VITE_DEEPSEEK_API_KEY to your .env file",
|
||||
);
|
||||
}
|
||||
|
||||
const response = await fetch(DEEPSEEK_API_URL, {
|
||||
const response = await fetch(`${API_BASE}/api/deepseek/chat/stream`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
},
|
||||
credentials: "include",
|
||||
body: JSON.stringify({
|
||||
model: "deepseek-chat",
|
||||
messages,
|
||||
temperature,
|
||||
max_tokens: 1000,
|
||||
stream: true,
|
||||
}),
|
||||
signal,
|
||||
});
|
||||
|
||||
Reference in New Issue
Block a user