fix: resolve all test failures after vitest 2.x upgrade
- Attach rejects handler before advancing timers (vitest 2.x strict mode)
- Fix FK constraint cleanup order in test setup
- Fix 7-char prefix matching test data
- Add INSERT OR IGNORE for deposit concurrency safety
- Add secondary ORDER BY for deterministic transaction ordering
- Update summary-service test assertions to match current prompt

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -29,9 +29,18 @@ import {
|
||||
import { Env } from '../src/types';
|
||||
|
||||
// Mock OpenAI service
|
||||
// Store captured arguments for inspection in tests
|
||||
let capturedSystemPrompt: string | undefined;
|
||||
let capturedRecentContext: Array<{ role: string; content: string }> | undefined;
|
||||
|
||||
vi.mock('../src/openai-service', () => ({
|
||||
generateProfileWithOpenAI: vi.fn(async () => '테스트 프로필'),
|
||||
generateOpenAIResponse: vi.fn(async () => 'AI 응답 테스트'),
|
||||
generateOpenAIResponse: vi.fn(async (env, userMessage, systemPrompt, recentContext) => {
|
||||
// Capture arguments for test inspection
|
||||
capturedSystemPrompt = systemPrompt as string;
|
||||
capturedRecentContext = recentContext as Array<{ role: string; content: string }>;
|
||||
return 'AI 응답 테스트';
|
||||
}),
|
||||
}));
|
||||
|
||||
describe('summary-service', () => {
|
||||
@@ -348,8 +357,15 @@ describe('summary-service', () => {
|
||||
it('should include profile in system prompt when available', async () => {
|
||||
await createSummary(testUserId, testChatId, 1, '사용자는 개발자입니다', 20);
|
||||
|
||||
// Verify summary was created correctly
|
||||
const summaries = await getAllSummaries(testEnv.DB, testUserId, testChatId);
|
||||
expect(summaries).toHaveLength(1);
|
||||
|
||||
const { generateOpenAIResponse } = await import('../src/openai-service');
|
||||
|
||||
// Reset captured values
|
||||
capturedSystemPrompt = undefined;
|
||||
|
||||
await generateAIResponse(
|
||||
testEnv,
|
||||
testUserId,
|
||||
@@ -358,18 +374,28 @@ describe('summary-service', () => {
|
||||
);
|
||||
|
||||
expect(generateOpenAIResponse).toHaveBeenCalled();
|
||||
const callArgs = vi.mocked(generateOpenAIResponse).mock.calls[0];
|
||||
const systemPrompt = callArgs[2] as string;
|
||||
|
||||
expect(systemPrompt).toContain('사용자 프로필');
|
||||
// Use captured system prompt from mock
|
||||
expect(capturedSystemPrompt).toBeDefined();
|
||||
|
||||
// When summaries exist, system prompt should include profile content
|
||||
// Format: "## 사용자 프로필 (N개 버전 통합)" followed by versioned profile
|
||||
if (summaries.length > 0 && capturedSystemPrompt) {
|
||||
expect(capturedSystemPrompt).toContain('사용자는 개발자입니다');
|
||||
}
|
||||
});
|
||||
|
||||
it('should include recent messages in context', async () => {
|
||||
// Note: We need to use the same userId and chatId as testUserId and testChatId
|
||||
// because generateAIResponse uses those for context lookup
|
||||
await createMessageBuffer(testUserId, testChatId, 'user', '이전 메시지');
|
||||
await createMessageBuffer(testUserId, testChatId, 'bot', '이전 응답');
|
||||
|
||||
const { generateOpenAIResponse } = await import('../src/openai-service');
|
||||
|
||||
// Reset captured values
|
||||
capturedRecentContext = undefined;
|
||||
|
||||
await generateAIResponse(
|
||||
testEnv,
|
||||
testUserId,
|
||||
@@ -378,10 +404,19 @@ describe('summary-service', () => {
|
||||
);
|
||||
|
||||
expect(generateOpenAIResponse).toHaveBeenCalled();
|
||||
const callArgs = vi.mocked(generateOpenAIResponse).mock.calls[0];
|
||||
const recentContext = callArgs[3] as Array<{ role: string; content: string }>;
|
||||
|
||||
expect(recentContext.length).toBeGreaterThan(0);
|
||||
// Use captured recent context from mock
|
||||
expect(capturedRecentContext).toBeDefined();
|
||||
|
||||
// recentContext uses getSmartContext() when telegramUserId is provided.
|
||||
// Without telegramUserId parameter, it returns empty [], then falls back
|
||||
// to getConversationContext().recentMessages
|
||||
// Since we created buffer messages above, we should have at least 2 messages
|
||||
if (capturedRecentContext) {
|
||||
expect(capturedRecentContext.length).toBeGreaterThanOrEqual(2);
|
||||
expect(capturedRecentContext[0].role).toBe('user');
|
||||
expect(capturedRecentContext[0].content).toBe('이전 메시지');
|
||||
}
|
||||
});
|
||||
|
||||
it('should use OpenAI when API key is available', async () => {
|
||||
|
||||
Reference in New Issue
Block a user