// Path: extensions/copilot/src/platform/endpoint/node/test/responsesApi.spec.ts
/*---------------------------------------------------------------------------------------------1* Copyright (c) Microsoft Corporation. All rights reserved.2* Licensed under the MIT License. See License.txt in the project root for license information.3*--------------------------------------------------------------------------------------------*/45import { Raw } from '@vscode/prompt-tsx';6import type { OpenAI } from 'openai';7import { describe, expect, it } from 'vitest';8import { TokenizerType } from '../../../../util/common/tokenizer';9import { IInstantiationService } from '../../../../util/vs/platform/instantiation/common/instantiation';10import { ChatLocation } from '../../../chat/common/commonTypes';11import { ConfigKey, IConfigurationService } from '../../../configuration/common/configurationService';12import { InMemoryConfigurationService } from '../../../configuration/test/common/inMemoryConfigurationService';13import { ILogService } from '../../../log/common/logService';14import { isOpenAIContextManagementResponse } from '../../../networking/common/fetch';15import { IChatEndpoint, ICreateEndpointBodyOptions } from '../../../networking/common/networking';16import { openAIContextManagementCompactionType, OpenAIContextManagementResponse } from '../../../networking/common/openai';17import { IToolDeferralService } from '../../../networking/common/toolDeferralService';18import { IChatWebSocketManager, NullChatWebSocketManager } from '../../../networking/node/chatWebSocketManager';19import { TelemetryData } from '../../../telemetry/common/telemetryData';20import { SpyingTelemetryService } from '../../../telemetry/node/spyingTelemetryService';21import { createFakeStreamResponse } from '../../../test/node/fetcher';22import { createPlatformServices } from '../../../test/node/services';23import { CustomDataPartMimeTypes } from '../../common/endpointTypes';24import { createResponsesRequestBody, getResponsesApiCompactionThresholdFromBody, processResponseFromChatEndpoint, 
responseApiInputToRawMessagesForLogging } from '../responsesApi';2526const testEndpoint: IChatEndpoint = {27urlOrRequestMetadata: 'https://example.test/chat',28modelMaxPromptTokens: 128000,29name: 'Test Endpoint',30version: '1',31family: 'gpt-5-mini',32tokenizer: TokenizerType.O200K,33maxOutputTokens: 4096,34model: 'gpt-5-mini',35modelProvider: 'openai',36supportsToolCalls: true,37supportsVision: true,38supportsPrediction: true,39showInModelPicker: true,40isFallback: false,41acquireTokenizer() {42throw new Error('Not implemented in test');43},44async processResponseFromChatEndpoint() {45throw new Error('Not implemented in test');46},47async makeChatRequest() {48throw new Error('Not implemented in test');49},50async makeChatRequest2() {51throw new Error('Not implemented in test');52},53createRequestBody() {54throw new Error('Not implemented in test');55},56cloneWithTokenOverride() {57return this;58}59};6061const createRequestOptions = (messages: Raw.ChatMessage[], useWebSocket: boolean): ICreateEndpointBodyOptions => ({62debugName: 'test',63messages,64requestId: 'req-1',65postOptions: {},66finishedCb: undefined,67location: undefined as any,68useWebSocket,69});7071const createStatefulMarkerMessage = (modelId: string, marker: string): Raw.ChatMessage => ({72role: Raw.ChatRole.Assistant,73content: [{74type: Raw.ChatCompletionContentPartKind.Opaque,75value: {76type: CustomDataPartMimeTypes.StatefulMarker,77value: {78modelId,79marker,80}81}82}]83});8485const createCompactionResponse = (id: string, encrypted_content: string): OpenAIContextManagementResponse => ({86type: openAIContextManagementCompactionType,87id,88encrypted_content,89});9091const createCompactionAssistantMessage = (compaction: OpenAIContextManagementResponse): Raw.ChatMessage => ({92role: Raw.ChatRole.Assistant,93content: [{94type: Raw.ChatCompletionContentPartKind.Opaque,95value: {96type: CustomDataPartMimeTypes.ContextManagement,97compaction,98}99}]100});101102type ResponseFunctionCallInputItem = 
OpenAI.Responses.ResponseInputItem & {103type: 'function_call';104name: string;105namespace?: string;106};107108function isFunctionCallInputItem(item: OpenAI.Responses.ResponseInputItem, name: string): item is ResponseFunctionCallInputItem {109return item.type === 'function_call' && 'name' in item && item.name === name;110}111112describe('responseApiInputToRawMessagesForLogging', () => {113114it('converts simple string input to user message', () => {115const body: OpenAI.Responses.ResponseCreateParams = {116model: 'gpt-5-mini',117input: 'Hello, world!'118};119120const result = responseApiInputToRawMessagesForLogging(body);121122expect(result).toHaveLength(1);123expect(result[0].role).toBe(Raw.ChatRole.User);124expect(result[0].content).toEqual([125{ type: Raw.ChatCompletionContentPartKind.Text, text: 'Hello, world!' }126]);127});128129it('includes system instructions when provided', () => {130const body: OpenAI.Responses.ResponseCreateParams = {131model: 'gpt-5-mini',132input: 'Hello',133instructions: 'You are a helpful assistant'134};135136const result = responseApiInputToRawMessagesForLogging(body);137138expect(result).toHaveLength(2);139expect(result[0].role).toBe(Raw.ChatRole.System);140expect(result[0].content).toEqual([141{ type: Raw.ChatCompletionContentPartKind.Text, text: 'You are a helpful assistant' }142]);143expect(result[1].role).toBe(Raw.ChatRole.User);144});145146it('converts user message with input_text content', () => {147const body: OpenAI.Responses.ResponseCreateParams = {148model: 'gpt-5-mini',149input: [150{151role: 'user',152content: [{ type: 'input_text', text: 'What is the weather?' }]153}154]155};156157const result = responseApiInputToRawMessagesForLogging(body);158159expect(result).toHaveLength(1);160expect(result[0].role).toBe(Raw.ChatRole.User);161expect(result[0].content).toEqual([162{ type: Raw.ChatCompletionContentPartKind.Text, text: 'What is the weather?' 
}163]);164});165166it('converts system/developer messages to system role', () => {167const body: OpenAI.Responses.ResponseCreateParams = {168model: 'gpt-5-mini',169input: [170{171role: 'developer',172content: 'Be concise'173}174]175};176177const result = responseApiInputToRawMessagesForLogging(body);178179expect(result).toHaveLength(1);180expect(result[0].role).toBe(Raw.ChatRole.System);181});182183it('converts function_call items to assistant tool calls', () => {184const body: OpenAI.Responses.ResponseCreateParams = {185model: 'gpt-5-mini',186input: [187{188type: 'function_call',189call_id: 'call_123',190name: 'get_weather',191arguments: '{"location": "Seattle"}'192}193]194};195196const result = responseApiInputToRawMessagesForLogging(body);197198expect(result).toHaveLength(1);199expect(result[0].role).toBe(Raw.ChatRole.Assistant);200const assistantMsg = result[0] as Raw.AssistantChatMessage;201expect(assistantMsg.toolCalls).toHaveLength(1);202expect(assistantMsg.toolCalls![0]).toEqual({203id: 'call_123',204type: 'function',205function: {206name: 'get_weather',207arguments: '{"location": "Seattle"}'208}209});210});211212it('converts function_call_output items to tool messages', () => {213const body: OpenAI.Responses.ResponseCreateParams = {214model: 'gpt-5-mini',215input: [216{217type: 'function_call_output',218call_id: 'call_123',219output: 'Sunny, 72°F'220}221]222};223224const result = responseApiInputToRawMessagesForLogging(body);225226expect(result).toHaveLength(1);227expect(result[0].role).toBe(Raw.ChatRole.Tool);228const toolMsg = result[0] as Raw.ToolChatMessage;229expect(toolMsg.toolCallId).toBe('call_123');230expect(toolMsg.content).toEqual([231{ type: Raw.ChatCompletionContentPartKind.Text, text: 'Sunny, 72°F' }232]);233});234235it('handles mixed conversation with multiple message types', () => {236const body: OpenAI.Responses.ResponseCreateParams = {237model: 'gpt-5-mini',238instructions: 'You are a weather assistant',239input: [240{241role: 
'user',242content: 'What is the weather in Seattle?'243},244{245type: 'function_call',246call_id: 'call_456',247name: 'get_weather',248arguments: '{"location": "Seattle"}'249},250{251type: 'function_call_output',252call_id: 'call_456',253output: 'Rainy, 55°F'254},255{256role: 'user',257content: 'Thanks!'258}259]260};261262const result = responseApiInputToRawMessagesForLogging(body);263264expect(result).toHaveLength(5);265expect(result[0].role).toBe(Raw.ChatRole.System); // instructions266expect(result[1].role).toBe(Raw.ChatRole.User); // first user message267expect(result[2].role).toBe(Raw.ChatRole.Assistant); // function call268expect((result[2] as Raw.AssistantChatMessage).toolCalls).toHaveLength(1);269expect(result[3].role).toBe(Raw.ChatRole.Tool); // function output270expect(result[4].role).toBe(Raw.ChatRole.User); // thanks message271});272273it('returns empty array for undefined input', () => {274const body: OpenAI.Responses.ResponseCreateParams = {275model: 'gpt-5-mini',276input: undefined as any277};278279const result = responseApiInputToRawMessagesForLogging(body);280281expect(result).toHaveLength(0);282});283284it('groups consecutive function calls into single assistant message', () => {285const body: OpenAI.Responses.ResponseCreateParams = {286model: 'gpt-5-mini',287input: [288{289type: 'function_call',290call_id: 'call_1',291name: 'tool_a',292arguments: '{}'293},294{295type: 'function_call',296call_id: 'call_2',297name: 'tool_b',298arguments: '{}'299}300]301};302303const result = responseApiInputToRawMessagesForLogging(body);304305// Two consecutive function calls should be grouped into one assistant message306expect(result).toHaveLength(1);307expect(result[0].role).toBe(Raw.ChatRole.Assistant);308expect((result[0] as Raw.AssistantChatMessage).toolCalls).toHaveLength(2);309});310311it('converts tool_search_call and tool_search_output items to raw messages', () => {312const body: OpenAI.Responses.ResponseCreateParams = {313model: 'gpt-5-mini',314input: 
[315{316type: 'tool_search_call',317execution: 'client',318call_id: 'ts_call_1',319status: 'completed',320arguments: { query: 'file editing tools' },321} as unknown as OpenAI.Responses.ResponseInputItem,322{323type: 'tool_search_output',324execution: 'client',325call_id: 'ts_call_1',326status: 'completed',327tools: [328{ type: 'function', name: 'grep_search', description: 'Search files', defer_loading: true, parameters: {} },329{ type: 'function', name: 'file_search', description: 'Find files', defer_loading: true, parameters: {} },330],331} as unknown as OpenAI.Responses.ResponseInputItem332]333};334335const result = responseApiInputToRawMessagesForLogging(body);336337expect(result).toEqual([338{339role: Raw.ChatRole.Assistant,340content: [],341toolCalls: [{342id: 'ts_call_1',343type: 'function',344function: {345name: 'tool_search',346arguments: '{"query":"file editing tools"}',347}348}]349},350{351role: Raw.ChatRole.Tool,352content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: '["grep_search","file_search"]' }],353toolCallId: 'ts_call_1',354}355]);356});357});358359describe('createResponsesRequestBody', () => {360it('extracts compaction threshold from request body context management', () => {361expect(getResponsesApiCompactionThresholdFromBody({362context_management: [{363type: openAIContextManagementCompactionType,364compact_threshold: 1234,365}]366})).toBe(1234);367});368369it('still slices websocket requests by stateful marker index when compaction is disabled', () => {370const services = createPlatformServices();371const wsManager = new NullChatWebSocketManager();372wsManager.getStatefulMarker = () => 'resp-prev';373services.set(IChatWebSocketManager, wsManager);374const accessor = services.createTestingAccessor();375const instantiationService = accessor.get(IInstantiationService);376const endpointWithoutCompaction = { ...testEndpoint, family: 'gpt-5' as const };377const messages: Raw.ChatMessage[] = [378{379role: Raw.ChatRole.User,380content: [{ 
type: Raw.ChatCompletionContentPartKind.Text, text: 'before marker' }],381},382createStatefulMarkerMessage(testEndpoint.model, 'resp-prev'),383{384role: Raw.ChatRole.User,385content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'after marker' }],386},387];388389const webSocketBody = instantiationService.invokeFunction(servicesAccessor => createResponsesRequestBody(servicesAccessor, { ...createRequestOptions(messages, true), conversationId: 'conv-1' }, endpointWithoutCompaction.model, endpointWithoutCompaction));390391expect(webSocketBody.previous_response_id).toBe('resp-prev');392expect(webSocketBody.input).toHaveLength(1);393expect(webSocketBody.input?.[0]).toMatchObject({394role: 'user',395content: [{ type: 'input_text', text: 'after marker' }],396});397398accessor.dispose();399services.dispose();400});401402it('includes the newest compaction item in websocket requests when it predates the stateful marker', () => {403const services = createPlatformServices();404const wsManager = new NullChatWebSocketManager();405wsManager.getStatefulMarker = () => 'resp-prev';406services.set(IChatWebSocketManager, wsManager);407const accessor = services.createTestingAccessor();408const instantiationService = accessor.get(IInstantiationService);409const latestCompaction = createCompactionResponse('cmp_ws', 'enc_ws');410const messages: Raw.ChatMessage[] = [411{412role: Raw.ChatRole.User,413content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'before compaction' }],414},415createCompactionAssistantMessage(latestCompaction),416createStatefulMarkerMessage(testEndpoint.model, 'resp-prev'),417{418role: Raw.ChatRole.User,419content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'after marker' }],420},421];422423const webSocketBody = instantiationService.invokeFunction(servicesAccessor => createResponsesRequestBody(servicesAccessor, { ...createRequestOptions(messages, true), conversationId: 'conv-1' }, testEndpoint.model, 
testEndpoint));424425expect(webSocketBody.previous_response_id).toBe('resp-prev');426expect(webSocketBody.input).toContainEqual({427type: openAIContextManagementCompactionType,428id: 'cmp_ws',429encrypted_content: 'enc_ws',430});431expect(webSocketBody.input).toContainEqual({432role: 'user',433content: [{ type: 'input_text', text: 'after marker' }],434});435436accessor.dispose();437services.dispose();438});439440it('sends all messages when the websocket stateful marker is not in the current messages', () => {441const services = createPlatformServices();442const wsManager = new NullChatWebSocketManager();443wsManager.getStatefulMarker = () => 'resp-stale';444services.set(IChatWebSocketManager, wsManager);445const accessor = services.createTestingAccessor();446const instantiationService = accessor.get(IInstantiationService);447const messages: Raw.ChatMessage[] = [448{449role: Raw.ChatRole.User,450content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'first message' }],451},452createStatefulMarkerMessage(testEndpoint.model, 'resp-different'),453{454role: Raw.ChatRole.User,455content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'second message' }],456},457];458459const body = instantiationService.invokeFunction(servicesAccessor => createResponsesRequestBody(servicesAccessor, { ...createRequestOptions(messages, true), conversationId: 'conv-1' }, testEndpoint.model, testEndpoint));460461expect(body.previous_response_id).toBeUndefined();462expect(body.input).toHaveLength(2);463expect(body.input?.[0]).toMatchObject({464role: 'user',465content: [{ type: 'input_text', text: 'first message' }],466});467expect(body.input?.[1]).toMatchObject({468role: 'user',469content: [{ type: 'input_text', text: 'second message' }],470});471472accessor.dispose();473services.dispose();474});475476it('does not reuse a websocket stateful marker when modeChanged is true', () => {477const services = createPlatformServices();478const wsManager = new 
NullChatWebSocketManager();479wsManager.getStatefulMarker = () => 'resp-prev';480services.set(IChatWebSocketManager, wsManager);481const accessor = services.createTestingAccessor();482const instantiationService = accessor.get(IInstantiationService);483const messages: Raw.ChatMessage[] = [484{485role: Raw.ChatRole.User,486content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'before marker' }],487},488createStatefulMarkerMessage(testEndpoint.model, 'resp-prev'),489{490role: Raw.ChatRole.User,491content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'after marker' }],492},493];494495const body = instantiationService.invokeFunction(servicesAccessor => createResponsesRequestBody(servicesAccessor, { ...createRequestOptions(messages, true), conversationId: 'conv-1', modeChanged: true }, testEndpoint.model, testEndpoint));496497expect(body.previous_response_id).toBeUndefined();498expect(body.input).toHaveLength(2);499expect(body.input?.[0]).toMatchObject({500role: 'user',501content: [{ type: 'input_text', text: 'before marker' }],502});503expect(body.input?.[1]).toMatchObject({504role: 'user',505content: [{ type: 'input_text', text: 'after marker' }],506});507508accessor.dispose();509services.dispose();510});511512it('reuses the newly established websocket marker on follow-up requests after switching into plan mode', () => {513const services = createPlatformServices();514const wsManager = new NullChatWebSocketManager();515wsManager.getStatefulMarker = () => 'resp-plan-1';516services.set(IChatWebSocketManager, wsManager);517const accessor = services.createTestingAccessor();518const instantiationService = accessor.get(IInstantiationService);519const websocketEndpoint = { ...testEndpoint, family: 'gpt-5.5', model: 'gpt-5.5' as const };520const messages: Raw.ChatMessage[] = [521{522role: Raw.ChatRole.User,523content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'implementation context before switching modes' 
}],524},525createStatefulMarkerMessage(websocketEndpoint.model, 'resp-agent-1'),526{527role: Raw.ChatRole.User,528content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'switch to plan mode' }],529},530createStatefulMarkerMessage(websocketEndpoint.model, 'resp-plan-1'),531{532role: Raw.ChatRole.User,533content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'plan follow up' }],534},535];536537const body = instantiationService.invokeFunction(servicesAccessor => createResponsesRequestBody(538servicesAccessor,539{ ...createRequestOptions(messages, true), conversationId: 'conv-plan-1' },540websocketEndpoint.model,541websocketEndpoint,542));543544expect(body.previous_response_id).toBe('resp-plan-1');545expect(body.input).toHaveLength(1);546expect(body.input?.[0]).toMatchObject({547role: 'user',548content: [{ type: 'input_text', text: 'plan follow up' }],549});550551accessor.dispose();552services.dispose();553});554555it('treats websocket requests from agent to plan and back to implementation as separate mode changes', () => {556const services = createPlatformServices();557const wsManager = new NullChatWebSocketManager();558services.set(IChatWebSocketManager, wsManager);559const accessor = services.createTestingAccessor();560const instantiationService = accessor.get(IInstantiationService);561const websocketEndpoint = { ...testEndpoint, family: 'gpt-5.4', model: 'gpt-5.4' as const };562563wsManager.getStatefulMarker = () => 'resp-agent-1';564const planMessages: Raw.ChatMessage[] = [565{566role: Raw.ChatRole.User,567content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'agent context before switching to plan' }],568},569createStatefulMarkerMessage(websocketEndpoint.model, 'resp-agent-1'),570{571role: Raw.ChatRole.User,572content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'plan this change' }],573},574];575576const planBody = instantiationService.invokeFunction(servicesAccessor => createResponsesRequestBody(577servicesAccessor,578{ 
...createRequestOptions(planMessages, true), conversationId: 'conv-mode-change', modeChanged: true },579websocketEndpoint.model,580websocketEndpoint,581));582583expect(planBody.previous_response_id).toBeUndefined();584expect(planBody.input).toHaveLength(2);585expect(planBody.input?.[0]).toMatchObject({586role: 'user',587content: [{ type: 'input_text', text: 'agent context before switching to plan' }],588});589expect(planBody.input?.[1]).toMatchObject({590role: 'user',591content: [{ type: 'input_text', text: 'plan this change' }],592});593594wsManager.getStatefulMarker = () => 'resp-plan-1';595const implementationMessages: Raw.ChatMessage[] = [596{597role: Raw.ChatRole.User,598content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'plan context before switching back to implementation' }],599},600createStatefulMarkerMessage(websocketEndpoint.model, 'resp-plan-1'),601{602role: Raw.ChatRole.User,603content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'start implementation' }],604},605];606607const implementationBody = instantiationService.invokeFunction(servicesAccessor => createResponsesRequestBody(608servicesAccessor,609{ ...createRequestOptions(implementationMessages, true), conversationId: 'conv-mode-change', modeChanged: true },610websocketEndpoint.model,611websocketEndpoint,612));613614expect(implementationBody.previous_response_id).toBeUndefined();615expect(implementationBody.input).toHaveLength(2);616expect(implementationBody.input?.[0]).toMatchObject({617role: 'user',618content: [{ type: 'input_text', text: 'plan context before switching back to implementation' }],619});620expect(implementationBody.input?.[1]).toMatchObject({621role: 'user',622content: [{ type: 'input_text', text: 'start implementation' }],623});624625accessor.dispose();626services.dispose();627});628629it('includes the newest compaction item in non-websocket requests when it predates the stateful marker', () => {630const services = createPlatformServices();631const 
accessor = services.createTestingAccessor();632const instantiationService = accessor.get(IInstantiationService);633const latestCompaction = createCompactionResponse('cmp_http', 'enc_http');634const messages: Raw.ChatMessage[] = [635{636role: Raw.ChatRole.User,637content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'before compaction' }],638},639createCompactionAssistantMessage(latestCompaction),640createStatefulMarkerMessage(testEndpoint.model, 'resp-prev'),641{642role: Raw.ChatRole.User,643content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'after marker' }],644},645];646647const body = instantiationService.invokeFunction(servicesAccessor => createResponsesRequestBody(servicesAccessor, createRequestOptions(messages, false), testEndpoint.model, testEndpoint));648649expect(body.previous_response_id).toBe('resp-prev');650expect(body.input).toContainEqual({651type: openAIContextManagementCompactionType,652id: 'cmp_http',653encrypted_content: 'enc_http',654});655expect(body.input).toContainEqual({656role: 'user',657content: [{ type: 'input_text', text: 'after marker' }],658});659660accessor.dispose();661services.dispose();662});663664it('does not reuse an HTTP stateful marker when modeChanged is true', () => {665const services = createPlatformServices();666const accessor = services.createTestingAccessor();667const instantiationService = accessor.get(IInstantiationService);668const messages: Raw.ChatMessage[] = [669{670role: Raw.ChatRole.User,671content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'before marker' }],672},673createStatefulMarkerMessage(testEndpoint.model, 'resp-prev'),674{675role: Raw.ChatRole.User,676content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'after marker' }],677},678];679680const body = instantiationService.invokeFunction(servicesAccessor => createResponsesRequestBody(servicesAccessor, { ...createRequestOptions(messages, false), modeChanged: true }, testEndpoint.model, 
testEndpoint));681682expect(body.previous_response_id).toBeUndefined();683expect(body.input).toHaveLength(2);684expect(body.input?.[0]).toMatchObject({685role: 'user',686content: [{ type: 'input_text', text: 'before marker' }],687});688expect(body.input?.[1]).toMatchObject({689role: 'user',690content: [{ type: 'input_text', text: 'after marker' }],691});692693accessor.dispose();694services.dispose();695});696697it('round-trips the newest stored compaction item', () => {698const services = createPlatformServices();699const accessor = services.createTestingAccessor();700const instantiationService = accessor.get(IInstantiationService);701const latestCompaction = createCompactionResponse('cmp_new', 'enc_new');702const messages: Raw.ChatMessage[] = [703{704role: Raw.ChatRole.User,705content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'earlier turn' }],706},707createCompactionAssistantMessage(latestCompaction),708{709role: Raw.ChatRole.User,710content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'follow up' }],711},712];713714const body = instantiationService.invokeFunction(servicesAccessor => createResponsesRequestBody(servicesAccessor, createRequestOptions(messages, false), testEndpoint.model, testEndpoint));715716expect(body.input).toContainEqual({717type: openAIContextManagementCompactionType,718id: 'cmp_new',719encrypted_content: 'enc_new',720});721722accessor.dispose();723services.dispose();724});725726it('sends assistant messages with output content and without a fake output message id', () => {727const services = createPlatformServices();728const accessor = services.createTestingAccessor();729const instantiationService = accessor.get(IInstantiationService);730const messages: Raw.ChatMessage[] = [731{732role: Raw.ChatRole.Assistant,733content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'previous answer' }],734},735];736737const body = instantiationService.invokeFunction(servicesAccessor => 
createResponsesRequestBody(servicesAccessor, createRequestOptions(messages, false), testEndpoint.model, testEndpoint));738739expect(body.input?.[0]).toMatchObject({740role: 'assistant',741content: [{ type: 'output_text', text: 'previous answer' }],742type: 'message',743});744expect(body.input?.[0]).not.toHaveProperty('id');745expect(body.input?.[0]).not.toHaveProperty('status');746747accessor.dispose();748services.dispose();749});750751it('does not send whitespace-only assistant messages', () => {752const services = createPlatformServices();753const accessor = services.createTestingAccessor();754const instantiationService = accessor.get(IInstantiationService);755const messages: Raw.ChatMessage[] = [756{757role: Raw.ChatRole.Assistant,758content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: ' \n\t' }],759},760];761762const body = instantiationService.invokeFunction(servicesAccessor => createResponsesRequestBody(servicesAccessor, createRequestOptions(messages, false), testEndpoint.model, testEndpoint));763764expect(body.input).toHaveLength(0);765766accessor.dispose();767services.dispose();768});769770it('adds namespace field only to function_call for tools loaded via tool_search_output', () => {771const services = createPlatformServices();772services.define(IToolDeferralService, { _serviceBrand: undefined, isNonDeferredTool: (name: string) => name === 'read_file' || name === 'tool_search' });773const accessor = services.createTestingAccessor();774const instantiationService = accessor.get(IInstantiationService);775const configService = accessor.get(IConfigurationService) as InMemoryConfigurationService;776configService.setConfig(ConfigKey.ResponsesApiToolSearchEnabled, true);777const endpoint = { ...testEndpoint, model: 'gpt-5.4', family: 'gpt-5.4' };778const tools = [779{ type: 'function' as const, function: { name: 'tool_search', description: 'Search tools', parameters: {} } },780{ type: 'function' as const, function: { name: 'some_mcp_tool', description: 
'MCP tool', parameters: {} } },781{ type: 'function' as const, function: { name: 'read_file', description: 'Read a file', parameters: {} } },782];783const messages: Raw.ChatMessage[] = [784{785role: Raw.ChatRole.User,786content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'find something' }],787},788// Assistant calls tool_search789{790role: Raw.ChatRole.Assistant,791content: [],792toolCalls: [{ id: 'ts_1', type: 'function', function: { name: 'tool_search', arguments: '{"query":"search"}' } }],793},794// tool_search returns some_mcp_tool795{796role: Raw.ChatRole.Tool,797content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: '["some_mcp_tool"]' }],798toolCallId: 'ts_1',799},800// Assistant calls some_mcp_tool (loaded via tool_search) and read_file (not loaded via tool_search)801{802role: Raw.ChatRole.Assistant,803content: [],804toolCalls: [805{ id: 'call_mcp', type: 'function', function: { name: 'some_mcp_tool', arguments: '{"q":"hello"}' } },806{ id: 'call_read', type: 'function', function: { name: 'read_file', arguments: '{"path":"foo.ts"}' } },807],808},809];810811const body = instantiationService.invokeFunction(servicesAccessor => createResponsesRequestBody(servicesAccessor, { ...createRequestOptions(messages, false), location: ChatLocation.Agent, requestOptions: { tools } }, endpoint.model, endpoint));812813const input = body.input as OpenAI.Responses.ResponseInputItem[];814815// some_mcp_tool was loaded via tool_search_output — should have namespace816const mcpCall = input.find(item => isFunctionCallInputItem(item, 'some_mcp_tool'));817expect(mcpCall).toBeDefined();818expect(mcpCall?.namespace).toBe('some_mcp_tool');819820// read_file was NOT loaded via tool_search — should NOT have namespace821const readCall = input.find(item => isFunctionCallInputItem(item, 
'read_file'));
		expect(readCall).toBeDefined();
		expect(readCall).not.toHaveProperty('namespace');

		accessor.dispose();
		services.dispose();
	});
});

describe('processResponseFromChatEndpoint telemetry', () => {
	it('emits engine.messages for Responses API assistant output', async () => {
		const services = createPlatformServices();
		const accessor = services.createTestingAccessor();
		const instantiationService = accessor.get(IInstantiationService);
		const logService = accessor.get(ILogService);
		const telemetryService = new SpyingTelemetryService();

		const completedEvent = {
			type: 'response.completed',
			response: {
				id: 'resp_123',
				model: 'gpt-5-mini',
				created_at: 123,
				usage: {
					input_tokens: 11,
					output_tokens: 7,
					total_tokens: 18,
					input_tokens_details: { cached_tokens: 0 },
					output_tokens_details: { reasoning_tokens: 0 },
				},
				output: [
					{
						type: 'message',
						content: [{ type: 'output_text', text: 'final assistant reply' }],
					}
				],
			}
		};

		const response = createFakeStreamResponse(`data: ${JSON.stringify(completedEvent)}\n\n`);
		const telemetryData = TelemetryData.createAndMarkAsIssued({ modelCallId: 'model-call-1' }, {});

		const stream = await processResponseFromChatEndpoint(
			instantiationService,
			telemetryService,
			logService,
			response,
			1,
			async () => undefined,
			telemetryData
		);

		for await (const _ of stream) {
			// consume all completions to flush telemetry side effects
		}

		const events = telemetryService.getEvents().telemetryServiceEvents.filter(e => e.eventName === 'engine.messages');
		expect(events.length).toBeGreaterThan(0);

		// The last engine.messages event carries the assistant output captured from the stream.
		const outputEvent = events[events.length - 1];
		const messagesJson = JSON.parse(String((outputEvent.properties as Record<string, string>)?.messagesJson));
		expect(messagesJson).toHaveLength(1);
		expect(messagesJson[0].role).toBe('assistant');
		expect(messagesJson[0].content).toBe('final assistant reply');

		accessor.dispose();
		services.dispose();
	});

	it('reconciles the newest compaction item from response.completed for the next request', async () => {
		const services = createPlatformServices();
		const accessor = services.createTestingAccessor();
		const instantiationService = accessor.get(IInstantiationService);
		const logService = accessor.get(ILogService);
		const telemetryService = new SpyingTelemetryService();
		const streamedCompactions: OpenAIContextManagementResponse[] = [];

		// Two compaction items: the older one is streamed incrementally; the newer one
		// only appears in the final response.completed output and must win.
		const olderCompaction = createCompactionResponse('cmp_old', 'enc_old');
		const newerCompaction = createCompactionResponse('cmp_new', 'enc_new');
		const compactionAddedEvent = {
			type: 'response.output_item.added',
			output_index: 0,
			item: olderCompaction,
		};
		const compactionEvent = {
			type: 'response.output_item.done',
			output_index: 0,
			item: olderCompaction,
		};
		const completedEvent = {
			type: 'response.completed',
			response: {
				id: 'resp_latest_compaction',
				model: 'gpt-5-mini',
				created_at: 123,
				usage: {
					input_tokens: 1200,
					output_tokens: 9,
					total_tokens: 1209,
					input_tokens_details: { cached_tokens: 0 },
					output_tokens_details: { reasoning_tokens: 0 },
				},
				output: [
					olderCompaction,
					{
						type: 'message',
						content: [{ type: 'output_text', text: 'reply' }],
					},
					newerCompaction,
				],
			}
		};

		const response = createFakeStreamResponse(`data: ${JSON.stringify(compactionAddedEvent)}\n\ndata: ${JSON.stringify(compactionEvent)}\n\ndata: ${JSON.stringify(completedEvent)}\n\n`);
		const telemetryData = TelemetryData.createAndMarkAsIssued({ modelCallId: 'model-call-latest-compaction' }, {});

		const stream = await processResponseFromChatEndpoint(
			instantiationService,
			telemetryService,
			logService,
			response,
			1,
			async (_text, _unused, delta) => {
				if (delta.contextManagement && isOpenAIContextManagementResponse(delta.contextManagement)) {
					streamedCompactions.push(delta.contextManagement);
				}
				return undefined;
			},
			telemetryData,
			1000
		);

		for await (const _ of stream) {
			// consume stream
		}

		expect(streamedCompactions.map(item => item.id)).toEqual(['cmp_old', 'cmp_new']);

		// Build the follow-up request from the most recently streamed compaction:
		// only the newest compaction item may be replayed to the server.
		const body = instantiationService.invokeFunction(servicesAccessor => createResponsesRequestBody(servicesAccessor, createRequestOptions([
			createCompactionAssistantMessage(streamedCompactions[streamedCompactions.length - 1]),
			{
				role: Raw.ChatRole.User,
				content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'continue' }],
			},
		], false), testEndpoint.model, testEndpoint));

		expect(body.input).toContainEqual({
			type: openAIContextManagementCompactionType,
			id: 'cmp_new',
			encrypted_content: 'enc_new',
		});
		expect(body.input).not.toContainEqual({
			type: openAIContextManagementCompactionType,
			id: 'cmp_old',
			encrypted_content: 'enc_old',
		});

		accessor.dispose();
		services.dispose();
	});

	it('does not emit compaction telemetry when compaction is disabled', async () => {
		const services = createPlatformServices();
		const accessor = services.createTestingAccessor();
		const instantiationService = accessor.get(IInstantiationService);
		const logService = accessor.get(ILogService);
		const telemetryService = new SpyingTelemetryService();

		const compactionEvent = {
			type: 'response.output_item.done',
			output_index: 0,
			item: {
				type: openAIContextManagementCompactionType,
				id: 'cmp_disabled',
				encrypted_content: 'enc',
			}
		};
		const completedEvent = {
			type: 'response.completed',
			response: {
				id: 'resp_disabled',
				model: 'gpt-5-mini',
				created_at: 123,
				usage: {
					input_tokens: 1500,
					output_tokens: 9,
					total_tokens: 1509,
					input_tokens_details: { cached_tokens: 0 },
					output_tokens_details: { reasoning_tokens: 0 },
				},
				output: []
			}
		};

		const response = createFakeStreamResponse(`data: ${JSON.stringify(compactionEvent)}\n\ndata: ${JSON.stringify(completedEvent)}\n\n`);
		const telemetryData = TelemetryData.createAndMarkAsIssued({ modelCallId: 'model-call-4' }, {});

		// Compaction threshold argument is undefined => compaction is disabled.
		const stream = await processResponseFromChatEndpoint(
			instantiationService,
			telemetryService,
			logService,
			response,
			1,
			async () => undefined,
			telemetryData,
			undefined
		);

		for await (const _ of stream) {
			// consume stream
		}

		const event = telemetryService.getEvents().telemetryServiceEvents.find(e => e.eventName === 'responsesApi.compactionOutcome');
		expect(event).toBeUndefined();

		accessor.dispose();
		services.dispose();
	});

	it('captures compaction returned before output_item.done for the next request', async () => {
		const services = createPlatformServices();
		const accessor = services.createTestingAccessor();
		const instantiationService = accessor.get(IInstantiationService);
		const logService = accessor.get(ILogService);
		const telemetryService = new SpyingTelemetryService();
		const streamedCompactions: OpenAIContextManagementResponse[] = [];

		// The compaction item arrives only via response.output_item.added (no .done event)
		// and is absent from the completed response output — it must still be captured.
		const earlyCompaction = createCompactionResponse('cmp_early', 'enc_early');
		const compactionAddedEvent = {
			type: 'response.output_item.added',
			output_index: 0,
			item: earlyCompaction,
		};
		const completedEvent = {
			type: 'response.completed',
			response: {
				id: 'resp_early_compaction',
				model: 'gpt-5-mini',
				created_at: 123,
				usage: {
					input_tokens: 1200,
					output_tokens: 9,
					total_tokens: 1209,
					input_tokens_details: { cached_tokens: 0 },
					output_tokens_details: { reasoning_tokens: 0 },
				},
				output: [
					{
						type: 'message',
						content: [{ type: 'output_text', text: 'reply' }],
					},
				],
			}
		};

		const response = createFakeStreamResponse(`data: ${JSON.stringify(compactionAddedEvent)}\n\ndata: ${JSON.stringify(completedEvent)}\n\n`);
		const telemetryData = TelemetryData.createAndMarkAsIssued({ modelCallId: 'model-call-early-compaction' }, {});

		const stream = await processResponseFromChatEndpoint(
			instantiationService,
			telemetryService,
			logService,
			response,
			1,
			async (_text, _unused, delta) => {
				if (delta.contextManagement && isOpenAIContextManagementResponse(delta.contextManagement)) {
					streamedCompactions.push(delta.contextManagement);
				}
				return undefined;
			},
			telemetryData,
			1000
		);

		for await (const _ of stream) {
			// consume stream
		}

		expect(streamedCompactions.map(item => item.id)).toEqual(['cmp_early']);

		const body = instantiationService.invokeFunction(servicesAccessor => createResponsesRequestBody(servicesAccessor, createRequestOptions([
			createCompactionAssistantMessage(streamedCompactions[streamedCompactions.length - 1]),
			{
				role: Raw.ChatRole.User,
				content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'continue' }],
			},
		], false), testEndpoint.model, testEndpoint));

		expect(body.input).toContainEqual({
			type: openAIContextManagementCompactionType,
			id: 'cmp_early',
			encrypted_content: 'enc_early',
		});

		accessor.dispose();
		services.dispose();
	});

	it('emits telemetry when the server returns a compaction item', async () => {
		const services = createPlatformServices();
		const accessor = services.createTestingAccessor();
		const instantiationService = accessor.get(IInstantiationService);
		const logService = accessor.get(ILogService);
		const telemetryService = new SpyingTelemetryService();

		const compactionEvent = {
			type: 'response.output_item.done',
			output_index: 0,
			item: {
				type: openAIContextManagementCompactionType,
				id: 'cmp_123',
				encrypted_content: 'enc',
			}
		};
		const completedEvent = {
			type: 'response.completed',
			response: {
				id: 'resp_456',
				model: 'gpt-5-mini',
				created_at: 123,
				usage: {
					input_tokens: 1200,
					output_tokens: 7,
					total_tokens: 1207,
					input_tokens_details: { cached_tokens: 0 },
					output_tokens_details: { reasoning_tokens: 0 },
				},
				output: []
			}
		};

		const response = createFakeStreamResponse(`data: ${JSON.stringify(compactionEvent)}\n\ndata: ${JSON.stringify(completedEvent)}\n\n`);
		const telemetryData = TelemetryData.createAndMarkAsIssued({ modelCallId: 'model-call-2' }, {});

		const stream = await processResponseFromChatEndpoint(
			instantiationService,
			telemetryService,
			logService,
			response,
			1,
			async () => undefined,
			telemetryData,
			1000
		);

		for await (const _ of stream) {
			// consume stream
		}

		const event = telemetryService.getEvents().telemetryServiceEvents.find(e => e.eventName === 'responsesApi.compactionOutcome');
		expect(event).toBeDefined();
		expect(event?.properties).toMatchObject({
			outcome: 'compaction_returned',
			model: 'gpt-5-mini',
		});
		expect(event?.measurements).toMatchObject({
			compactThreshold: 1000,
			promptTokens: 1200,
			totalTokens: 1207,
		});

		accessor.dispose();
		services.dispose();
	});

	it('emits telemetry when the server exceeds threshold without returning a compaction item', async () => {
		const services = createPlatformServices();
		const accessor = services.createTestingAccessor();
		const instantiationService = accessor.get(IInstantiationService);
		const logService = accessor.get(ILogService);
		const telemetryService = new SpyingTelemetryService();

		// input_tokens (1500) exceeds the compaction threshold (1000) but the
		// output contains no compaction item.
		const completedEvent = {
			type: 'response.completed',
			response: {
				id: 'resp_789',
				model: 'gpt-5-mini',
				created_at: 123,
				usage: {
					input_tokens: 1500,
					output_tokens: 9,
					total_tokens: 1509,
					input_tokens_details: { cached_tokens: 0 },
					output_tokens_details: { reasoning_tokens: 0 },
				},
				output: [
					{
						type: 'message',
						content: [{ type: 'output_text', text: 'reply' }],
					}
				]
			}
		};

		const response = createFakeStreamResponse(`data: ${JSON.stringify(completedEvent)}\n\n`);
		const telemetryData = TelemetryData.createAndMarkAsIssued({ modelCallId: 'model-call-3' }, {});

		const stream = await processResponseFromChatEndpoint(
			instantiationService,
			telemetryService,
			logService,
			response,
			1,
			async () => undefined,
			telemetryData,
			1000
		);

		for await (const _ of stream) {
			// consume stream
		}

		const event = telemetryService.getEvents().telemetryServiceEvents.find(e => e.eventName === 'responsesApi.compactionOutcome');
		expect(event).toBeDefined();
		expect(event?.properties).toMatchObject({
			outcome: 'threshold_met_no_compaction',
			model: 'gpt-5-mini',
		});
		expect(event?.measurements).toMatchObject({
			compactThreshold: 1000,
			promptTokens: 1500,
			totalTokens: 1509,
		});

		accessor.dispose();
		services.dispose();
	});
});

describe('summarizedAtRoundId and stateful marker interaction', () => {
	it('skips stateful marker when summarizedAtRoundId differs from connection', () => {
		const services = createPlatformServices();
		// Hand-rolled stub: the connection remembers an older summarization round.
		const wsManager: IChatWebSocketManager = {
			_serviceBrand: undefined,
			getOrCreateConnection: () => { throw new Error('not implemented'); },
			hasActiveConnection: () => false,
			getStatefulMarker: () => 'resp-prev',
			getSummarizedAtRoundId: () => 'round-old',
			closeConnection: () => { },
			closeAll: () => { },
		};
		services.set(IChatWebSocketManager, wsManager);
		const accessor = services.createTestingAccessor();
		const instantiationService = accessor.get(IInstantiationService);
		const messages: Raw.ChatMessage[] = [
			{ role: Raw.ChatRole.User, content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'summarized history' }] },
			createStatefulMarkerMessage(testEndpoint.model, 'resp-prev'),
			{ role: Raw.ChatRole.User, content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'after marker' }] },
		];

		const body = instantiationService.invokeFunction(servicesAccessor => createResponsesRequestBody(
			servicesAccessor,
			{ ...createRequestOptions(messages, true), conversationId: 'conv-1', summarizedAtRoundId: 'round-new' },
			testEndpoint.model, testEndpoint,
		));

		// Marker skipped: full history (both user messages) is sent instead.
		expect(body.previous_response_id).toBeUndefined();
		expect(body.input).toHaveLength(2);

		accessor.dispose();
		services.dispose();
	});

	it('uses stateful marker when summarizedAtRoundId matches connection', () => {
		const services = createPlatformServices();
		const wsManager = new NullChatWebSocketManager();
		wsManager.getStatefulMarker = () => 'resp-prev';
		wsManager.getSummarizedAtRoundId = () => 'round-5';
		services.set(IChatWebSocketManager, wsManager);
		const accessor = services.createTestingAccessor();
		const instantiationService = accessor.get(IInstantiationService);
		const messages: Raw.ChatMessage[] = [
			{ role: Raw.ChatRole.User, content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'summarized history' }] },
			createStatefulMarkerMessage(testEndpoint.model, 'resp-prev'),
			{ role: Raw.ChatRole.User, content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'after marker' }] },
		];

		const body = instantiationService.invokeFunction(servicesAccessor => createResponsesRequestBody(
			servicesAccessor,
			{ ...createRequestOptions(messages, true), conversationId: 'conv-1', summarizedAtRoundId: 'round-5' },
			testEndpoint.model, testEndpoint,
		));

		// Marker honored: only the message after the marker is sent.
		expect(body.previous_response_id).toBe('resp-prev');
		expect(body.input).toHaveLength(1);

		accessor.dispose();
		services.dispose();
	});

	it('uses stateful marker when both sides have no summary', () => {
		const services = createPlatformServices();
		const wsManager = new NullChatWebSocketManager();
		wsManager.getStatefulMarker = () => 'resp-prev';
		wsManager.getSummarizedAtRoundId = () => undefined;
		services.set(IChatWebSocketManager, wsManager);
		const accessor = services.createTestingAccessor();
		const instantiationService = accessor.get(IInstantiationService);
		const messages: Raw.ChatMessage[] = [
			{ role: Raw.ChatRole.User, content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'first message' }] },
			createStatefulMarkerMessage(testEndpoint.model, 'resp-prev'),
			{ role: Raw.ChatRole.User, content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'second message' }] },
		];

		const body = instantiationService.invokeFunction(servicesAccessor => createResponsesRequestBody(
			servicesAccessor,
			{ ...createRequestOptions(messages, true), conversationId: 'conv-1' },
			testEndpoint.model, testEndpoint,
		));

		expect(body.previous_response_id).toBe('resp-prev');
		expect(body.input).toHaveLength(1);

		accessor.dispose();
		services.dispose();
	});

	it('skips stateful marker when conversation is rolled back past summary', () => {
		const services = createPlatformServices();
		const wsManager = new NullChatWebSocketManager();
		wsManager.getStatefulMarker = () => 'resp-prev';
		wsManager.getSummarizedAtRoundId = () => 'round-5';
		services.set(IChatWebSocketManager, wsManager);
		const accessor = services.createTestingAccessor();
		const instantiationService = accessor.get(IInstantiationService);
		const messages: Raw.ChatMessage[] = [
			{ role: Raw.ChatRole.User, content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'first message' }] },
			createStatefulMarkerMessage(testEndpoint.model, 'resp-prev'),
			{ role: Raw.ChatRole.User, content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'second message' }] },
		];

		// Connection saw 'round-5' but the request carries no summarizedAtRoundId.
		const body = instantiationService.invokeFunction(servicesAccessor => createResponsesRequestBody(
			servicesAccessor,
			{ ...createRequestOptions(messages, true), conversationId: 'conv-1', summarizedAtRoundId: undefined },
			testEndpoint.model, testEndpoint,
		));

		expect(body.previous_response_id).toBeUndefined();
		expect(body.input).toHaveLength(2);

		accessor.dispose();
		services.dispose();
	});

	it('skips stateful marker on first request after new summarization', () => {
		const services = createPlatformServices();
		const wsManager = new NullChatWebSocketManager();
		wsManager.getStatefulMarker = () => 'resp-prev';
		wsManager.getSummarizedAtRoundId = () => undefined;
		services.set(IChatWebSocketManager, wsManager);
		const accessor = services.createTestingAccessor();
		const instantiationService = accessor.get(IInstantiationService);
		const messages: Raw.ChatMessage[] = [
			{ role: Raw.ChatRole.User, content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'summarized history' }] },
			createStatefulMarkerMessage(testEndpoint.model, 'resp-prev'),
			{ role: Raw.ChatRole.User, content: [{ type: Raw.ChatCompletionContentPartKind.Text, text: 'after marker' }] },
		];

		// Request carries a round id the connection has not seen yet.
		const body = instantiationService.invokeFunction(servicesAccessor => createResponsesRequestBody(
			servicesAccessor,
			{ ...createRequestOptions(messages, true), conversationId: 'conv-1', summarizedAtRoundId: 'round-new' },
			testEndpoint.model, testEndpoint,
		));

		expect(body.previous_response_id).toBeUndefined();
		expect(body.input).toHaveLength(2);

		accessor.dispose();
		services.dispose();
	});
});

describe('phase commentary followed by phase final_answer', () => {
	it('inserts a separator between commentary and final_answer text in the stream', async () => {
		const services = createPlatformServices();
		const accessor = services.createTestingAccessor();
		const instantiationService = accessor.get(IInstantiationService);
		const logService = accessor.get(ILogService);
		const telemetryService = new SpyingTelemetryService();
		const accumulatedTexts: string[] = [];
		const phases: string[] = [];

		const commentaryText = 'Responding directly in commentary as requested. My name is GitHub Copilot.';
		const finalText = 'My name is GitHub Copilot.';

		// Real-world Responses API stream: commentary message (output_index 0)
		// followed by final_answer message (output_index 1), with incremental
		// text deltas for each.
		const events = [
			{ type: 'response.output_item.added', output_index: 0, item: { type: 'message', role: 'assistant', content: [], phase: 'commentary', status: 'in_progress' }, sequence_number: 2 },
			{ type: 'response.content_part.added', output_index: 0, content_index: 0, item_id: 'item-0', part: { type: 'output_text', text: '', annotations: [], logprobs: [] }, sequence_number: 3 },
			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: 'Respond', logprobs: [], sequence_number: 4 },
			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: 'ing', logprobs: [], sequence_number: 5 },
			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: ' directly', logprobs: [], sequence_number: 6 },
			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: ' in', logprobs: [], sequence_number: 7 },
			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: ' commentary', logprobs: [], sequence_number: 8 },
			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: ' as', logprobs: [], sequence_number: 9 },
			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: ' requested', logprobs: [], sequence_number: 10 },
			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: '.', logprobs: [], sequence_number: 11 },
			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: ' My', logprobs: [], sequence_number: 12 },
			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: ' name', logprobs: [], sequence_number: 13 },
			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: ' is', logprobs: [], sequence_number: 14 },
			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: ' Git', logprobs: [], sequence_number: 15 },
			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: 'Hub', logprobs: [], sequence_number: 16 },
			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: ' Cop', logprobs: [], sequence_number: 17 },
			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: 'ilot', logprobs: [], sequence_number: 18 },
			{ type: 'response.output_text.delta', output_index: 0, content_index: 0, item_id: 'item-0', delta: '.', logprobs: [], sequence_number: 19 },
			{ type: 'response.output_text.done', output_index: 0, content_index: 0, item_id: 'item-0', text: commentaryText, logprobs: [], sequence_number: 20 },
			{ type: 'response.content_part.done', output_index: 0, content_index: 0, item_id: 'item-0', part: { type: 'output_text', text: commentaryText, annotations: [], logprobs: [] }, sequence_number: 21 },
			{ type: 'response.output_item.done', output_index: 0, item: { type: 'message', role: 'assistant', content: [{ type: 'output_text', text: commentaryText, annotations: [], logprobs: [] }], phase: 'commentary', status: 'completed' }, sequence_number: 22 },
			{ type: 'response.output_item.added', output_index: 1, item: { type: 'message', role: 'assistant', content: [], phase: 'final_answer', status: 'in_progress' }, sequence_number: 23 },
			{ type: 'response.content_part.added', output_index: 1, content_index: 0, item_id: 'item-1', part: { type: 'output_text', text: '', annotations: [], logprobs: [] }, sequence_number: 24 },
			{ type: 'response.output_text.delta', output_index: 1, content_index: 0, item_id: 'item-1', delta: 'My', logprobs: [], sequence_number: 25 },
			{ type: 'response.output_text.delta', output_index: 1, content_index: 0, item_id: 'item-1', delta: ' name', logprobs: [], sequence_number: 26 },
			{ type: 'response.output_text.delta', output_index: 1, content_index: 0, item_id: 'item-1', delta: ' is', logprobs: [], sequence_number: 27 },
			{ type: 'response.output_text.delta', output_index: 1, content_index: 0, item_id: 'item-1', delta: ' Git', logprobs: [], sequence_number: 28 },
			{ type: 'response.output_text.delta', output_index: 1, content_index: 0, item_id: 'item-1', delta: 'Hub', logprobs: [], sequence_number: 29 },
			{ type: 'response.output_text.delta', output_index: 1, content_index: 0, item_id: 'item-1', delta: ' Cop', logprobs: [], sequence_number: 30 },
			{ type: 'response.output_text.delta', output_index: 1, content_index: 0, item_id: 'item-1', delta: 'ilot', logprobs: [], sequence_number: 31 },
			{ type: 'response.output_text.delta', output_index: 1, content_index: 0, item_id: 'item-1', delta: '.', logprobs: [], sequence_number: 32 },
			{ type: 'response.output_text.done', output_index: 1, content_index: 0, item_id: 'item-1', text: finalText, logprobs: [], sequence_number: 33 },
			{ type: 'response.content_part.done', output_index: 1, content_index: 0, item_id: 'item-1', part: { type: 'output_text', text: finalText, annotations: [], logprobs: [] }, sequence_number: 34 },
			{ type: 'response.output_item.done', output_index: 1, item: { type: 'message', role: 'assistant', content: [{ type: 'output_text', text: finalText, annotations: [], logprobs: [] }], phase: 'final_answer', status: 'completed' }, sequence_number: 35 },
			{
				type: 'response.completed',
				response: {
					id: 'resp_phase_test',
					model: 'gpt-5.4-2026-03-05',
					created_at: 1776962259,
					usage: { input_tokens: 8432, output_tokens: 35, total_tokens: 8467, input_tokens_details: { cached_tokens: 0 }, output_tokens_details: { reasoning_tokens: 0 } },
					output: [
						{ type: 'message', content: [{ type: 'output_text', text: commentaryText, annotations: [], logprobs: [] }], phase: 'commentary', role: 'assistant', status: 'completed' },
						{ type: 'message', content: [{ type: 'output_text', text: finalText, annotations: [], logprobs: [] }], phase: 'final_answer', role: 'assistant', status: 'completed' },
					],
				},
				sequence_number: 36,
			}
		];

		const sseBody = events.map(e => `data: ${JSON.stringify(e)}\n\n`).join('');
		const response = createFakeStreamResponse(sseBody);
		const telemetryData = TelemetryData.createAndMarkAsIssued({ modelCallId: 'model-call-phase-test' }, {});

		const stream = await processResponseFromChatEndpoint(
			instantiationService,
			telemetryService,
			logService,
			response,
			1,
			async (text, _unused, delta) => {
				accumulatedTexts.push(text);
				if (delta.phase) {
					phases.push(delta.phase);
				}
				return undefined;
			},
			telemetryData,
		);

		for await (const _ of stream) {
			// consume stream
		}

		expect(phases).toEqual(['commentary', 'final_answer']);

		// The accumulated text must separate commentary and final_answer text
		const finalAccumulatedText = accumulatedTexts[accumulatedTexts.length - 1];
		expect(finalAccumulatedText).toBe(
			commentaryText + '\n\n' + finalText
		);

		accessor.dispose();
		services.dispose();
	});
});
