30 changes: 28 additions & 2 deletions packages/typescript/ai-gemini/src/adapters/text.ts
@@ -271,6 +271,12 @@ export class GeminiTextAdapter<
}
}

// Capture thought signature for Gemini 3.0 compatibility
const metadata =
'thoughtSignature' in (part as any) && (part as any).thoughtSignature
? { thoughtSignature: (part as any).thoughtSignature }
: undefined

yield {
type: 'tool_call',
id: generateId(this.name),
@@ -283,6 +289,7 @@ export class GeminiTextAdapter<
name: toolCallData.name,
arguments: toolCallData.args,
},
metadata,
},
index: toolCallData.index,
}
@@ -323,6 +330,12 @@ export class GeminiTextAdapter<
index: nextToolIndex++,
})

// Capture thought signature for Gemini 3.0 compatibility
const metadata =
'thoughtSignature' in part && (part as any).thoughtSignature
? { thoughtSignature: (part as any).thoughtSignature }
: undefined
Comment on lines +333 to +337

Copilot AI Jan 18, 2026

This is duplicated logic from lines 274-278. Consider extracting this into a helper function to reduce code duplication and ensure consistency. For example, a function like extractThoughtSignatureMetadata(part: any) would improve maintainability.

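A minimal sketch of the helper suggested above; the name extractThoughtSignatureMetadata comes from the review comment, it is not part of this PR, and narrowing to string values is an assumption beyond the truthiness check used in the diff:

// Hypothetical helper, not part of this PR: centralizes the duplicated
// thought-signature capture from both streaming branches.
function extractThoughtSignatureMetadata(
  part: unknown,
): { thoughtSignature: string } | undefined {
  const signature = (part as { thoughtSignature?: unknown })?.thoughtSignature
  return typeof signature === 'string' && signature.length > 0
    ? { thoughtSignature: signature }
    : undefined
}

// Both call sites could then reduce to:
// const metadata = extractThoughtSignatureMetadata(part)
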
yield {
type: 'tool_call',
id: generateId(this.name),
@@ -338,6 +351,7 @@ export class GeminiTextAdapter<
? functionArgs
: JSON.stringify(functionArgs),
},
metadata,
},
index: nextToolIndex - 1,
}
@@ -461,12 +475,24 @@ export class GeminiTextAdapter<
>
}

- parts.push({
+ const part: Part = {
functionCall: {
name: toolCall.function.name,
args: parsedArgs,
},
- })
+ }

// Include thought signature if present for Gemini 3.0 compatibility
if (
toolCall.metadata &&
typeof toolCall.metadata === 'object' &&
'thoughtSignature' in toolCall.metadata &&
typeof toolCall.metadata.thoughtSignature === 'string'
) {
;(part as any).thoughtSignature = toolCall.metadata.thoughtSignature
}

parts.push(part)
}
}

112 changes: 112 additions & 0 deletions packages/typescript/ai-gemini/tests/gemini-adapter.test.ts
@@ -343,4 +343,116 @@ describe('GeminiAdapter through AI', () => {
expect(payload.config.systemInstruction).toContain('123 tokens')
expect(result.summary).toBe(summaryText)
})

it('captures and preserves thought signatures for Gemini 3.0 compatibility', async () => {
const testThoughtSignature = 'thought_signature_abc123'
const streamChunks = [
{
candidates: [
{
content: {
parts: [
{
functionCall: {
id: 'tool_call_1',
name: 'lookup_weather',
args: { location: 'Paris' },
},
thoughtSignature: testThoughtSignature,
},
],
},
finishReason: 'UNEXPECTED_TOOL_CALL',
},
],
},
]

mocks.generateContentStreamSpy.mockResolvedValue(createStream(streamChunks))

const adapter = createTextAdapter()
const received: StreamChunk[] = []

// First request - capture thought signature
for await (const chunk of chat({
adapter,
messages: [{ role: 'user', content: 'What is the weather in Paris?' }],
tools: [weatherTool],
})) {
received.push(chunk)
}

// Verify thought signature was captured in metadata
const toolCallChunk = received.find((c) => c.type === 'tool_call')
expect(toolCallChunk).toBeDefined()
if (toolCallChunk && toolCallChunk.type === 'tool_call') {
expect(toolCallChunk.toolCall.metadata).toEqual({
thoughtSignature: testThoughtSignature,
})
}

// Clear mocks for second request
vi.clearAllMocks()

const secondResponseChunks = [
{
candidates: [
{
content: {
parts: [{ text: 'The weather is sunny.' }],
},
finishReason: 'STOP',
},
],
},
]

mocks.generateContentStreamSpy.mockResolvedValue(
createStream(secondResponseChunks),
)

// Second request - verify thought signature is sent back
// Use ModelMessage format with toolCalls array
for await (const _ of chat({
adapter,
messages: [
{ role: 'user', content: 'What is the weather in Paris?' },
{
role: 'assistant',
content: null,
toolCalls: [
{
id: 'tool_call_1',
type: 'function',
function: {
name: 'lookup_weather',
arguments: '{"location":"Paris"}',
},
metadata: { thoughtSignature: testThoughtSignature },
},
],
},
{
role: 'tool',
content: 'Sunny, 22°C',
toolCallId: 'tool_call_1',
},
],
})) {
/* consume stream */
}

// Verify thought signature was included in the request
expect(mocks.generateContentStreamSpy).toHaveBeenCalledTimes(1)
const [payload] = mocks.generateContentStreamSpy.mock.calls[0]
const assistantMessage = payload.contents.find(
(msg: any) => msg.role === 'model',
)
expect(assistantMessage).toBeDefined()
const functionCallPart = assistantMessage?.parts?.find(
(part: any) => part.functionCall,
)
expect(functionCallPart).toBeDefined()
expect(functionCallPart.thoughtSignature).toBe(testThoughtSignature)
})
})
15 changes: 7 additions & 8 deletions packages/typescript/ai/src/types.ts
@@ -91,6 +91,12 @@ export interface ToolCall {
name: string
arguments: string // JSON string
}
/**
* Provider-specific metadata associated with this tool call.
* Used by adapters to store additional information needed for API compatibility.
* For example, Gemini stores thought signatures here for Gemini 3.0 models.
*/
metadata?: unknown
}

// ============================================================================
@@ -675,14 +681,7 @@ export interface ContentStreamChunk extends BaseStreamChunk {

export interface ToolCallStreamChunk extends BaseStreamChunk {
type: 'tool_call'
- toolCall: {
- id: string
- type: 'function'
- function: {
- name: string
- arguments: string // Incremental JSON arguments
- }
- }
+ toolCall: ToolCall
index: number
}
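
For illustration, a minimal sketch of what widening ToolCallStreamChunk.toolCall to the shared ToolCall type enables for consumers; the interfaces are re-declared locally so the snippet stands alone, and the helper name is hypothetical:

// Local copies of the shapes from types.ts above, for a self-contained sketch.
interface ToolCall {
  id: string
  type: 'function'
  function: { name: string; arguments: string }
  metadata?: unknown
}

interface ToolCallStreamChunk {
  type: 'tool_call'
  toolCall: ToolCall
  index: number
}

// Because each chunk now carries a full ToolCall (metadata included), a caller
// can forward streamed tool calls verbatim into the next assistant message,
// which is what lets the Gemini adapter re-attach the thought signature.
function toAssistantToolCalls(chunks: ToolCallStreamChunk[]): ToolCall[] {
  return chunks.map((chunk) => chunk.toolCall)
}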
