Skip to content

Commit e45533a

Browse files
committed
feat(chat): add composable primitives, raw task example, and task mode switcher
1 parent 2ab5bf5 commit e45533a

File tree

6 files changed

+364
-7
lines changed

6 files changed

+364
-7
lines changed

packages/trigger-sdk/src/v3/ai.ts

Lines changed: 160 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1567,6 +1567,156 @@ function cleanupAbortedParts(message: UIMessage): UIMessage {
15671567
};
15681568
}
15691569

1570+
// ---------------------------------------------------------------------------
1571+
// Composable primitives for raw task chat
1572+
// ---------------------------------------------------------------------------
1573+
1574+
/**
1575+
* Create a managed stop signal wired to the chat stop input stream.
1576+
*
1577+
* Call once at the start of your run. Use `signal` as the abort signal for
1578+
* `streamText`. Call `reset()` at the start of each turn to get a fresh
1579+
* per-turn signal. Call `cleanup()` when the run ends.
1580+
*
1581+
* @example
1582+
* ```ts
1583+
* const stop = chat.createStopSignal();
1584+
* for (let turn = 0; turn < 100; turn++) {
1585+
* stop.reset();
1586+
* const result = streamText({ model, messages, abortSignal: stop.signal });
1587+
* await chat.pipe(result);
1588+
* // ...
1589+
* }
1590+
* stop.cleanup();
1591+
* ```
1592+
*/
1593+
function createStopSignal(): { readonly signal: AbortSignal; reset: () => void; cleanup: () => void } {
1594+
let controller = new AbortController();
1595+
const sub = stopInput.on((data) => {
1596+
controller.abort(data?.message || "stopped");
1597+
});
1598+
return {
1599+
get signal() { return controller.signal; },
1600+
reset() { controller = new AbortController(); },
1601+
cleanup() { sub.off(); },
1602+
};
1603+
}
1604+
1605+
/**
1606+
* Signal the frontend that the current turn is complete.
1607+
*
1608+
* The `TriggerChatTransport` intercepts this to close the ReadableStream
1609+
* for the current turn. Call after piping the response stream.
1610+
*
1611+
* @example
1612+
* ```ts
1613+
* await chat.pipe(result);
1614+
* await chat.writeTurnComplete();
1615+
* ```
1616+
*/
1617+
async function chatWriteTurnComplete(options?: { publicAccessToken?: string }): Promise<void> {
1618+
await writeTurnCompleteChunk(undefined, options?.publicAccessToken);
1619+
}
1620+
1621+
/**
1622+
* Pipe a `StreamTextResult` (or similar) to the chat stream and capture
1623+
* the assistant's response message via `onFinish`.
1624+
*
1625+
* Combines `toUIMessageStream()` + `onFinish` callback + `chat.pipe()`.
1626+
* Returns the captured `UIMessage`, or `undefined` if capture failed.
1627+
*
1628+
* @example
1629+
* ```ts
1630+
* const result = streamText({ model, messages, abortSignal: signal });
1631+
* const response = await chat.pipeAndCapture(result, { signal });
1632+
* if (response) conversation.addResponse(response);
1633+
* ```
1634+
*/
1635+
async function pipeChatAndCapture(
1636+
source: UIMessageStreamable,
1637+
options?: { signal?: AbortSignal; spanName?: string }
1638+
): Promise<UIMessage | undefined> {
1639+
let captured: UIMessage | undefined;
1640+
let resolveOnFinish: () => void;
1641+
const onFinishPromise = new Promise<void>((r) => { resolveOnFinish = r; });
1642+
1643+
const uiStream = source.toUIMessageStream({
1644+
onFinish: ({ responseMessage }: { responseMessage: UIMessage }) => {
1645+
captured = responseMessage;
1646+
resolveOnFinish!();
1647+
},
1648+
});
1649+
1650+
await pipeChat(uiStream, { signal: options?.signal, spanName: options?.spanName ?? "stream response" });
1651+
await onFinishPromise;
1652+
1653+
return captured;
1654+
}
1655+
1656+
/**
1657+
* Accumulates conversation messages across turns.
1658+
*
1659+
* Handles the transport protocol: turn 0 sends full history (replace),
1660+
* subsequent turns send only new messages (append), regenerate sends
1661+
* full history minus last assistant message (replace).
1662+
*
1663+
* @example
1664+
* ```ts
1665+
* const conversation = new chat.MessageAccumulator();
1666+
* for (let turn = 0; turn < 100; turn++) {
1667+
* const messages = await conversation.addIncoming(payload.messages, payload.trigger, turn);
1668+
* const result = streamText({ model, messages });
1669+
* const response = await chat.pipeAndCapture(result);
1670+
* if (response) await conversation.addResponse(response);
1671+
* }
1672+
* ```
1673+
*/
1674+
class ChatMessageAccumulator {
1675+
modelMessages: ModelMessage[] = [];
1676+
uiMessages: UIMessage[] = [];
1677+
1678+
/**
1679+
* Add incoming messages from the transport payload.
1680+
* Returns the full accumulated model messages for `streamText`.
1681+
*/
1682+
async addIncoming(
1683+
messages: UIMessage[],
1684+
trigger: string,
1685+
turn: number
1686+
): Promise<ModelMessage[]> {
1687+
const cleaned = messages.map((m) =>
1688+
m.role === "assistant" ? cleanupAbortedParts(m) : m
1689+
);
1690+
const model = await convertToModelMessages(cleaned);
1691+
1692+
if (turn === 0 || trigger === "regenerate-message") {
1693+
this.modelMessages = model;
1694+
this.uiMessages = [...cleaned];
1695+
} else {
1696+
this.modelMessages.push(...model);
1697+
this.uiMessages.push(...cleaned);
1698+
}
1699+
return this.modelMessages;
1700+
}
1701+
1702+
/**
1703+
* Add the assistant's response to the accumulator.
1704+
* Call after `pipeAndCapture` with the captured response.
1705+
*/
1706+
async addResponse(response: UIMessage): Promise<void> {
1707+
if (!response.id) {
1708+
response = { ...response, id: generateMessageId() };
1709+
}
1710+
this.uiMessages.push(response);
1711+
try {
1712+
const msgs = await convertToModelMessages([stripProviderMetadata(response)]);
1713+
this.modelMessages.push(...msgs);
1714+
} catch {
1715+
// Conversion failed — skip model message accumulation for this response
1716+
}
1717+
}
1718+
}
1719+
15701720
// ---------------------------------------------------------------------------
15711721
// chat.local — per-run typed data with Proxy access
15721722
// ---------------------------------------------------------------------------
@@ -1825,6 +1975,16 @@ export const chat = {
18251975
defer: chatDefer,
18261976
/** Typed chat output stream for writing custom chunks or piping from subtasks. */
18271977
stream: chatStream,
1978+
/** Pre-built input stream for receiving messages from the transport. */
1979+
messages: messagesInput,
1980+
/** Create a managed stop signal wired to the stop input stream. See {@link createStopSignal}. */
1981+
createStopSignal,
1982+
/** Signal the frontend that the current turn is complete. See {@link chatWriteTurnComplete}. */
1983+
writeTurnComplete: chatWriteTurnComplete,
1984+
/** Pipe a stream and capture the response message. See {@link pipeChatAndCapture}. */
1985+
pipeAndCapture: pipeChatAndCapture,
1986+
/** Message accumulator class for raw task chat. See {@link ChatMessageAccumulator}. */
1987+
MessageAccumulator: ChatMessageAccumulator,
18281988
};
18291989

18301990
/**

references/ai-chat/src/app/actions.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,8 @@ import { chat } from "@trigger.dev/sdk/ai";
44
import type { aiChat } from "@/trigger/chat";
55
import { prisma } from "@/lib/prisma";
66

7-
export const getChatToken = async () => chat.createAccessToken<typeof aiChat>("ai-chat");
7+
// Mint a public access token for the given chat task (defaults to "ai-chat").
// NOTE(review): the `as any` cast erases createAccessToken's task-id typing so
// a dynamic taskId can be passed; presumably the generic keys the id to the
// task's literal name — confirm and widen the signature rather than casting.
export const getChatToken = async (taskId?: string) =>
  chat.createAccessToken<typeof aiChat>((taskId ?? "ai-chat") as any);
89

910
export async function getChatList() {
1011
const chats = await prisma.chat.findMany({

references/ai-chat/src/app/page.tsx

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@ export default function Home() {
2525
Record<string, { runId: string; publicAccessToken: string; lastEventId?: string }>
2626
>({});
2727
const [loaded, setLoaded] = useState(false);
28+
const [taskMode, setTaskMode] = useState<string>("ai-chat");
2829

2930
useEffect(() => {
3031
async function load() {
@@ -50,6 +51,9 @@ export default function Home() {
5051

5152
return (
5253
<ChatApp
54+
key={taskMode}
55+
taskMode={taskMode}
56+
onTaskModeChange={setTaskMode}
5357
initialChatList={chatList}
5458
initialActiveChatId={activeChatId}
5559
initialMessages={initialMessages}

references/ai-chat/src/components/chat-app.tsx

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -32,13 +32,17 @@ type SessionInfo = {
3232
};
3333

3434
type ChatAppProps = {
35+
taskMode: string;
36+
onTaskModeChange: (mode: string) => void;
3537
initialChatList: ChatMeta[];
3638
initialActiveChatId: string | null;
3739
initialMessages: UIMessage[];
3840
initialSessions: Record<string, SessionInfo>;
3941
};
4042

4143
export function ChatApp({
44+
taskMode,
45+
onTaskModeChange,
4246
initialChatList,
4347
initialActiveChatId,
4448
initialMessages,
@@ -70,9 +74,9 @@ export function ChatApp({
7074
[]
7175
);
7276

73-
const transport = useTriggerChatTransport<typeof aiChat>({
74-
task: "ai-chat",
75-
accessToken: getChatToken,
77+
const transport = useTriggerChatTransport({
78+
task: taskMode,
79+
accessToken: () => getChatToken(taskMode),
7680
baseURL: process.env.NEXT_PUBLIC_TRIGGER_API_URL,
7781
sessions: initialSessions,
7882
onSessionChange: handleSessionChange,
@@ -157,6 +161,8 @@ export function ChatApp({
157161
onPreloadChange={setPreloadEnabled}
158162
warmTimeoutInSeconds={warmTimeoutInSeconds}
159163
onWarmTimeoutChange={setWarmTimeoutInSeconds}
164+
taskMode={taskMode}
165+
onTaskModeChange={onTaskModeChange}
160166
/>
161167
<div className="flex-1">
162168
{activeChatId ? (

references/ai-chat/src/components/chat-sidebar.tsx

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,8 @@ type ChatSidebarProps = {
2828
onPreloadChange: (enabled: boolean) => void;
2929
warmTimeoutInSeconds: number;
3030
onWarmTimeoutChange: (seconds: number) => void;
31+
taskMode: string;
32+
onTaskModeChange: (mode: string) => void;
3133
};
3234

3335
export function ChatSidebar({
@@ -40,6 +42,8 @@ export function ChatSidebar({
4042
onPreloadChange,
4143
warmTimeoutInSeconds,
4244
onWarmTimeoutChange,
45+
taskMode,
46+
onTaskModeChange,
4347
}: ChatSidebarProps) {
4448
const sorted = [...chats].sort((a, b) => b.updatedAt - a.updatedAt);
4549

@@ -108,6 +112,17 @@ export function ChatSidebar({
108112
/>
109113
<span>s</span>
110114
</div>
115+
<div className="flex items-center gap-2 text-xs text-gray-500">
116+
<span className="shrink-0">Task</span>
117+
<select
118+
value={taskMode}
119+
onChange={(e) => onTaskModeChange(e.target.value)}
120+
className="flex-1 rounded border border-gray-300 px-1.5 py-0.5 text-xs text-gray-600 outline-none focus:border-blue-500"
121+
>
122+
<option value="ai-chat">ai-chat (chat.task)</option>
123+
<option value="ai-chat-raw">ai-chat-raw (raw task)</option>
124+
</select>
125+
</div>
111126
</div>
112127
</div>
113128
);

0 commit comments

Comments
 (0)