LLM index: /llms.txt
Collapsible reasoning UI for assistant "thinking" traces. The root tracks streaming state to update the trigger label (Thinking... -> Thought for N seconds) and auto-opens/closes content while reasoning streams.
"use client";
import * as React from "react";
import {
Reasoning,
ReasoningContent,
ReasoningTrigger,
} from "@/components/nexus-ui/reasoning";
// Canned reasoning trace streamed into the demo below, two characters at a
// time, to simulate a model's live "thinking" output.
const FULL_REASONING_TEXT = `Let me think about this step by step.
First, the user is asking about authentication. I need to consider:
- What framework are they using?
- Do they need session-based or token-based auth?
Actually, wait - I should check if they already have any auth setup. Let me reconsider..`;
// Demo: streams FULL_REASONING_TEXT into <Reasoning> on an interval, flipping
// isStreaming off once the whole string has been revealed.
function ReasoningDefault() {
  const [isStreaming, setIsStreaming] = React.useState(true);
  const [reasoningText, setReasoningText] = React.useState("");
  React.useEffect(() => {
    let cursor = 0;
    setIsStreaming(true);
    const intervalId = window.setInterval(() => {
      // Reveal two more characters per tick.
      cursor += 2;
      setReasoningText(FULL_REASONING_TEXT.slice(0, cursor));
      const finished = cursor >= FULL_REASONING_TEXT.length;
      if (finished) {
        window.clearInterval(intervalId);
        setIsStreaming(false);
      }
    }, 28);
    // Stop the interval if the component unmounts mid-stream.
    return () => window.clearInterval(intervalId);
  }, []);
  return (
    <div className="h-[60%] w-full">
      <Reasoning isStreaming={isStreaming}>
        <ReasoningTrigger />
        <ReasoningContent>{reasoningText}</ReasoningContent>
      </Reasoning>
    </div>
  );
}
export default ReasoningDefault;
Installation
npx shadcn@latest add @nexus-ui/reasoning
pnpm dlx shadcn@latest add @nexus-ui/reasoning
yarn dlx shadcn@latest add @nexus-ui/reasoning
bunx shadcn@latest add @nexus-ui/reasoning
Install the following dependencies:
npx shadcn@latest add collapsible && npm install @hugeicons/react @hugeicons/core-free-icons streamdown
pnpm dlx shadcn@latest add collapsible && pnpm add @hugeicons/react @hugeicons/core-free-icons streamdown
yarn dlx shadcn@latest add collapsible && yarn add @hugeicons/react @hugeicons/core-free-icons streamdown
bunx shadcn@latest add collapsible && bun add @hugeicons/react @hugeicons/core-free-icons streamdown
Copy and paste the following code into your project.
"use client";
import * as React from "react";
import { AiBrain01Icon, ArrowDown01Icon } from "@hugeicons/core-free-icons";
import { HugeiconsIcon } from "@hugeicons/react";
import { Streamdown } from "streamdown";
import {
Collapsible,
CollapsibleContent,
CollapsibleTrigger,
} from "@/components/ui/collapsible";
import { cn } from "@/lib/utils";
// Shared state the <Reasoning> root provides to its trigger/content children.
type ReasoningContextValue = {
  isStreaming: boolean; // true while reasoning tokens are still arriving
  label: string; // trigger text: "Thinking..." or "Thought for N second(s)"
};
// null default lets useReasoningContext detect usage outside <Reasoning>.
const ReasoningContext = React.createContext<ReasoningContextValue | null>(
  null,
);
// Read the nearest <Reasoning> context; `component` is the caller's name,
// used only to produce a helpful error when rendered outside <Reasoning>.
function useReasoningContext(component: string): ReasoningContextValue {
  const value = React.useContext(ReasoningContext);
  if (value === null) {
    throw new Error(`${component} must be used within <Reasoning>`);
  }
  return value;
}
// Props for <Reasoning>. The Collapsible open-state props are stripped and
// re-declared so the root can intercept them (open is forced while streaming).
type ReasoningProps = Omit<
  React.ComponentProps<typeof Collapsible>,
  "open" | "defaultOpen" | "onOpenChange"
> & {
  isStreaming?: boolean; // true while the model is still emitting reasoning
  open?: boolean; // controlled open state
  defaultOpen?: boolean; // initial open state when uncontrolled
  onOpenChange?: (open: boolean) => void; // fires on user and auto open/close
};
// Root of the reasoning UI. Tracks isStreaming edges to auto-open while the
// trace streams, auto-close when it ends, and derive the trigger label
// ("Thinking..." -> "Thought for N second(s)"). Wraps Collapsible.
function Reasoning({
  className,
  isStreaming = false,
  open: openProp,
  defaultOpen = false,
  onOpenChange,
  children,
  ...props
}: ReasoningProps) {
  const isControlled = openProp !== undefined;
  // Start open when already streaming on mount, even if defaultOpen is false.
  const [internalOpen, setInternalOpen] = React.useState(
    defaultOpen || isStreaming,
  );
  const open = isControlled ? openProp : internalOpen;
  // Elapsed seconds (stringified) once a stream finishes; null while unknown.
  const [durationLabel, setDurationLabel] = React.useState<string | null>(null);
  // Whether any stream has run; before that the label stays "Thinking...".
  const [hasStreamed, setHasStreamed] = React.useState(isStreaming);
  const startedAtRef = React.useRef<number | null>(
    isStreaming ? Date.now() : null,
  );
  const prevStreamingRef = React.useRef(isStreaming);
  // React to isStreaming edges: rising edge opens and starts the timer,
  // falling edge closes and computes the duration label.
  React.useEffect(() => {
    const wasStreaming = prevStreamingRef.current;
    if (!wasStreaming && isStreaming) {
      setHasStreamed(true);
      startedAtRef.current = Date.now();
      setDurationLabel(null);
      if (!isControlled) {
        setInternalOpen(true);
      }
      onOpenChange?.(true);
    }
    if (wasStreaming && !isStreaming) {
      const startedAt = startedAtRef.current;
      // Clamp to at least 1 so a sub-second stream doesn't read "0 seconds".
      const elapsedSeconds =
        startedAt != null
          ? Math.max(1, Math.round((Date.now() - startedAt) / 1000))
          : null;
      setDurationLabel(
        elapsedSeconds != null ? String(elapsedSeconds) : "a few",
      );
      startedAtRef.current = null;
      if (!isControlled) {
        setInternalOpen(false);
      }
      onOpenChange?.(false);
    }
    prevStreamingRef.current = isStreaming;
  }, [isControlled, isStreaming, onOpenChange]);
  const label = React.useMemo(() => {
    if (!hasStreamed || isStreaming) return "Thinking...";
    if (durationLabel != null) {
      // Fix: pluralize — the minimum elapsed value is 1, so the singular case
      // ("Thought for 1 second", not "1 seconds") is common.
      const unit = durationLabel === "1" ? "second" : "seconds";
      return `Thought for ${durationLabel} ${unit}`;
    }
    return "Thought for a few seconds";
  }, [durationLabel, hasStreamed, isStreaming]);
  const contextValue = React.useMemo(
    () => ({ isStreaming, label }),
    [isStreaming, label],
  );
  // User toggles are ignored while streaming: content is held open.
  const handleOpenChange = React.useCallback(
    (nextOpen: boolean) => {
      const resolvedOpen = isStreaming ? true : nextOpen;
      if (!isControlled) {
        setInternalOpen(resolvedOpen);
      }
      onOpenChange?.(resolvedOpen);
    },
    [isControlled, isStreaming, onOpenChange],
  );
  return (
    <ReasoningContext.Provider value={contextValue}>
      <Collapsible
        data-slot="reasoning"
        className={cn("not-prose w-full", className)}
        data-streaming={isStreaming ? "true" : "false"}
        open={open}
        onOpenChange={handleOpenChange}
        {...props}
      >
        {children}
      </Collapsible>
    </ReasoningContext.Provider>
  );
}
type ReasoningTriggerProps = React.ComponentProps<typeof CollapsibleTrigger>;
// Trigger row: brain icon, status label, and a chevron that fades in on hover
// (and stays visible once streaming has finished and the panel is open).
function ReasoningTrigger({
  className,
  children,
  ...props
}: ReasoningTriggerProps) {
  const { isStreaming, label } = useReasoningContext("ReasoningTrigger");
  const streamingAttr = isStreaming ? "true" : "false";
  const rootClassName = cn(
    "group flex cursor-pointer items-center gap-1.25 text-muted-foreground transition-colors hover:text-foreground",
    className,
  );
  return (
    <CollapsibleTrigger
      data-slot="reasoning-trigger"
      data-streaming={streamingAttr}
      className={rootClassName}
      {...props}
    >
      <HugeiconsIcon
        icon={AiBrain01Icon}
        strokeWidth={1.75}
        className="size-4"
      />
      {/* Custom children replace the default context-provided label. */}
      <span className="text-sm leading-6">{children ?? label}</span>
      <HugeiconsIcon
        icon={ArrowDown01Icon}
        strokeWidth={2.0}
        className="ml-0.5 size-4 opacity-0 transition-all group-data-[state=open]:rotate-180 group-hover:opacity-100 group-data-[state=open]:group-data-[streaming=false]:opacity-100"
      />
    </CollapsibleTrigger>
  );
}
// children is narrowed to string because Streamdown renders a markdown source.
type ReasoningContentProps = Omit<
  React.ComponentProps<typeof CollapsibleContent>,
  "children"
> & {
  children: string;
};
// Animated collapsible panel that renders the reasoning text as markdown
// (via Streamdown) behind a left border.
function ReasoningContent({
  className,
  children,
  ...props
}: ReasoningContentProps) {
  const panelClassName = cn(
    "mt-2 ml-2 overflow-hidden border-l pl-3 data-[state=closed]:animate-collapsible-up data-[state=open]:animate-collapsible-down",
    className,
  );
  const markdownClassName = cn(
    "prose max-w-none text-sm leading-6 font-normal text-muted-foreground",
    // body text
    "[&_p]:mb-2.5",
    // strong
    "prose-strong:font-medium prose-strong:text-foreground",
    // lists
    "**:data-[streamdown=list-item]:pl-4 **:data-[streamdown=list-item]:py-0.25 prose-ol:my-0 prose-ol:pl-3 prose-ul:my-0 prose-li:my-[-0.5px] **:data-[streamdown=list-item]:marker:text-muted-foreground/50",
    "[&>*:first-child]:mt-0 [&>*:last-child]:mb-0",
  );
  return (
    <CollapsibleContent
      data-slot="reasoning-content"
      className={panelClassName}
      {...props}
    >
      <Streamdown className={markdownClassName}>{children}</Streamdown>
    </CollapsibleContent>
  );
}
export { Reasoning, ReasoningTrigger, ReasoningContent };
Update import paths to match your project setup.
Usage
import {
Reasoning,
ReasoningTrigger,
ReasoningContent,
} from "@/components/nexus-ui/reasoning";

<Reasoning isStreaming={isReasoningStreaming}>
<ReasoningTrigger />
<ReasoningContent>{reasoningMarkdown}</ReasoningContent>
</Reasoning>

Vercel AI SDK Integration
Some models emit structured reasoning parts in the AI SDK response stream. In practice, models like DeepSeek R1 and Claude can return reasoning content that you can render with Reasoning.
Use Reasoning with Vercel AI SDK by mapping assistant reasoning parts to:
- isStreaming: true while reasoning parts are still streaming
- ReasoningContent children: the merged reasoning text
Install the AI SDK
npm install ai @ai-sdk/react

Create your chat API route
This mirrors the docs demo route: use Claude via the AI Gateway id and enable Anthropic thinking so reasoning parts are returned.
import { streamText, smoothStream, UIMessage, convertToModelMessages } from "ai";
// POST /api/chat — streams a Claude response with Anthropic extended thinking
// enabled, so reasoning parts are included in the UI message stream.
export async function POST(req: Request) {
  const {
    messages,
    model,
  }: { messages: UIMessage[]; model?: string } = await req.json();
  const result = streamText({
    // Fix: honor the model requested by the client (it was destructured but
    // never used), falling back to the demo's AI Gateway model id.
    model: model ?? "anthropic/claude-sonnet-4.5",
    messages: await convertToModelMessages(messages),
    // Smooth out token bursts into word-sized chunks for nicer streaming UX.
    experimental_transform: smoothStream({
      chunking: "word",
      delayInMs: 18,
    }),
    providerOptions: {
      anthropic: {
        // Enable thinking and cap its token budget; reasoning parts are
        // emitted alongside the normal text parts.
        thinking: { type: "enabled", budgetTokens: 1024 },
      },
    },
  });
  // sendReasoning forwards reasoning parts to the client stream.
  return result.toUIMessageStreamResponse({
    sendReasoning: true,
  });
}

Render assistant reasoning parts with Reasoning
"use client";
import { useChat } from "@ai-sdk/react";
import { DefaultChatTransport, isReasoningUIPart, type UIMessage } from "ai";
import {
Reasoning,
ReasoningContent,
ReasoningTrigger,
} from "@/components/nexus-ui/reasoning";
// Extract the reasoning parts of a UI message, in stream order.
function reasoningPartsFromMessage(message: UIMessage) {
  return message.parts.filter(isReasoningUIPart);
}
// Merge the text of every reasoning part into a single markdown string.
function reasoningTextFromMessage(message: UIMessage) {
  const parts = reasoningPartsFromMessage(message);
  return parts.map((part) => part.text).join("");
}
// True while at least one reasoning part is still streaming.
function reasoningStreamingFromMessage(message: UIMessage) {
  return reasoningPartsFromMessage(message).some(
    (part) => part.state === "streaming",
  );
}
// Renders the reasoning trace of the latest assistant message, if any.
export default function ReasoningWithUseChat() {
  const { messages } = useChat({
    transport: new DefaultChatTransport({ api: "/api/chat" }),
  });
  // Latest assistant message — search from the end without mutating messages.
  const assistant = [...messages].reverse().find((m) => m.role === "assistant");
  if (!assistant) return null;
  const reasoningText = reasoningTextFromMessage(assistant);
  const isReasoningStreaming = reasoningStreamingFromMessage(assistant);
  // Render nothing until some reasoning text has actually arrived.
  if (!reasoningText.trim()) return null;
  return (
    <Reasoning isStreaming={isReasoningStreaming}>
      <ReasoningTrigger />
      <ReasoningContent>{reasoningText}</ReasoningContent>
    </Reasoning>
  );
}

API Reference
Reasoning
Root component. Manages streaming timing, open state behavior, and label context for children. Wraps Collapsible.
Prop
Type
ReasoningTrigger
Trigger row with built-in brain icon, label text, and rotating chevron. If children is provided, it replaces the default label text. Wraps Collapsible Trigger.
Prop
Type
ReasoningContent
Collapsible content wrapper with enter/exit animation and markdown rendering via Streamdown. Wraps Collapsible Content.
Prop
Type