Next.js Route Handler
Use NjiraAI with Next.js App Router API routes.
This example shows a Next.js App Router API route with NjiraAI enforcement.
Full code
// app/api/chat/route.ts
import { NjiraAI } from "@njiraai/sdk";
// Module-level client: constructed once per server instance and shared by
// the route handler below.
const njira = new NjiraAI({
// Both env vars are required at boot — the non-null assertions make a
// missing variable fail fast instead of sending undefined to the API.
apiKey: process.env.NJIRA_API_KEY!,
projectId: process.env.NJIRA_PROJECT_ID!,
// "active" mode enforces verdicts (block/modify); contrast with a
// monitor-only mode that would merely record them.
mode: "active",
});
/**
 * POST /api/chat — NjiraAI-enforced chat endpoint.
 *
 * Flow: parse/validate body → pre-enforce input → traced LLM call →
 * post-enforce output → respond. Wrapped in njira.nextRoute so traces are
 * flushed before the serverless process exits.
 *
 * Responses: 200 with { response, traceId }, 400 on bad input,
 * 403 when a policy verdict blocks, 500 on unexpected errors.
 */
export const POST = njira.nextRoute(async (request: Request) => {
  // Parse and validate up front: an unguarded request.json() would throw on
  // malformed JSON and surface as an unhandled error instead of a 400, and
  // a missing "message" would flow undefined into enforcement below.
  let message: string;
  try {
    const body = (await request.json()) as { message?: unknown };
    if (typeof body.message !== "string" || body.message.length === 0) {
      return Response.json(
        { error: "Body must include a non-empty string 'message'" },
        { status: 400 }
      );
    }
    message = body.message;
  } catch {
    return Response.json({ error: "Invalid JSON body" }, { status: 400 });
  }

  // Pre-enforcement: check the raw user input against policy.
  const pre = await njira.enforcePre({
    input: message,
    metadata: { endpoint: "/api/chat" },
  });
  if (pre.verdict === "block") {
    return Response.json(
      {
        error: "Blocked by policy",
        reasons: pre.reasons,
        traceId: pre.traceId,
      },
      { status: 403 }
    );
  }
  // A "modify" verdict carries a policy-rewritten input; prefer it.
  const effectiveInput = pre.verdict === "modify" ? pre.modifiedInput : message;

  // Trace the LLM call as an explicit span so latency and I/O are recorded.
  const spanId = njira.trace.startSpan({
    name: "llm-call",
    type: "llm",
    input: { prompt: effectiveInput },
  });
  try {
    // Your LLM logic here
    const response = `Echo: ${effectiveInput}`;
    njira.trace.endSpan(spanId, { output: response });

    // Post-enforcement: vet the model output before returning it.
    const post = await njira.enforcePost({ output: response });
    if (post.verdict === "block") {
      return Response.json(
        { error: "Output blocked", traceId: post.traceId },
        { status: 403 }
      );
    }
    const finalResponse =
      post.verdict === "modify" ? post.modifiedOutput : response;
    return Response.json({
      response: finalResponse,
      traceId: pre.traceId,
    });
  } catch (error) {
    // Narrow the unknown catch value instead of asserting, so non-Error
    // throws (strings, objects) are still recorded coherently.
    const err = error instanceof Error ? error : new Error(String(error));
    njira.trace.error(spanId, err);
    return Response.json({ error: "Internal error" }, { status: 500 });
  }
});
Run it
cd sdks/typescript/examples/nextjs-route-handler
pnpm install
pnpm dev
Test it
curl -X POST http://localhost:3000/api/chat \
-H "Content-Type: application/json" \
-d '{"message": "Hello from Next.js!"}'
Serverless considerations
The nextRoute wrapper automatically flushes traces before returning—important in Vercel or other serverless environments where the process may exit immediately after the response.