Gemini RAG
Used in this guide: Introduction
You are now at the most rewarding stage: Retrieval-Augmented Generation (RAG), which is the core of building your intelligent customer support bot. This time we still call the generateContent method, but we instruct the model to search a specific knowledge base before answering a question. This is achieved by passing the File Search Store (FSS) ID in the tools configuration of your generation request.
You can adapt your existing general Gemini route (found in Gemini Basic) to create a specialized, grounded route. The only change required is adding the config object with the tools parameter to link your File Search Store ID.
Requirement:
1. Create Route
import { GoogleGenAI } from "@google/genai";
import { NextRequest, NextResponse } from "next/server";
// Read required configuration once at module load so a misconfigured
// deployment fails fast (at startup/import) instead of on the first request.
const STORE_ID_ENV = process.env.FSS_ID;
const PROJECT_ID_ENV = process.env.GOOGLE_CLOUD_PROJECT;
if (!STORE_ID_ENV || !PROJECT_ID_ENV) {
throw new Error("Error: FSS_ID or GOOGLE_CLOUD_PROJECT environment variable is not set.");
}
// Re-bind as plain `string` after the guard above so downstream code
// doesn't have to deal with `string | undefined`.
const STORE_ID: string = STORE_ID_ENV;
const PROJECT_ID: string = PROJECT_ID_ENV;
// NOTE(review): `apiKey` is commented out while only `project` is passed.
// Confirm the @google/genai SDK picks up GEMINI_API_KEY from the environment
// in this configuration — otherwise authentication may fail at request time.
const genAI = new GoogleGenAI({
// apiKey: process.env.GEMINI_API_KEY,
project: PROJECT_ID,
});
/**
 * POST handler: grounded ("RAG") generation.
 *
 * Accepts a JSON body of shape { prompt: string }, asks Gemini to answer
 * while searching the configured File Search Store first, and returns
 * { output: string }. Client errors (bad JSON, missing/non-string prompt)
 * yield 400; unexpected SDK/network failures yield 500.
 */
export async function POST(req: NextRequest) {
  try {
    // Parse the body defensively: malformed JSON is a client error (400),
    // not a server error (the original let it fall through to the 500 path).
    let body: unknown;
    try {
      body = await req.json();
    } catch {
      return NextResponse.json({ error: "Missing 'prompt' in request body" }, { status: 400 });
    }
    const prompt = (body as { prompt?: unknown } | null)?.prompt;
    // Validate at runtime instead of trusting an `as { prompt: string }`
    // cast — a number/object/blank prompt is rejected here.
    if (typeof prompt !== "string" || !prompt.trim()) {
      return NextResponse.json({ error: "Missing 'prompt' in request body" }, { status: 400 });
    }
    // Ground the model: tell it to consult the File Search Store before answering.
    const config = {
      tools: [
        {
          fileSearch: {
            fileSearchStoreNames: [STORE_ID],
          },
        },
      ],
    };
    console.log(`Received prompt: "${prompt}"`);
    const res = await genAI.models.generateContent({
      model: "gemini-2.5-flash",
      contents: prompt,
      config: config,
    });
    const output = res.text;
    return NextResponse.json({ output });
  } catch (error) {
    // Unexpected failures (network, SDK) surface as a generic 500 without
    // leaking internal details to the client.
    console.error("Gemini API Error:", (error as Error).message);
    return NextResponse.json({ error: "Failed to generate content." }, { status: 500 });
  }
}
Note: Make sure you have all the environment variables setup. For example:
GEMINI_API_KEY=_your_gemini_api_key_
FSS_ID=_your_created_fss_
GOOGLE_CLOUD_PROJECT=_your_project_id_
- The Gemini API Key comes from Google AI Studio
- The FSS ID comes from the store you created with Gemini AI FSS
- The Google Cloud Project comes from Google AI Studio when you generate the key
2. Create a Page
"use client"
import { useState } from "react"
/**
 * Client page for the RAG demo: collects a prompt, POSTs it to the
 * grounded API route, and renders the model's reply (or an error).
 */
export default function GeminiRagPage() {
  // Form input, latest model reply, in-flight flag, and user-facing error.
  const [prompt, setPrompt] = useState("");
  const [response, setResponse] = useState('Type a prompt and click "Generate" to ask Gemini AI.');
  const [isLoading, setIsLoading] = useState(false);
  const [error, setError] = useState("");

  const handleSubmit = async (e: React.FormEvent) => {
    e.preventDefault();
    setError("");
    if (!prompt.trim()) {
      setError("Please enter a prompt first.");
      return;
    }
    setIsLoading(true);
    try {
      const res = await fetch('/api/gemini/jakkrit-bio', {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
        },
        body: JSON.stringify({ prompt }),
      });
      const data = await res.json();
      if (!res.ok || data.error) {
        setError(data.error || `Error: Status ${res.status}`);
        setResponse("Failed to get a response from Gemini.");
        // BUG FIX: the original fell through here, so setResponse(data.output)
        // immediately overwrote the failure message with `undefined`.
        return;
      }
      setResponse(data.output);
    } catch (error) {
      console.error("Fetch error: ", error);
      setError("Error: A network error occurred. Check your connection or server status.");
      setResponse("Failed to connect to Gemini.");
    } finally {
      // Always clear the loading flag, success or failure.
      setIsLoading(false);
    }
  };

  return (
    <div className="flex flex-col gap-4 w-full max-w-2xl p-8">
      <div className="">
        <h1 className="font-bold text-md">Gemini API Example</h1>
        <p className="text-sm text-gray-400">This page calls your secure Next.js API Route which then talks to the Gemini API.</p>
      </div>
      <form onSubmit={handleSubmit}>
        <textarea
          placeholder="Ask Gemini anything..."
          value={prompt}
          rows={4}
          disabled={isLoading}
          onChange={(e) => setPrompt(e.target.value)}
          className="bg-gray-700 rounded-lg w-full p-4 text-sm"
        />
        <div className="flex w-full justify-end">
          <button
            type="submit"
            disabled={isLoading}
            className="bg-sky-600 py-2 px-4 rounded-md text-sm cursor-pointer"
          >
            {isLoading ? "Generating response...": "Submit"}
          </button>
        </div>
      </form>
      {error && (
        <div className="text-amber-500">
          <p>{error}</p>
        </div>
      )}
      <div className="space-y-4 text-sm">
        <h2>Gemini AI Response</h2>
        {isLoading && !response.startsWith("Type a prompt") ? (
          <p>Thinking...</p>
        ) : (
          <div className="border border-gray-700 rounded-lg p-4 leading-8">
            <p>{response}</p>
          </div>
        )}
      </div>
    </div>
  )
}
3. Render Markdown
This step is to better render Gemini response using react-markdown and remark-gfm
Install libraries:
npm install react-markdown remark-gfm
Create Markdown Renderer
"use client";
import ReactMarkdown from "react-markdown";
import remarkGfm from "remark-gfm";
// Props for the markdown renderer: the raw markdown text from Gemini.
interface GeminiMarkdownProps {
  content: string;
}

/**
 * Renders Gemini's markdown reply with GitHub-flavored markdown support,
 * mapping each element to styled equivalents (headings are demoted to h3
 * so they fit inside the page's own heading hierarchy). Shows a "..."
 * placeholder when the content is empty/whitespace.
 */
const GeminiMarkdown = ({ content }: GeminiMarkdownProps) => {
  if (!content.trim()) {
    return <p className="text-sm">...</p>;
  }
  return (
    <div className="text-sm leading-relaxed space-y-2">
      <ReactMarkdown
        remarkPlugins={[remarkGfm]}
        components={{
          h1: ({ node: _node, ...props }) => (
            <h3 className="text-base font-semibold" {...props} />
          ),
          h2: ({ node: _node, ...props }) => (
            <h3 className="text-base font-semibold" {...props} />
          ),
          p: ({ node: _node, ...props }) => (
            <p className="mb-2 text-[var(--textLight)] leading-8" {...props} />
          ),
          strong: ({ node: _node, ...props }) => (
            <strong className="font-semibold text-[var(--foreground)]" {...props} />
          ),
          // BUG FIX: unordered lists used `list-decimal` (numbered markers);
          // bullets belong to `list-disc`.
          ul: ({ node: _node, ...props }) => (
            <ul className="list-disc pl-6 space-y-1" {...props} />
          ),
          ol: ({ node: _node, ...props }) => (
            <ol className="list-decimal pl-6 space-y-1" {...props} />
          ),
          li: ({ node: _node, ...props }) => (
            <li className="" {...props} />
          ),
        }}>
        {content}
      </ReactMarkdown>
    </div>
  );
};
export default GeminiMarkdown;
Import and replace Gemini response part with GeminiMarkdown component.
<div className="space-y-4 text-sm">
<h2>Gemini AI Response</h2>
{isLoading && !response.startsWith("Type a prompt") ? (
<p>Thinking...</p>
) : (
<div className="border border-gray-700 rounded-lg p-4 leading-8">
<p>{response}</p>
</div>
)}
</div>
import GeminiMarkdown from "@/components/GeminiMarkdown";
...
<div className="space-y-2 bg-[var(--bgOverlay)] w-full py-4 px-6 rounded-lg">
<h2>Gemini Response:</h2>
<GeminiMarkdown content={response} />
</div>