Gemini Integration

codebox283 · 2025-06-27 09:40:26 +05:30
parent 89873a990f · commit 4ce7c42753
5 changed files with 237 additions and 80 deletions

.env (new file, +2)

@@ -0,0 +1,2 @@
+# Google Gemini API Key
+NEXT_PUBLIC_GEMINI_API_KEY=AIzaSyAK8AyA8JYTprpqIif4qBiPql84Uh0VFh4
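
Note that NEXT_PUBLIC_-prefixed variables are inlined into the client bundle by Next.js, so the key is readable from browser code either way. For reference, a minimal sketch of reading the key from the environment rather than hardcoding it (the throw-on-missing guard is an assumption, not part of this commit):

    // services/geminiService.ts (sketch): read the key from the environment
    import { GoogleGenerativeAI } from '@google/generative-ai';

    const apiKey = process.env.NEXT_PUBLIC_GEMINI_API_KEY;
    if (!apiKey) {
      // Assumed guard: fail fast instead of constructing a client with an empty key
      throw new Error('NEXT_PUBLIC_GEMINI_API_KEY is not set');
    }
    const genAI = new GoogleGenerativeAI(apiKey);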

components/ChatInterface.tsx

@@ -5,6 +5,7 @@ import TypingAnimation from './TypingAnimation';
 import { getPromptContent } from '../utils/promptContent';
 import { Plus, Send, Upload } from 'lucide-react';
 import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from './ui/select';
+import { generateResponse } from '@/services/geminiService';
 
 interface ChatInterfaceProps {
   onPromptSelect: (prompt: string, content: any) => void;
@@ -92,13 +93,10 @@ const ChatInterface: React.FC<ChatInterfaceProps> = ({ onPromptSelect, isExpande
     }, 1000); // "Thinking..." duration
   };
 
-  const handleSendMessage = () => {
+  const handleSendMessage = async () => {
     if (!inputValue.trim()) return;
 
-    const userMessage = {
-      text: inputValue,
-      isUser: true,
-    };
+    const userMessage = { text: inputValue, isUser: true };
     setMessages(prev => [...prev, userMessage]);
 
     const matched = promptPatterns.find(({ pattern }) => pattern.test(inputValue));
@@ -108,25 +106,37 @@ const ChatInterface: React.FC<ChatInterfaceProps> = ({ onPromptSelect, isExpande
     setThinkingStage("thinking");
     setIsThinking(true);
 
-    setTimeout(() => {
+    setTimeout(async () => {
       setThinkingStage("memory");
 
+      let responseText = promptResponses[promptToUse];
+
+      // 🔁 Fallback to Gemini if no predefined response
+      if (!responseText) {
+        try {
+          responseText = await generateResponse(promptToUse);
+        } catch (err) {
+          responseText = "Sorry, something went wrong while generating a response.";
+          console.error(err);
+        }
+      }
+
       setTimeout(() => {
         setIsThinking(false);
         setThinkingStage(null);
         setMessages(prev => [
           ...prev,
           {
-            text: promptResponses[promptToUse] || `Let me show you detailed information about "${promptToUse}"`,
+            text: responseText,
             isUser: false,
             isTyping: true,
           },
         ]);
-        setTimeout(() => {
-          const content = getPromptContent(promptToUse);
-          onPromptSelect(promptToUse, content);
-        }, 9000);
-      }, 1000);
-    }, 1000);
+
+        const content = getPromptContent(promptToUse);
+        onPromptSelect(promptToUse, content);
+      }, 1000); // "Activating memory..." delay
+    }, 1000); // "Thinking..." delay
   };
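
The predefined-first, Gemini-fallback flow above can also be read in isolation; a sketch of the same behavior as a standalone helper (the name resolveResponse is hypothetical, not part of this commit):

    import { generateResponse } from '@/services/geminiService';

    // Hypothetical helper mirroring the fallback inside handleSendMessage
    async function resolveResponse(
      prompt: string,
      predefined: Record<string, string>
    ): Promise<string> {
      const canned = predefined[prompt];
      if (canned) return canned; // a predefined response wins
      try {
        return await generateResponse(prompt); // otherwise fall back to Gemini
      } catch (err) {
        console.error(err);
        return 'Sorry, something went wrong while generating a response.';
      }
    }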
@@ -294,7 +304,7 @@ const ChatInterface: React.FC<ChatInterfaceProps> = ({ onPromptSelect, isExpande
           </Button>
         </div>
       </div>
-      // ...existing code...
+      // ...in development...
     </div>
   );
 };

components/content/DynamicContent.tsx

@@ -13,8 +13,8 @@ const DynamicContent: React.FC<DynamicContentProps> = ({ content }) => {
     const fetchGemini = async () => {
       if (content?.type === 'gemini' && content.prompt) {
         setLoading(true);
-        const res = await generateResponse(content.prompt, []);
-        setGeminiAnswer(res.message);
+        const res = await generateResponse(content.prompt);
+        setGeminiAnswer(res);
         setLoading(false);
       }
     };
@@ -47,4 +47,108 @@ const DynamicContent: React.FC<DynamicContentProps> = ({ content }) => {
   );
 };
 
-export default DynamicContent;
+export default DynamicContent;
+
+// This would generate proper HTML/JSX output, but it requires a more advanced model.
+// import React, { useEffect, useState } from 'react';
+// import { generateResponse } from '@/services/geminiService';
+// interface DynamicContentProps {
+//   content: any;
+// }
+// const DynamicContent: React.FC<DynamicContentProps> = ({ content }) => {
+//   const [geminiAnswer, setGeminiAnswer] = useState<string | null>(null);
+//   const [loading, setLoading] = useState(false);
+//   useEffect(() => {
+//     const fetchGemini = async () => {
+//       if (content?.type === 'gemini' && content.prompt) {
+//         setLoading(true);
+//         try {
+//           const formattedPrompt = `
+// You are an AI that outputs JSX for Tailwind-styled UI components.
+// Task:
+// - Return a single JSX <div> component with className="space-y-8"
+// - Include a title section with heading and paragraph
+// - Include 3 hardcoded MCP cards inside a grid layout
+// - Each card must include: name, description, 3 features in <li>, and author
+// - Use Tailwind CSS classes as shown in the example
+// ⚠️ Output ONLY JSX — no explanations, no markdown, no \`\`\`jsx
+// Example format:
+// <div className="space-y-8">
+//   <div className="space-y-4">
+//     <h3 className="text-xl font-semibold text-gray-900 dark:text-white">
+//       Sample MCP Servers
+//     </h3>
+//     <p className="text-gray-600 dark:text-gray-300 leading-relaxed">
+//       Explore a few example MCPs you can build, deploy, and monetize on the Fastcode platform.
+//     </p>
+//   </div>
+//   <div className="grid gap-6 md:grid-cols-2">
+//     <div className="flex flex-col gap-3 p-6 rounded-2xl border border-gray-200/20 dark:border-gray-700/20 backdrop-blur-sm bg-white/5 dark:bg-black/5 hover:scale-105 transition-all duration-300">
+//       <div className="flex items-center gap-3">
+//         <h4 className="font-medium text-gray-900 dark:text-white">Image Enhancer</h4>
+//       </div>
+//       <div className="text-sm text-gray-600 dark:text-gray-300">Enhance image quality with AI.</div>
+//       <ul className="list-disc list-inside text-xs text-gray-500 dark:text-gray-400 pl-2">
+//         <li>Upscale resolution</li>
+//         <li>Reduce noise</li>
+//         <li>Sharpen details</li>
+//       </ul>
+//       <div className="text-xs text-gray-400 dark:text-gray-500 mt-2">By fastcoder.ai</div>
+//     </div>
+//     <!-- More cards -->
+//   </div>
+// </div>
+// `;
+//           const res = await generateResponse(formattedPrompt.trim());
+//           setGeminiAnswer(res);
+//         } catch (error) {
+//           setGeminiAnswer("Something went wrong while generating the response.");
+//         } finally {
+//           setLoading(false);
+//         }
+//       }
+//     };
+//     fetchGemini();
+//   }, [content]);
+//   if (!content) return null;
+//   if (content.type === 'gemini') {
+//     return (
+//       <div className="p-6">
+//         <h3 className="text-lg font-semibold mb-4">Gemini Answer</h3>
+//         {loading ? (
+//           <div className="text-gray-500">Loading...</div>
+//         ) : (
+//           <div
+//             className="text-gray-900 dark:text-white"
+//             dangerouslySetInnerHTML={{ __html: geminiAnswer || '' }}
+//           />
+//         )}
+//       </div>
+//     );
+//   }
+//   if (React.isValidElement(content)) return content;
+//   return (
+//     <div className="p-6 text-gray-900 dark:text-white">
+//       {typeof content === 'string' ? content : JSON.stringify(content)}
+//     </div>
+//   );
+// };
+// export default DynamicContent;
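
One caution on the component above: geminiAnswer is injected with dangerouslySetInnerHTML, so model output reaches the DOM unescaped. A minimal sanitization sketch using the DOMPurify library (an assumption; DOMPurify is not part of this commit):

    import DOMPurify from 'dompurify';

    // Strip scripts and event handlers from model output before rendering
    const safeHtml = DOMPurify.sanitize(geminiAnswer || '');
    // then: <div dangerouslySetInnerHTML={{ __html: safeHtml }} />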

services/geminiService.ts

@@ -1,71 +1,106 @@
 import { GoogleGenerativeAI } from '@google/generative-ai';
 
+const GEMINI_API_KEY = 'AIzaSyAK8AyA8JYTprpqIif4qBiPql84Uh0VFh4';
+
 // Initialize the Gemini API with your API key
-const genAI = new GoogleGenerativeAI(process.env.NEXT_PUBLIC_GEMINI_API_KEY || '');
+const genAI = new GoogleGenerativeAI(GEMINI_API_KEY || '');
 
-const SYSTEM_PROMPT = `You are an AI assistant that helps users navigate and answer queries of a website which promotes the monetization of MCPs.`
+const SYSTEM_PROMPT = `
+You are an AI assistant for a platform that helps users discover and use MCPs (Monetizable Code Packages).
+- Developers can create and monetize MCPs by wrapping AI functionality into deployable, market-ready APIs.
+- Non-developers can use a no-code tool to build and publish their own MCPs.
+- Users interact with these MCPs through a smart chat interface, selecting the ones they want and getting tasks done directly inside the chat.
+- Your job is to guide users in understanding and using the platform.
+Always respond:
+- In friendly, simple language
+- In plain text (no markdown or formatting)
+- In a single paragraph under 50 words
+- Without repeating or rephrasing the user's question
+`;
+
+const GEMINI_URL = 'https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent';
 
-export const generateResponse = async (prompt: string, chatHistory: Array<{role: 'user' | 'model', parts: string}>) => {
-  try {
-    // Get the Gemini Pro model
-    const model = genAI.getGenerativeModel({ model: 'gemini-pro' });
-
-    // Format chat history for the API
-    const chat = model.startChat({
-      history: [
-        {
-          role: 'user',
-          parts: [{ text: SYSTEM_PROMPT }],
-        },
-        {
-          role: 'model',
-          parts: [{ text: 'I understand. I will follow these guidelines when responding to questions, especially when I don\'t have complete information.' }],
-        },
-        ...chatHistory.map(msg => ({
-          role: msg.role === 'user' ? 'user' : 'model',
-          parts: [{ text: msg.parts }],
-        })),
-      ],
-      generationConfig: {
-        maxOutputTokens: 1000,
-        temperature: 0.7,
-      },
-    });
-
-    // Send the message and get the response
-    const result = await chat.sendMessage(prompt);
-    const response = await result.response;
-    const text = response.text();
-
-    return {
-      success: true,
-      message: text,
-    };
-  } catch (error) {
-    console.error('Error generating response:', error);
-    return {
-      success: false,
-      message: 'Sorry, I encountered an error while processing your request. Please try again later.'
-    };
-  }
-};
+export async function generateResponse(prompt: string): Promise<string> {
+  const model = genAI.getGenerativeModel({ model: 'gemini-2.0-flash' });
+
+  const result = await model.generateContent({
+    contents: [
+      {
+        role: 'user',
+        parts: [{ text: `${SYSTEM_PROMPT}\n\nUser: ${prompt}` }],
+      },
+    ],
+  });
+
+  const response = await result.response;
+  const text = response.text();
+  return text || "I'm not sure about that.";
+}
+
+// export const generateResponse = async (prompt: string, chatHistory: Array<{role: 'user' | 'model', parts: string}>) => {
+//   try {
+//     // Get the Gemini Pro model
+//     const model = genAI.getGenerativeModel({ model: 'gemini-pro' });
+//     // Format chat history for the API
+//     const chat = model.startChat({
+//       history: [
+//         {
+//           role: 'user',
+//           parts: [{ text: SYSTEM_PROMPT }],
+//         },
+//         {
+//           role: 'model',
+//           parts: [{ text: 'I understand. I will follow these guidelines when responding to questions, especially when I don\'t have complete information.' }],
+//         },
+//         ...chatHistory.map(msg => ({
+//           role: msg.role === 'user' ? 'user' : 'model',
+//           parts: [{ text: msg.parts }],
+//         })),
+//       ],
+//       generationConfig: {
+//         maxOutputTokens: 1000,
+//         temperature: 0.7,
+//       },
+//     });
+//     // Send the message and get the response
+//     const result = await chat.sendMessage(prompt);
+//     const response = await result.response;
+//     const text = response.text();
+//     return {
+//       success: true,
+//       message: text,
+//     };
+//   } catch (error) {
+//     console.error('Error generating response:', error);
+//     return {
+//       success: false,
+//       message: 'Sorry, I encountered an error while processing your request. Please try again later.'
+//     };
+//   }
+// };
 
-export const isQuestionUnknown = (response: string): boolean => {
-  // Simple check for phrases that might indicate the model doesn't know the answer
-  const unknownPhrases = [
-    'i don\'t know',
-    'i\'m not sure',
-    'i don\'t have that information',
-    'i don\'t have specific information',
-    'i don\'t have access to',
-    'i don\'t have the capability',
-    'i don\'t have enough information',
-    'i can\'t provide',
-    'i\'m unable to',
-    'i don\'t have the ability',
-  ];
-  return unknownPhrases.some(phrase =>
-    response.toLowerCase().includes(phrase)
-  );
-};
+// export const isQuestionUnknown = (response: string): boolean => {
+//   // Simple check for phrases that might indicate the model doesn't know the answer
+//   const unknownPhrases = [
+//     'i don\'t know',
+//     'i\'m not sure',
+//     'i don\'t have that information',
+//     'i don\'t have specific information',
+//     'i don\'t have access to',
+//     'i don\'t have the capability',
+//     'i don\'t have enough information',
+//     'i can\'t provide',
+//     'i\'m unable to',
+//     'i don\'t have the ability',
+//   ];
+//   return unknownPhrases.some(phrase =>
+//     response.toLowerCase().includes(phrase)
+//   );
+// };
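
With this change, errors from the model call propagate to the caller (ChatInterface wraps the call in try/catch). A minimal usage sketch of the new surface (the prompt string is illustrative):

    import { generateResponse } from '@/services/geminiService';

    async function demo() {
      // Resolves to plain text, or "I'm not sure about that." on an empty reply
      const answer = await generateResponse('What are MCPs?');
      console.log(answer);
    }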

utils/promptContent.tsx

@@ -6,6 +6,7 @@ import HowToEarnContent from '../components/content/HowToEarnContent';
 import WhatCanHelpContent from '../components/content/WhatCanHelpContent';
 import Developers from '@/components/content/Developers';
 import SampleMCPs from '@/components/content/SampleMCPs';
+import DynamicContent from '@/components/content/DynamicContent';
 
 export const getPromptContent = (prompt: string) => {
   switch (prompt) {

@@ -39,10 +40,15 @@ export const getPromptContent = (prompt: string) => {
         title: "Sample MCP Servers",
         component: <SampleMCPs />
       };
+    // case "Ask Gemini":
+    //   return {
+    //     title: "Gemini Answer",
+    //     component: <DynamicContent content={{ type: 'gemini', prompt }} />
+    //   }
     default:
       return {
-        title: "Information",
-        component: <div className="text-gray-600 dark:text-gray-300">Content for "{prompt}" coming soon...</div>
+        title: "Gemini Answer",
+        component: <DynamicContent content={{ type: 'gemini', prompt }} />
       };
   }
 };
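
With the default branch rewired, any prompt without a dedicated case now resolves to a Gemini-backed view; a short usage sketch (the prompt string is illustrative):

    // Unmatched prompts fall through to DynamicContent in 'gemini' mode
    const { title, component } = getPromptContent('How do refunds work?');
    // title === "Gemini Answer"; component renders <DynamicContent content={{ type: 'gemini', prompt }} />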