From 4ce7c427532a1abbff9c12575e2f8812c50a2c6f Mon Sep 17 00:00:00 2001 From: codebox283 Date: Fri, 27 Jun 2025 09:40:26 +0530 Subject: [PATCH] Gemini Integration --- .env | 2 + src/components/ChatInterface.tsx | 38 ++++-- src/components/content/DynamicContent.tsx | 110 ++++++++++++++- src/services/geminiService.ts | 157 +++++++++++++--------- src/utils/promptContent.tsx | 10 +- 5 files changed, 237 insertions(+), 80 deletions(-) create mode 100644 .env diff --git a/.env b/.env new file mode 100644 index 0000000..5da1525 --- /dev/null +++ b/.env @@ -0,0 +1,2 @@ +# Google Gemini API Key (placeholder — never commit a real key; add .env to .gitignore and rotate any key previously committed) +NEXT_PUBLIC_GEMINI_API_KEY=your_gemini_api_key_here diff --git a/src/components/ChatInterface.tsx b/src/components/ChatInterface.tsx index f86698c..51da597 100644 --- a/src/components/ChatInterface.tsx +++ b/src/components/ChatInterface.tsx @@ -5,6 +5,7 @@ import TypingAnimation from './TypingAnimation'; import { getPromptContent } from '../utils/promptContent'; import { Plus, Send, Upload } from 'lucide-react'; import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from './ui/select'; +import { generateResponse } from '@/services/geminiService'; interface ChatInterfaceProps { onPromptSelect: (prompt: string, content: any) => void; @@ -92,13 +93,10 @@ const ChatInterface: React.FC = ({ onPromptSelect, isExpande }, 1000); // "Thinking..." 
duration }; - const handleSendMessage = () => { + const handleSendMessage = async () => { if (!inputValue.trim()) return; - const userMessage = { - text: inputValue, - isUser: true, - }; + const userMessage = { text: inputValue, isUser: true }; setMessages(prev => [...prev, userMessage]); const matched = promptPatterns.find(({ pattern }) => pattern.test(inputValue)); @@ -108,25 +106,37 @@ const ChatInterface: React.FC = ({ onPromptSelect, isExpande setThinkingStage("thinking"); setIsThinking(true); - setTimeout(() => { + setTimeout(async () => { setThinkingStage("memory"); + + let responseText = promptResponses[promptToUse]; + + // 🔁 Fallback to Gemini if no predefined response + if (!responseText) { + try { + responseText = await generateResponse(promptToUse); + } catch (err) { + responseText = "Sorry, something went wrong while generating a response."; + console.error(err); + } + } + setTimeout(() => { setIsThinking(false); setThinkingStage(null); setMessages(prev => [ ...prev, { - text: promptResponses[promptToUse] || `Let me show you detailed information about "${promptToUse}"`, + text: responseText, isUser: false, isTyping: true, }, ]); - setTimeout(() => { - const content = getPromptContent(promptToUse); - onPromptSelect(promptToUse, content); - }, 9000); - }, 1000); - }, 1000); + + const content = getPromptContent(promptToUse); + onPromptSelect(promptToUse, content); + }, 1000); // "Activating memory..." delay + }, 1000); // "Thinking..." delay }; @@ -294,7 +304,7 @@ const ChatInterface: React.FC = ({ onPromptSelect, isExpande - // ...existing code... + // ...in development... 
); }; diff --git a/src/components/content/DynamicContent.tsx b/src/components/content/DynamicContent.tsx index 85bac45..63cd28b 100644 --- a/src/components/content/DynamicContent.tsx +++ b/src/components/content/DynamicContent.tsx @@ -13,8 +13,8 @@ const DynamicContent: React.FC = ({ content }) => { const fetchGemini = async () => { if (content?.type === 'gemini' && content.prompt) { setLoading(true); - const res = await generateResponse(content.prompt, []); - setGeminiAnswer(res.message); + const res = await generateResponse(content.prompt); + setGeminiAnswer(res); setLoading(false); } }; @@ -47,4 +47,108 @@ const DynamicContent: React.FC = ({ content }) => { ); }; -export default DynamicContent; \ No newline at end of file +export default DynamicContent; + + +// This would generate proper html jsx codes but it requires more advanced models + +// import React, { useEffect, useState } from 'react'; +// import { generateResponse } from '@/services/geminiService'; + +// interface DynamicContentProps { +// content: any; +// } + +// const DynamicContent: React.FC = ({ content }) => { +// const [geminiAnswer, setGeminiAnswer] = useState(null); +// const [loading, setLoading] = useState(false); + +// useEffect(() => { +// const fetchGemini = async () => { +// if (content?.type === 'gemini' && content.prompt) { +// setLoading(true); +// try { +// const formattedPrompt = ` +// You are an AI that outputs JSX for Tailwind-styled UI components. + +// Task: +// - Return a single JSX
component with className="space-y-8" +// - Include a title section with heading and paragraph +// - Include 3 hardcoded MCP cards inside a grid layout +// - Each card must include: name, description, 3 features in
  • , and author +// - Use Tailwind CSS classes as shown in the example + +// ⚠️ Output ONLY JSX — no explanations, no markdown, no \`\`\`jsx + +// Example format: + +//
    +//
    +//

    +// Sample MCP Servers +//

    +//

    +// Explore a few example MCPs you can build, deploy, and monetize on the Fastcode platform. +//

    +//
    + +//
    +//
    +//
    +//

    Image Enhancer

    +//
    +//
    Enhance image quality with AI.
    +//
      +//
    • Upscale resolution
    • +//
    • Reduce noise
    • +//
    • Sharpen details
    • +//
    +//
    By fastcoder.ai
    +//
    + +// +//
    +//
    +// `; + + +// const res = await generateResponse(formattedPrompt.trim()); +// setGeminiAnswer(res); +// } catch (error) { +// setGeminiAnswer("Something went wrong while generating the response."); +// } finally { +// setLoading(false); +// } +// } +// }; +// fetchGemini(); +// }, [content]); + +// if (!content) return null; + +// if (content.type === 'gemini') { +// return ( +//
    +//

    Gemini Answer

    +// {loading ? ( +//
    Loading...
    +// ) : ( +//
    +// )} +//
    +// ); +// } + +// if (React.isValidElement(content)) return content; + +// return ( +//
    +// {typeof content === 'string' ? content : JSON.stringify(content)} +//
    +// ); +// }; + +// export default DynamicContent; diff --git a/src/services/geminiService.ts b/src/services/geminiService.ts index 0b55a0b..ec131fb 100644 --- a/src/services/geminiService.ts +++ b/src/services/geminiService.ts @@ -1,71 +1,106 @@ import { GoogleGenerativeAI } from '@google/generative-ai'; +const GEMINI_API_KEY = process.env.NEXT_PUBLIC_GEMINI_API_KEY || ''; // never hardcode the key in source; NEXT_PUBLIC_ vars are still exposed to the browser — prefer a server-side route + // Initialize the Gemini API with your API key -const genAI = new GoogleGenerativeAI(process.env.NEXT_PUBLIC_GEMINI_API_KEY || ''); +const genAI = new GoogleGenerativeAI(GEMINI_API_KEY || ''); -const SYSTEM_PROMPT = `You are an AI assistant that helps users navigate and answer queries of a website which promotes the monetization of MCPs.` +const SYSTEM_PROMPT = ` +You are an AI assistant for a platform that helps users discover and use MCPs (Monetizable Code Packages). -export const generateResponse = async (prompt: string, chatHistory: Array<{role: 'user' | 'model', parts: string}>) => { - try { - // Get the Gemini Pro model - const model = genAI.getGenerativeModel({ model: 'gemini-pro' }); - - // Format chat history for the API - const chat = model.startChat({ - history: [ - { - role: 'user', - parts: [{ text: SYSTEM_PROMPT }], - }, - { - role: 'model', - parts: [{ text: 'I understand. I will follow these guidelines when responding to questions, especially when I don\'t have complete information.' }], - }, - ...chatHistory.map(msg => ({ - role: msg.role === 'user' ? 'user' : 'model', - parts: [{ text: msg.parts }], - })), - ], - generationConfig: { - maxOutputTokens: 1000, - temperature: 0.7, +- Developers can create and monetize MCPs by wrapping AI functionality into deployable, market-ready APIs. +- Non-developers can use a no-code tool to build and publish their own MCPs. +- Users interact with these MCPs through a smart chat interface, selecting the ones they want and getting tasks done directly inside the chat. +- Your job is to guide users in understanding and using the platform. 
+ +Always respond: +- In friendly, simple language +- In plain text (no markdown or formatting) +- In a single paragraph under 50 words +- Without repeating or rephrasing the user's question +`; + + +const GEMINI_URL = 'https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent'; + +export async function generateResponse(prompt: string): Promise { + const model = genAI.getGenerativeModel({ model: 'gemini-2.0-flash' }); + + const result = await model.generateContent({ + contents: [ + { + role: 'user', + parts: [{ text: `${SYSTEM_PROMPT}\n\nUser: ${prompt}` }], }, - }); + ], + }); - // Send the message and get the response - const result = await chat.sendMessage(prompt); - const response = await result.response; - const text = response.text(); + const response = await result.response; + const text = response.text(); + return text || "I'm not sure about that."; +} + +// export const generateResponse = async (prompt: string, chatHistory: Array<{role: 'user' | 'model', parts: string}>) => { +// try { +// // Get the Gemini Pro model +// const model = genAI.getGenerativeModel({ model: 'gemini-pro' }); - return { - success: true, - message: text, - }; - } catch (error) { - console.error('Error generating response:', error); - return { - success: false, - message: 'Sorry, I encountered an error while processing your request. Please try again later.' - }; - } -}; +// // Format chat history for the API +// const chat = model.startChat({ +// history: [ +// { +// role: 'user', +// parts: [{ text: SYSTEM_PROMPT }], +// }, +// { +// role: 'model', +// parts: [{ text: 'I understand. I will follow these guidelines when responding to questions, especially when I don\'t have complete information.' }], +// }, +// ...chatHistory.map(msg => ({ +// role: msg.role === 'user' ? 
'user' : 'model', +// parts: [{ text: msg.parts }], +// })), +// ], +// generationConfig: { +// maxOutputTokens: 1000, +// temperature: 0.7, +// }, +// }); -export const isQuestionUnknown = (response: string): boolean => { - // Simple check for phrases that might indicate the model doesn't know the answer - const unknownPhrases = [ - 'i don\'t know', - 'i\'m not sure', - 'i don\'t have that information', - 'i don\'t have specific information', - 'i don\'t have access to', - 'i don\'t have the capability', - 'i don\'t have enough information', - 'i can\'t provide', - 'i\'m unable to', - 'i don\'t have the ability', - ]; +// // Send the message and get the response +// const result = await chat.sendMessage(prompt); +// const response = await result.response; +// const text = response.text(); + +// return { +// success: true, +// message: text, +// }; +// } catch (error) { +// console.error('Error generating response:', error); +// return { +// success: false, +// message: 'Sorry, I encountered an error while processing your request. Please try again later.' 
+// }; +// } +// }; - return unknownPhrases.some(phrase => - response.toLowerCase().includes(phrase) - ); -}; +// export const isQuestionUnknown = (response: string): boolean => { +// // Simple check for phrases that might indicate the model doesn't know the answer +// const unknownPhrases = [ +// 'i don\'t know', +// 'i\'m not sure', +// 'i don\'t have that information', +// 'i don\'t have specific information', +// 'i don\'t have access to', +// 'i don\'t have the capability', +// 'i don\'t have enough information', +// 'i can\'t provide', +// 'i\'m unable to', +// 'i don\'t have the ability', +// ]; + +// return unknownPhrases.some(phrase => +// response.toLowerCase().includes(phrase) +// ); +// }; diff --git a/src/utils/promptContent.tsx b/src/utils/promptContent.tsx index 6d4269c..8b7a396 100644 --- a/src/utils/promptContent.tsx +++ b/src/utils/promptContent.tsx @@ -6,6 +6,7 @@ import HowToEarnContent from '../components/content/HowToEarnContent'; import WhatCanHelpContent from '../components/content/WhatCanHelpContent'; import Developers from '@/components/content/Developers'; import SampleMCPs from '@/components/content/SampleMCPs'; +import DynamicContent from '@/components/content/DynamicContent'; export const getPromptContent = (prompt: string) => { switch (prompt) { @@ -39,10 +40,15 @@ export const getPromptContent = (prompt: string) => { title: "Sample MCP Servers", component: }; + // case "Ask Gemini": + // return { + // title: "Gemini Answer", + // component: + // } default: return { - title: "Information", - component:
    Content for "{prompt}" coming soon...
    + title: "Gemini Answer", + component: }; } };