diff --git a/app/api/chat/google/route.ts b/app/api/chat/google/route.ts index 4f6ca02..ac45be3 100644 --- a/app/api/chat/google/route.ts +++ b/app/api/chat/google/route.ts @@ -1,26 +1,34 @@ -const { - GoogleGenerativeAI, - HarmCategory, - HarmBlockThreshold, -} = require("@google/generative-ai"); -import { StreamingTextResponse } from "ai"; -import { NextResponse } from "next/server"; +import { getServerConfig } from "@/config/server"; +import { GoogleGenerativeAI, HarmCategory, HarmBlockThreshold } from "@google/generative-ai"; + +// Ensure the environment variable is correctly set and decoded +const googleServiceKey = process.env.GOOGLE_SERVICE_KEY; +if (!googleServiceKey) { + throw new Error("Missing GOOGLE_SERVICE_KEY environment variable."); +} const credentials = JSON.parse( - Buffer.from(process.env.GOOGLE_SERVICE_KEY || "", "base64").toString() + Buffer.from(googleServiceKey, "base64").toString() ); -const model = "gemini-1.5-pro-preview-0409"; -const genAI = new GoogleGenerativeAI(API_KEY); - const model = genAI.getGenerativeModel({ model: MODEL_NAME }); - +export async function POST(req: Request) { + try { + // Fetch server configuration + const config = await getServerConfig(); + + // Initialize Google Generative AI with the provided credentials + const genAI = new GoogleGenerativeAI(config.geminiKey); + const modelG = genAI.getGenerativeModel({ model: "gemini-pro" }); + + // Define the generation configuration const generationConfig = { temperature: 0.8, topK: 0.9, topP: 1, maxOutputTokens: 2048, }; - + + // Define the safety settings for content filtering const safetySettings = [ { category: HarmCategory.HARM_CATEGORY_HARASSMENT, @@ -39,103 +47,30 @@ const genAI = new GoogleGenerativeAI(API_KEY); threshold: HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE, }, ]; - - const chat = model.startChat({ + + // Start a chat session with the generative AI model + const chat = modelG.startChat({ generationConfig, - safetySettings, - history: [ - ], + safetySettings, 
// Pass safety settings if needed }); - - -function iteratorToStream(iterator: any) { - return new ReadableStream({ - async pull(controller) { - const { value, done } = await iterator.next(); - if (done || !value) { - controller.close(); - } else { - const data = value.candidates[0].content.parts[0].text; + // Extract messages from the request + const { messages } = await req.json(); - // controller.enqueue(`data: ${data}\n\n`); - controller.enqueue(data); - } - }, - }); -} + // Send the message to the model and await the response + const result = await chat.sendMessage(messages); + const response = await result.response; -export async function POST(req: Request) { - const formData = await req.formData(); - const files = formData.getAll("files") as File[]; - const notes = formData.get("notes"); - const totalQuizQuestions = formData.get("quizCount"); - const difficulty = formData.get("difficulty"); - const topic = formData.get("topic"); + return new Response(JSON.stringify(response), { + status: 200, + }); + } catch (error: any) { + const errorMessage = error.message || "An unexpected error occurred"; + const errorCode = error.status || 500; + console.error(error); - if (files.length < 1 && !notes) { - return new NextResponse("Please provide either a file or notes", { - status: 400, + return new Response(JSON.stringify({ message: errorMessage }), { + status: errorCode, }); } - - const text1 = { - text: `You are an all-rounder tutor with professional expertise in different fields. You are to generate a list of quiz questions from the document(s) with a difficutly of ${ - difficulty || "Easy" - }.`, - }; - const text2 = { - text: `You response should be in JSON as an array of the object below. Respond with ${ - totalQuizQuestions || 5 - } different questions. 
- { -  \"id\": 1, -  \"question\": \"\", -  \"description\": \"\", -  \"options\": { -    \"a\": \"\", -    \"b\": \"\", -    \"c\": \"\", -    \"d\": \"\" -  }, -  \"answer\": \"\", - }`, - }; - - const filesBase64 = await Promise.all( - files.map(async (file) => { - const arrayBuffer = await file.arrayBuffer(); - const buffer = Buffer.from(arrayBuffer); - // return "data:" + file.type + ";base64," + buffer.toString("base64"); - return buffer.toString("base64"); - }) - ); - - const filesData = filesBase64.map((b64, i) => ({ - inlineData: { - mimeType: files[i].type, - data: b64, - }, - })); - - const data = - files.length > 0 ? filesData : [{ text: notes?.toString() || "No notes" }]; - - const body = { - contents: [{ role: "user", parts: [text1, ...data, text2] }], - }; - - const resp = await generativeModel.generateContentStream(body); - - // Convert the response into a friendly text-stream - const stream = iteratorToStream(resp.stream); - - return new StreamingTextResponse(stream, { - headers: { - "Content-Type": "text/event-stream", - "Cache-Control": "no-cache", - Connection: "keep-alive", - "Transfer-Encoding": "chunked", - }, - }); -} \ No newline at end of file +} diff --git a/app/api/chat/openai/route.ts b/app/api/chat/openai/route.ts index 3c11be6..97758e0 100644 --- a/app/api/chat/openai/route.ts +++ b/app/api/chat/openai/route.ts @@ -42,6 +42,9 @@ export async function POST(request: Request) { return new Response(JSON.stringify({ message: errorMessage }), { status: errorCode, + headers: { + "Content-Type": "application/json", + }, }); } } diff --git a/app/page.tsx b/app/page.tsx index 00af5a8..639f2a2 100644 --- a/app/page.tsx +++ b/app/page.tsx @@ -46,24 +46,26 @@ export default function Home() {
+

Key Features

Customizable Chatbots

-

Deploy AI chatbots tailored to your specific needs.

+

Tailor your chatbot to your specific needs with ease. +

Easy Fine-Tuning

-

Effortlessly train and improve your models.

+

Refine your models for optimal performance and accuracy.

Safety & Privacy

-

Ensure data protection and user privacy.

+

Your data is protected and secure with our platform.

Open-Source

-

Benefit from community-driven development.

+

Find, share, and use prompts to accelerate your AI development.

diff --git a/config/server.ts b/config/server.ts index ccd844d..f92cf46 100644 --- a/config/server.ts +++ b/config/server.ts @@ -10,6 +10,7 @@ declare global { // ... other environment variables OPENAI_BASE_URL: string; OPENAI_ORG_ID: string; + GEMINI_API_KEY:string; AZURE_OPENAI_API_KEY: string; NEXT_PUBLIC_AZURE_OPENAI_ENDPOINT: string; NEXT_PUBLIC_AZURE_GPT_35_TURBO_ID: string; @@ -33,6 +34,7 @@ declare global { OPENAI_BASE_URL, OPENAI_PROXY_URL, OPENAI_ORG_ID, + GEMINI_API_KEY, AZURE_OPENAI_API_KEY, NEXT_PUBLIC_AZURE_OPENAI_ENDPOINT, NEXT_PUBLIC_AZURE_GPT_35_TURBO_ID, @@ -45,6 +47,7 @@ declare global { vercelEnv: NEXT_PUBLIC_VERCEL_ENV, backendUrl: NEXT_PUBLIC_BACKEND_URL, openaiApiKey: OPENAI_API_KEY, + geminiKey:GEMINI_API_KEY, // ... other properties openaiBaseUrl: OPENAI_BASE_URL, openaiProxyUrl: OPENAI_PROXY_URL,