Skip to content

Commit

Permalink
fix gemini logic
Browse files Browse the repository at this point in the history
  • Loading branch information
Stan370 committed Aug 24, 2024
1 parent 8266c8d commit 0d6efa4
Show file tree
Hide file tree
Showing 4 changed files with 52 additions and 109 deletions.
145 changes: 40 additions & 105 deletions app/api/chat/google/route.ts
Original file line number Diff line number Diff line change
@@ -1,26 +1,34 @@
const {
GoogleGenerativeAI,
HarmCategory,
HarmBlockThreshold,
} = require("@google/generative-ai");
import { StreamingTextResponse } from "ai";
import { NextResponse } from "next/server";
import { getServerConfig } from "@/config/server";
import { GoogleGenerativeAI, HarmCategory, HarmBlockThreshold } from "@google/generative-ai";

// Ensure the environment variable is correctly set and decoded
const googleServiceKey = process.env.GOOGLE_SERVICE_KEY;
if (!googleServiceKey) {
throw new Error("Missing GOOGLE_SERVICE_KEY environment variable.");
}

const credentials = JSON.parse(
Buffer.from(process.env.GOOGLE_SERVICE_KEY || "", "base64").toString()
Buffer.from(googleServiceKey, "base64").toString()
);

const model = "gemini-1.5-pro-preview-0409";
const genAI = new GoogleGenerativeAI(API_KEY);
const model = genAI.getGenerativeModel({ model: MODEL_NAME });

export async function POST(req: Request) {
try {
// Fetch server configuration
const config = await getServerConfig();

// Initialize Google Generative AI with the provided credentials
const genAI = new GoogleGenerativeAI(config.geminiKey);
const modelG = genAI.getGenerativeModel({ model: "gemini-pro" });

// Define the generation configuration
const generationConfig = {
temperature: 0.8,
topK: 0.9,
topP: 1,
maxOutputTokens: 2048,
};


// Define the safety settings for content filtering
const safetySettings = [
{
category: HarmCategory.HARM_CATEGORY_HARASSMENT,
Expand All @@ -39,103 +47,30 @@ const genAI = new GoogleGenerativeAI(API_KEY);
threshold: HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE,
},
];

const chat = model.startChat({

// Start a chat session with the generative AI model
const chat = modelG.startChat({
generationConfig,
safetySettings,
history: [
],
safetySettings, // Pass safety settings if needed
});


function iteratorToStream(iterator: any) {
return new ReadableStream({
async pull(controller) {
const { value, done } = await iterator.next();

if (done || !value) {
controller.close();
} else {
const data = value.candidates[0].content.parts[0].text;
// Extract messages from the request
const { messages } = await req.json();

// controller.enqueue(`data: ${data}\n\n`);
controller.enqueue(data);
}
},
});
}
// Send the message to the model and await the response
const result = await chat.sendMessage(messages);
const response = await result.response;

export async function POST(req: Request) {
const formData = await req.formData();
const files = formData.getAll("files") as File[];
const notes = formData.get("notes");
const totalQuizQuestions = formData.get("quizCount");
const difficulty = formData.get("difficulty");
const topic = formData.get("topic");
return new Response(JSON.stringify(response), {
status: 200,
});
} catch (error: any) {
const errorMessage = error.message || "An unexpected error occurred";
const errorCode = error.status || 500;
console.error(error);

if (files.length < 1 && !notes) {
return new NextResponse("Please provide either a file or notes", {
status: 400,
return new Response(JSON.stringify({ message: errorMessage }), {
status: errorCode,
});
}

const text1 = {
text: `You are an all-rounder tutor with professional expertise in different fields. You are to generate a list of quiz questions from the document(s) with a difficutly of ${
difficulty || "Easy"
}.`,
};
const text2 = {
text: `You response should be in JSON as an array of the object below. Respond with ${
totalQuizQuestions || 5
} different questions.
{
 \"id\": 1,
 \"question\": \"\",
 \"description\": \"\",
 \"options\": {
   \"a\": \"\",
   \"b\": \"\",
   \"c\": \"\",
   \"d\": \"\"
 },
 \"answer\": \"\",
}`,
};

const filesBase64 = await Promise.all(
files.map(async (file) => {
const arrayBuffer = await file.arrayBuffer();
const buffer = Buffer.from(arrayBuffer);
// return "data:" + file.type + ";base64," + buffer.toString("base64");
return buffer.toString("base64");
})
);

const filesData = filesBase64.map((b64, i) => ({
inlineData: {
mimeType: files[i].type,
data: b64,
},
}));

const data =
files.length > 0 ? filesData : [{ text: notes?.toString() || "No notes" }];

const body = {
contents: [{ role: "user", parts: [text1, ...data, text2] }],
};

const resp = await generativeModel.generateContentStream(body);

// Convert the response into a friendly text-stream
const stream = iteratorToStream(resp.stream);

return new StreamingTextResponse(stream, {
headers: {
"Content-Type": "text/event-stream",
"Cache-Control": "no-cache",
Connection: "keep-alive",
"Transfer-Encoding": "chunked",
},
});
}
}
3 changes: 3 additions & 0 deletions app/api/chat/openai/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,9 @@ export async function POST(request: Request) {

return new Response(JSON.stringify({ message: errorMessage }), {
status: errorCode,
headers: {
"Content-Type": "application/json",
},
});
}
}
10 changes: 6 additions & 4 deletions app/page.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -46,24 +46,26 @@ export default function Home() {
</section>

<section className="py-20">

<div className="container mx-auto px-6">
<h2 className="text-3xl font-bold text-center mb-8">Key Features</h2>
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-8">
<div className="bg-white dark:bg-gray-800 p-6 rounded-lg shadow-md dark:shadow-gray-700">
<h3 className="font-bold text-xl mb-2 text-gray-900 dark:text-white">Customizable Chatbots</h3>
<p className="text-gray-700 dark:text-gray-300">Deploy AI chatbots tailored to your specific needs.</p>
<p className="text-gray-700 dark:text-gray-300">Deploy AI chatbots tailored to your specific needs. Tailor your chatbot to your specific needs with ease.
</p>
</div>
<div className="bg-white dark:bg-gray-800 p-6 rounded-lg shadow-md dark:shadow-gray-700">
<h3 className="font-bold text-xl mb-2 text-gray-900 dark:text-white">Easy Fine-Tuning</h3>
<p className="text-gray-700 dark:text-gray-300">Effortlessly train and improve your models.</p>
<p className="text-gray-700 dark:text-gray-300">Effortlessly train and improve your models. Refine your models for optimal performance and accuracy.</p>
</div>
<div className="bg-white dark:bg-gray-800 p-6 rounded-lg shadow-md dark:shadow-gray-700">
<h3 className="font-bold text-xl mb-2 text-gray-900 dark:text-white">Safety & Privacy</h3>
<p className="text-gray-700 dark:text-gray-300">Ensure data protection and user privacy.</p>
<p className="text-gray-700 dark:text-gray-300">Your data is protected and secure with our platform.</p>
</div>
<div className="bg-white dark:bg-gray-800 p-6 rounded-lg shadow-md dark:shadow-gray-700">
<h3 className="font-bold text-xl mb-2 text-gray-900 dark:text-white">Open-Source</h3>
<p className="text-gray-700 dark:text-gray-300">Benefit from community-driven development.</p>
<p className="text-gray-700 dark:text-gray-300">Find, share, and use prompts to accelerate your AI development.</p>
</div>
</div>
</div>
Expand Down
3 changes: 3 additions & 0 deletions config/server.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ declare global {
// ... other environment variables
OPENAI_BASE_URL: string;
OPENAI_ORG_ID: string;
GEMINI_API_KEY:string;
AZURE_OPENAI_API_KEY: string;
NEXT_PUBLIC_AZURE_OPENAI_ENDPOINT: string;
NEXT_PUBLIC_AZURE_GPT_35_TURBO_ID: string;
Expand All @@ -33,6 +34,7 @@ declare global {
OPENAI_BASE_URL,
OPENAI_PROXY_URL,
OPENAI_ORG_ID,
GEMINI_API_KEY,
AZURE_OPENAI_API_KEY,
NEXT_PUBLIC_AZURE_OPENAI_ENDPOINT,
NEXT_PUBLIC_AZURE_GPT_35_TURBO_ID,
Expand All @@ -45,6 +47,7 @@ declare global {
vercelEnv: NEXT_PUBLIC_VERCEL_ENV,
backendUrl: NEXT_PUBLIC_BACKEND_URL,
openaiApiKey: OPENAI_API_KEY,
geminiKey:GEMINI_API_KEY,
// ... other properties
openaiBaseUrl: OPENAI_BASE_URL,
openaiProxyUrl: OPENAI_PROXY_URL,
Expand Down

0 comments on commit 0d6efa4

Please sign in to comment.