DRY
sid597 committed Apr 3, 2025
commit 2b99736b0ecc0f4b72ed6fb01be1fc2115a1fa45
141 changes: 8 additions & 133 deletions apps/website/app/api/llm/anthropic/chat/route.ts
@@ -1,143 +1,18 @@
 import { NextRequest } from "next/server";
-import cors from "../../../../../lib/cors";
-
-type Message = {
-  role: string;
-  content: string;
-};
-
-type Settings = {
-  model: string;
-  maxTokens: number;
-  temperature: number;
-};
-
-type AnthropicUsage = {
-  input_tokens: number;
-  output_tokens: number;
-};
-
-type RequestBody = {
-  documents: Message[];
-  passphrase?: string;
-  settings: Settings;
-};
-
-const CONTENT_TYPE_JSON = "application/json";
-const CONTENT_TYPE_TEXT = "text/plain";
-const ANTHROPIC_API_VERSION = "2023-06-01";
+import {
+  handleLLMRequest,
+  handleOptionsRequest,
+} from "../../../../../lib/llm/handler";
+import { anthropicConfig } from "../../../../../lib/llm/providers";
 
 export const runtime = "nodejs";
 export const preferredRegion = "auto";
 export const maxDuration = 300;
 
 export async function POST(request: NextRequest): Promise<Response> {
-  try {
-    const requestData: RequestBody = await request.json();
-    const { documents: messages, settings } = requestData;
-    const { model, maxTokens, temperature } = settings;
-
-    const apiKey = process.env.ANTHROPIC_API_KEY;
-
-    if (!apiKey) {
-      console.error("ANTHROPIC_API_KEY environment variable is not set");
-      return cors(
-        request,
-        new Response(
-          JSON.stringify({
-            error:
-              "API key not configured. Please set the ANTHROPIC_API_KEY environment variable in your Vercel project settings.",
-          }),
-          {
-            status: 500,
-            headers: { "Content-Type": CONTENT_TYPE_JSON },
-          },
-        ),
-      );
-    }
-
-    const url = "https://api.anthropic.com/v1/messages";
-
-    const body = JSON.stringify({
-      model: model,
-      max_tokens: maxTokens,
-      messages: messages,
-      temperature: temperature,
-    });
-
-    const response = await fetch(url, {
-      method: "POST",
-      headers: {
-        "Content-Type": CONTENT_TYPE_JSON,
-        "x-api-key": apiKey,
-        "anthropic-version": ANTHROPIC_API_VERSION,
-      },
-      body,
-    });
-
-    const responseData = await response.json();
-
-    if (!response.ok) {
-      console.error("Anthropic API error:", responseData);
-      return cors(
-        request,
-        new Response(
-          JSON.stringify({
-            error: `Anthropic API error: ${responseData.error?.message || "Unknown error"}`,
-          }),
-          {
-            status: response.status,
-            headers: { "Content-Type": CONTENT_TYPE_JSON },
-          },
-        ),
-      );
-    }
-
-    const replyText = responseData.content?.[0]?.text;
-
-    if (!replyText) {
-      console.error(
-        "Invalid response format from Anthropic API:",
-        responseData,
-      );
-      return cors(
-        request,
-        new Response(
-          JSON.stringify({
-            error:
-              "Invalid response format from Anthropic API. Check server logs for details.",
-          }),
-          {
-            status: 500,
-            headers: { "Content-Type": CONTENT_TYPE_JSON },
-          },
-        ),
-      );
-    }
-
-    return cors(
-      request,
-      new Response(replyText, {
-        headers: { "Content-Type": CONTENT_TYPE_TEXT },
-      }),
-    );
-  } catch (error) {
-    console.error("Error processing request:", error);
-    return cors(
-      request,
-      new Response(
-        JSON.stringify({
-          error: `Internal Server Error: ${error instanceof Error ? error.message : "Unknown error"}`,
-        }),
-        {
-          status: 500,
-          headers: { "Content-Type": CONTENT_TYPE_JSON },
-        },
-      ),
-    );
-  }
+  return handleLLMRequest(request, anthropicConfig);
 }
 
-export async function OPTIONS(request: NextRequest) {
-  return cors(request, new Response(null, { status: 204 }));
+export async function OPTIONS(request: NextRequest): Promise<Response> {
+  return handleOptionsRequest(request);
 }
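
For context on what the route now delegates to: the actual contents of `lib/llm/handler.ts` and `lib/llm/providers.ts` are not part of this diff, so the following is only a hedged sketch of a provider-config-driven handler, reconstructed from the per-provider logic deleted above. The names `LLMProviderConfig`, `apiKeyEnvVar`, `buildRequest`, and `extractReply` are assumptions, not the commit's actual API.

```ts
// Sketch only: lib/llm/handler.ts is not shown in this commit, so the shapes
// below (LLMProviderConfig, buildRequest, extractReply) are assumptions based
// on the per-provider code that was removed from the routes.
import { NextRequest } from "next/server";
import cors from "../cors"; // the same lib/cors helper the old routes used

type Message = { role: string; content: string };

type Settings = {
  model: string;
  maxTokens: number;
  temperature: number;
  [key: string]: unknown; // provider-specific extras, e.g. Gemini safetySettings
};

export type LLMProviderConfig = {
  name: string;
  apiKeyEnvVar: string;
  buildRequest: (
    apiKey: string,
    messages: Message[],
    settings: Settings,
  ) => { url: string; init: RequestInit };
  extractReply: (responseData: any) => string | undefined;
};

// Shared POST logic: read the body, check the API key, call the provider,
// and return either the plain-text reply or a JSON error, all CORS-wrapped.
export async function handleLLMRequest(
  request: NextRequest,
  config: LLMProviderConfig,
): Promise<Response> {
  const jsonError = (error: string, status: number) =>
    cors(
      request,
      new Response(JSON.stringify({ error }), {
        status,
        headers: { "Content-Type": "application/json" },
      }),
    );

  try {
    const { documents: messages, settings } = await request.json();

    const apiKey = process.env[config.apiKeyEnvVar];
    if (!apiKey) {
      return jsonError(`${config.apiKeyEnvVar} is not configured.`, 500);
    }

    const { url, init } = config.buildRequest(apiKey, messages, settings);
    const response = await fetch(url, init);
    const responseData = await response.json();

    if (!response.ok) {
      return jsonError(
        `${config.name} API error: ${responseData.error?.message || "Unknown error"}`,
        response.status,
      );
    }

    const replyText = config.extractReply(responseData);
    if (!replyText) {
      return jsonError(`Invalid response format from ${config.name} API.`, 500);
    }

    return cors(
      request,
      new Response(replyText, { headers: { "Content-Type": "text/plain" } }),
    );
  } catch (error) {
    return jsonError(
      `Internal Server Error: ${error instanceof Error ? error.message : "Unknown error"}`,
      500,
    );
  }
}

// Shared OPTIONS logic: the same empty 204 preflight response the old routes built inline.
export async function handleOptionsRequest(request: NextRequest): Promise<Response> {
  return cors(request, new Response(null, { status: 204 }));
}
```

With a handler shaped like this, each route shrinks to the two one-liners seen in the diff above, and new providers only need a config object.
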
150 changes: 8 additions & 142 deletions apps/website/app/api/llm/gemini/chat/route.ts
@@ -1,152 +1,18 @@
 import { NextRequest } from "next/server";
-import cors from "../../../../../lib/cors";
-
-type Message = {
-  role: string;
-  content: string;
-};
-
-type Settings = {
-  model: string;
-  maxTokens: number;
-  temperature: number;
-  safetySettings?: Array<{
-    category: string;
-    threshold: string;
-  }>;
-};
-
-type RequestBody = {
-  documents: Message[];
-  passphrase?: string;
-  settings: Settings;
-};
-
-type GeminiMessage = {
-  role: string;
-  parts: Array<{
-    text: string;
-  }>;
-};
-
-const CONTENT_TYPE_JSON = "application/json";
-const CONTENT_TYPE_TEXT = "text/plain";
+import {
+  handleLLMRequest,
+  handleOptionsRequest,
+} from "../../../../../lib/llm/handler";
+import { geminiConfig } from "../../../../../lib/llm/providers";
 
 export const runtime = "nodejs";
 export const preferredRegion = "auto";
 export const maxDuration = 300;
 
-function convertToGeminiFormat(messages: Message[]): GeminiMessage[] {
-  return messages.map((msg) => ({
-    role: msg.role === "user" ? "user" : "model",
-    parts: [{ text: msg.content }],
-  }));
-}
-
 export async function POST(request: NextRequest): Promise<Response> {
-  try {
-    const requestData: RequestBody = await request.json();
-    const { documents: messages, settings } = requestData;
-    const { model, maxTokens, temperature } = settings;
-
-    const apiKey = process.env.GEMINI_API_KEY;
-
-    if (!apiKey) {
-      console.error("GEMINI_API_KEY environment variable is not set");
-      return cors(
-        request,
-        new Response(
-          JSON.stringify({
-            error:
-              "API key not configured. Please set the GEMINI_API_KEY environment variable in your Vercel project settings.",
-          }),
-          {
-            status: 500,
-            headers: { "Content-Type": CONTENT_TYPE_JSON },
-          },
-        ),
-      );
-    }
-
-    const url = `https://generativelanguage.googleapis.com/v1beta/models/${model}:generateContent?key=${apiKey}`;
-
-    const body = JSON.stringify({
-      contents: convertToGeminiFormat(messages),
-      generationConfig: {
-        maxOutputTokens: maxTokens,
-        temperature: temperature,
-      },
-      safetySettings: settings.safetySettings,
-    });
-
-    const response = await fetch(url, {
-      method: "POST",
-      headers: {
-        "Content-Type": CONTENT_TYPE_JSON,
-      },
-      body,
-    });
-
-    const responseData = await response.json();
-
-    if (!response.ok) {
-      console.error("Gemini API error:", responseData);
-      return cors(
-        request,
-        new Response(
-          JSON.stringify({
-            error: `Gemini API error: ${responseData.error?.message || "Unknown error"}`,
-          }),
-          {
-            status: response.status,
-            headers: { "Content-Type": CONTENT_TYPE_JSON },
-          },
-        ),
-      );
-    }
-
-    const replyText = responseData.candidates?.[0]?.content?.parts?.[0]?.text;
-
-    if (!replyText) {
-      console.error("Invalid response format from Gemini API:", responseData);
-      return cors(
-        request,
-        new Response(
-          JSON.stringify({
-            error:
-              "Invalid response format from Gemini API. Check server logs for details.",
-          }),
-          {
-            status: 500,
-            headers: { "Content-Type": CONTENT_TYPE_JSON },
-          },
-        ),
-      );
-    }
-
-    return cors(
-      request,
-      new Response(replyText, {
-        headers: { "Content-Type": CONTENT_TYPE_TEXT },
-      }),
-    );
-  } catch (error) {
-    console.error("Error processing request:", error);
-    return cors(
-      request,
-      new Response(
-        JSON.stringify({
-          error: `Internal Server Error: ${error instanceof Error ? error.message : "Unknown error"}`,
-        }),
-        {
-          status: 500,
-          headers: { "Content-Type": CONTENT_TYPE_JSON },
-        },
-      ),
-    );
-  }
+  return handleLLMRequest(request, geminiConfig);
 }
 
-export async function OPTIONS(request: NextRequest) {
-  return cors(request, new Response(null, { status: 204 }));
+export async function OPTIONS(request: NextRequest): Promise<Response> {
+  return handleOptionsRequest(request);
 }
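
The per-provider details deleted above (endpoint URL, headers, request body shape, reply extraction, and the old `convertToGeminiFormat` helper) presumably move into `lib/llm/providers.ts`. That file is also not in this diff, so the configs below are only a sketch against the same assumed `LLMProviderConfig` shape, reusing the values from the removed route code.

```ts
// Sketch only: the real lib/llm/providers.ts is not part of this diff.
// These configs reuse the endpoints, headers, and response paths from the
// code removed above, expressed against the assumed LLMProviderConfig type.
import type { LLMProviderConfig } from "./handler";

export const anthropicConfig: LLMProviderConfig = {
  name: "Anthropic",
  apiKeyEnvVar: "ANTHROPIC_API_KEY",
  buildRequest: (apiKey, messages, settings) => ({
    url: "https://api.anthropic.com/v1/messages",
    init: {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "x-api-key": apiKey,
        "anthropic-version": "2023-06-01",
      },
      body: JSON.stringify({
        model: settings.model,
        max_tokens: settings.maxTokens,
        messages,
        temperature: settings.temperature,
      }),
    },
  }),
  extractReply: (data) => data.content?.[0]?.text,
};

export const geminiConfig: LLMProviderConfig = {
  name: "Gemini",
  apiKeyEnvVar: "GEMINI_API_KEY",
  buildRequest: (apiKey, messages, settings) => ({
    // The key travels in the query string, as in the deleted Gemini route.
    url: `https://generativelanguage.googleapis.com/v1beta/models/${settings.model}:generateContent?key=${apiKey}`,
    init: {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({
        // Inlines what the removed convertToGeminiFormat helper used to do.
        contents: messages.map((msg) => ({
          role: msg.role === "user" ? "user" : "model",
          parts: [{ text: msg.content }],
        })),
        generationConfig: {
          maxOutputTokens: settings.maxTokens,
          temperature: settings.temperature,
        },
        safetySettings: settings.safetySettings,
      }),
    },
  }),
  extractReply: (data) => data.candidates?.[0]?.content?.parts?.[0]?.text,
};
```
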