Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions app/api/[provider]/[...path]/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ import { handle as xaiHandler } from "../../xai";
import { handle as chatglmHandler } from "../../glm";
import { handle as proxyHandler } from "../../proxy";
import { handle as ai302Handler } from "../../302ai";
import { handle as qiniuHandler } from "../../qiniu";

async function handle(
req: NextRequest,
Expand Down Expand Up @@ -51,6 +52,8 @@ async function handle(
return chatglmHandler(req, { params });
case ApiPath.SiliconFlow:
return siliconflowHandler(req, { params });
case ApiPath.Qiniu:
return qiniuHandler(req, { params });
case ApiPath.OpenAI:
return openaiHandler(req, { params });
case ApiPath["302.AI"]:
Expand Down
3 changes: 3 additions & 0 deletions app/api/auth.ts
Original file line number Diff line number Diff line change
Expand Up @@ -104,6 +104,9 @@ export function auth(req: NextRequest, modelProvider: ModelProvider) {
case ModelProvider.SiliconFlow:
systemApiKey = serverConfig.siliconFlowApiKey;
break;
case ModelProvider.Qiniu:
systemApiKey = serverConfig.qiniuApiKey;
break;
case ModelProvider.GPT:
default:
if (req.nextUrl.pathname.includes("azure/deployments")) {
Expand Down
123 changes: 123 additions & 0 deletions app/api/qiniu.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,123 @@
import { getServerSideConfig } from "@/app/config/server";
import {
QINIU_BASE_URL,
ApiPath,
ModelProvider,
ServiceProvider,
} from "@/app/constant";
import { prettyObject } from "@/app/utils/format";
import { NextRequest, NextResponse } from "next/server";
import { auth } from "@/app/api/auth";
import { isModelNotavailableInServer } from "@/app/utils/model";

const serverConfig = getServerSideConfig();

/**
 * Route handler for the Qiniu provider proxy.
 *
 * Flow: short-circuit CORS preflight, enforce access-control auth for the
 * Qiniu model provider, then forward the request upstream via `request`.
 *
 * @param req    Incoming Next.js request to proxy.
 * @param params Catch-all route segments (logged for debugging only).
 * @returns Upstream response, a 401 on auth failure, or a 500 on proxy error.
 */
export async function handle(
  req: NextRequest,
  { params }: { params: { path: string[] } },
) {
  console.log("[Qiniu Route] params ", params);

  // CORS preflight — answer immediately, no auth required.
  if (req.method === "OPTIONS") {
    return NextResponse.json({ body: "OK" }, { status: 200 });
  }

  const authResult = auth(req, ModelProvider.Qiniu);
  if (authResult.error) {
    return NextResponse.json(authResult, {
      status: 401,
    });
  }

  try {
    const response = await request(req);
    return response;
  } catch (e) {
    console.error("[Qiniu] ", e);
    // Bug fix: previously this returned the error body with an implicit
    // 200 status, so clients could not distinguish a failed proxy call
    // from a successful one. Surface it as a server error instead.
    return NextResponse.json(prettyObject(e), { status: 500 });
  }
}

/**
 * Forwards the incoming request to the Qiniu upstream API.
 *
 * Rewrites the path (dropping the local API prefix), applies the
 * server-configured base URL, enforces a 10-minute timeout via
 * AbortController, optionally filters disallowed models when
 * `customModels` is configured, and streams the upstream response
 * back with buffering disabled.
 *
 * @param req Incoming Next.js request to proxy upstream.
 * @returns The upstream Response with adjusted headers.
 */
async function request(req: NextRequest) {
  // Lets the timeout below cancel the in-flight upstream fetch.
  const controller = new AbortController();

  // Strip the local route prefix so only the upstream path remains.
  // NOTE(review): replaceAll removes EVERY occurrence of the prefix,
  // not just a leading one — confirm the prefix cannot appear mid-path.
  let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.Qiniu, "");

  // Server-configured endpoint wins; otherwise use the default base URL.
  let baseUrl = serverConfig.qiniuUrl || QINIU_BASE_URL;

  // Normalize: ensure a scheme is present...
  if (!baseUrl.startsWith("http")) {
    baseUrl = `https://${baseUrl}`;
  }

  // ...and drop a trailing slash so `${baseUrl}${path}` joins cleanly.
  if (baseUrl.endsWith("/")) {
    baseUrl = baseUrl.slice(0, -1);
  }

  console.log("[Proxy] ", path);
  console.log("[Base Url]", baseUrl);

  // Abort the upstream call after 10 minutes (long-running streams allowed).
  const timeoutId = setTimeout(
    () => {
      controller.abort();
    },
    10 * 60 * 1000,
  );

  const fetchUrl = `${baseUrl}${path}`;
  const fetchOptions: RequestInit = {
    headers: {
      "Content-Type": "application/json",
      // Pass the caller's Authorization through unchanged.
      Authorization: req.headers.get("Authorization") ?? "",
    },
    method: req.method,
    // Stream the request body through by default (no buffering).
    body: req.body,
    redirect: "manual",
    // Required by the Fetch spec when sending a streaming request body.
    // @ts-ignore
    duplex: "half",
    signal: controller.signal,
  };

  // When a custom model allow/deny list is configured, the body must be
  // read into memory so the requested model name can be inspected.
  if (serverConfig.customModels && req.body) {
    try {
      // Consumes the request stream; the buffered text replaces the
      // stream set above as the outgoing body.
      const clonedBody = await req.text();
      fetchOptions.body = clonedBody;

      const jsonBody = JSON.parse(clonedBody) as { model?: string };

      // Reject requests for models the server has disallowed.
      if (
        isModelNotavailableInServer(
          serverConfig.customModels,
          jsonBody?.model as string,
          ServiceProvider.Qiniu as string,
        )
      ) {
        return NextResponse.json(
          {
            error: true,
            message: `you are not allowed to use ${jsonBody?.model} model`,
          },
          {
            status: 403,
          },
        );
      }
    } catch (e) {
      // Best-effort filter: on parse failure, log and forward as-is.
      console.error(`[Qiniu] filter`, e);
    }
  }
  try {
    const res = await fetch(fetchUrl, fetchOptions);

    // Copy headers so they are mutable, then:
    const newHeaders = new Headers(res.headers);
    // - drop upstream auth challenges (we manage auth ourselves);
    newHeaders.delete("www-authenticate");
    // - disable nginx response buffering so SSE streams flush immediately.
    newHeaders.set("X-Accel-Buffering", "no");

    // Re-wrap the upstream body so the adjusted headers take effect.
    return new Response(res.body, {
      status: res.status,
      statusText: res.statusText,
      headers: newHeaders,
    });
  } finally {
    // Always clear the timer, whether the fetch succeeded or threw.
    clearTimeout(timeoutId);
  }
}
11 changes: 11 additions & 0 deletions app/client/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ import { XAIApi } from "./platforms/xai";
import { ChatGLMApi } from "./platforms/glm";
import { SiliconflowApi } from "./platforms/siliconflow";
import { Ai302Api } from "./platforms/ai302";
import { QiniuApi } from "./platforms/qiniu";

export const ROLES = ["system", "user", "assistant"] as const;
export type MessageRole = (typeof ROLES)[number];
Expand Down Expand Up @@ -177,6 +178,9 @@ export class ClientApi {
case ModelProvider["302.AI"]:
this.llm = new Ai302Api();
break;
case ModelProvider.Qiniu:
this.llm = new QiniuApi();
break;
default:
this.llm = new ChatGPTApi();
}
Expand Down Expand Up @@ -270,6 +274,7 @@ export function getHeaders(ignoreHeaders: boolean = false) {
const isSiliconFlow =
modelConfig.providerName === ServiceProvider.SiliconFlow;
const isAI302 = modelConfig.providerName === ServiceProvider["302.AI"];
const isQiniu = modelConfig.providerName === ServiceProvider.Qiniu;
const isEnabledAccessControl = accessStore.enabledAccessControl();
const apiKey = isGoogle
? accessStore.googleApiKey
Expand Down Expand Up @@ -297,6 +302,8 @@ export function getHeaders(ignoreHeaders: boolean = false) {
: ""
: isAI302
? accessStore.ai302ApiKey
: isQiniu
? accessStore.qiniuApiKey
: accessStore.openaiApiKey;
return {
isGoogle,
Expand All @@ -312,6 +319,7 @@ export function getHeaders(ignoreHeaders: boolean = false) {
isChatGLM,
isSiliconFlow,
isAI302,
isQiniu,
apiKey,
isEnabledAccessControl,
};
Expand Down Expand Up @@ -341,6 +349,7 @@ export function getHeaders(ignoreHeaders: boolean = false) {
isChatGLM,
isSiliconFlow,
isAI302,
isQiniu,
apiKey,
isEnabledAccessControl,
} = getConfig();
Expand Down Expand Up @@ -393,6 +402,8 @@ export function getClientApi(provider: ServiceProvider): ClientApi {
return new ClientApi(ModelProvider.SiliconFlow);
case ServiceProvider["302.AI"]:
return new ClientApi(ModelProvider["302.AI"]);
case ServiceProvider.Qiniu:
return new ClientApi(ModelProvider.Qiniu);
default:
return new ClientApi(ModelProvider.GPT);
}
Expand Down
Loading