Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 14 additions & 9 deletions chat2db-client/src/blocks/Setting/AiSetting/aiTypeConfig.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ const AITypeName = {
[AIType.OPENAI]: 'Open AI',
[AIType.AZUREAI]: 'Azure AI',
[AIType.RESTAI]: i18n('setting.tab.custom'),
[AIType.OLLAMAAI]: i18n('setting.tab.aiType.ollama'),
};

const AIFormConfig: Record<AIType, IAiConfigBooleans> = {
Expand All @@ -34,29 +35,33 @@ const AIFormConfig: Record<AIType, IAiConfigBooleans> = {
},
[AIType.WENXINAI]: {
apiKey: true,
apiHost: true,
apiHost: 'https://api.weixin.qq.com',
},
[AIType.TONGYIQIANWENAI]: {
apiKey: true,
apiHost: true,
model: true,
apiHost: 'https://dashscope.aliyuncs.com/api/v1',
model: 'qwen-turbo',
},
[AIType.OPENAI]: {
apiKey: true,
apiHost: 'https://api.openai.com/',
httpProxyHost: true,
httpProxyPort: true,
httpProxyHost: '127.0.0.1',
httpProxyPort: '8080',
// model: 'gpt-3.5-turbo',
},
[AIType.AZUREAI]: {
apiKey: true,
apiHost: true,
model: true,
apiHost: 'https://your-resource.openai.azure.com',
model: 'gpt-35-turbo',
},
[AIType.RESTAI]: {
apiKey: true,
apiHost: true,
model: true,
apiHost: 'https://api.openai.com/v1',
model: 'gpt-3.5-turbo',
},
[AIType.OLLAMAAI]: {
ollamaApiHost: 'http://localhost:11434',
ollamaModel: 'qwen2.5-coder',
},
};

Expand Down
129 changes: 121 additions & 8 deletions chat2db-client/src/blocks/Setting/AiSetting/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -45,23 +45,90 @@ export default function SettingAI(props: IProps) {
const aiSqlSource = e.target.value;

// 查询对应ai类型的配置
const res = await configService.getAiSystemConfig({
aiSqlSource,
});
setAiConfig(res);
try {
const res = await configService.getAiSystemConfig({
aiSqlSource,
});

// Special handling for Ollama AI - set defaults if no config found
if (aiSqlSource === AIType.OLLAMAAI) {
if (!res || !res.apiHost || !res.model) {
const ollamaConfig: IAiConfig = {
aiSqlSource: AIType.OLLAMAAI,
ollamaApiHost: res?.apiHost || 'http://localhost:11434',
ollamaModel: res?.model || 'qwen3-coder',
apiHost: res?.apiHost || 'http://localhost:11434',
model: res?.model || 'qwen3-coder',
};
setAiConfig(ollamaConfig);
} else {
// Map server response to Ollama config
const ollamaConfig: IAiConfig = {
aiSqlSource: AIType.OLLAMAAI,
ollamaApiHost: res.apiHost,
ollamaModel: res.model,
apiHost: res.apiHost,
model: res.model,
};
setAiConfig(ollamaConfig);
}
} else {
setAiConfig(res);
}
} catch (error) {
console.error('Failed to get AI config:', error);
// Set default config for other AI types
if (aiSqlSource === AIType.OLLAMAAI) {
setAiConfig({
aiSqlSource: AIType.OLLAMAAI,
ollamaApiHost: 'http://localhost:11434',
ollamaModel: 'qwen3-coder',
});
} else {
setAiConfig({
aiSqlSource: aiSqlSource as AIType,
});
}
}
};

/**
 * Apply the current AI configuration: fill in provider-specific fields,
 * then hand the normalized config to the parent via props.handleApplyAiConfig.
 */
const handleApplyAiConfig = () => {
// Shallow copy so the form state held in aiConfig is not mutated in place.
const newAiConfig = { ...aiConfig };
// NOTE(review): trailing-slash normalization was deliberately disabled; kept for reference.
/*if (newAiConfig.apiHost && !newAiConfig.apiHost?.endsWith('/')) {
newAiConfig.apiHost = newAiConfig.apiHost + '/';
}*/

// Chat2DB's hosted AI always routes through the gateway, overriding any form value.
if (aiConfig?.aiSqlSource === AIType.CHAT2DBAI) {
newAiConfig.apiHost = `${window._appGatewayParams.baseUrl || 'http://test.sqlgpt.cn/gateway'}${'/model/'}`;
}

// Ollama: default any missing fields, then map them onto the standard
// apiHost/model fields the server expects.
if (aiConfig?.aiSqlSource === AIType.OLLAMAAI) {
if (!newAiConfig.ollamaApiHost) {
newAiConfig.ollamaApiHost = 'http://localhost:11434';
}
if (!newAiConfig.ollamaModel) {
newAiConfig.ollamaModel = 'deepseek-v3.1:671b-cloud';
}

// Server compatibility: persist Ollama values in the generic fields too.
newAiConfig.apiHost = newAiConfig.ollamaApiHost;
newAiConfig.model = newAiConfig.ollamaModel;

// Serialize the Ollama-specific pair into `content` for server-side storage.
newAiConfig.content = JSON.stringify({
ollamaApiHost: newAiConfig.ollamaApiHost,
ollamaModel: newAiConfig.ollamaModel
});
}

if (props.handleApplyAiConfig) {
props.handleApplyAiConfig(newAiConfig);
}
};
Expand All @@ -82,6 +149,48 @@ export default function SettingAI(props: IProps) {
</Flex>
);
}

// Special handling for Ollama AI
if (aiConfig?.aiSqlSource === AIType.OLLAMAAI) {
return (
<>
<Form layout="vertical">
<Form.Item
label="Ollama API Host"
className={styles.title}
>
<Input
autoComplete="off"
value={aiConfig.ollamaApiHost || 'http://localhost:11434'}
placeholder="http://localhost:11434"
onChange={(e) => {
setAiConfig({ ...aiConfig, ollamaApiHost: e.target.value });
}}
/>
</Form.Item>
<Form.Item
label="Ollama Model"
className={styles.title}
>
<Input
autoComplete="off"
value={aiConfig.ollamaModel || 'qwen2.5-coder'}
placeholder="qwen2.5-coder"
onChange={(e) => {
setAiConfig({ ...aiConfig, ollamaModel: e.target.value });
}}
/>
</Form.Item>
</Form>
<div className={styles.bottomButton}>
<Button type="primary" onClick={handleApplyAiConfig}>
{i18n('setting.button.apply')}
</Button>
</div>
</>
);
}

return (
<>
<Form layout="vertical">
Expand All @@ -95,7 +204,11 @@ export default function SettingAI(props: IProps) {
<Input
autoComplete="off"
value={aiConfig[key]}
placeholder={AIFormConfig[aiConfig?.aiSqlSource]?.[key]}
placeholder={
typeof AIFormConfig[aiConfig?.aiSqlSource]?.[key] === 'boolean'
? ''
: AIFormConfig[aiConfig?.aiSqlSource]?.[key] || ''
}
onChange={(e) => {
setAiConfig({ ...aiConfig, [key]: e.target.value });
}}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,11 +14,11 @@ interface IProps<T> extends React.HTMLAttributes<HTMLDivElement> {
}

export default function LoadingContent<T>(props: IProps<T>) {
const { children, className, data = true, handleEmpty = false, empty, isLoading, coverLoading, ...args } = props;
const { children, className, data = true, handleEmpty = false, empty, isLoading, coverLoading } = props;
const isEmpty = !isLoading && handleEmpty && !(data as any)?.length;

const renderContent = () => {
if ((isLoading || !data) && !coverLoading) {
if ((isLoading || data === null || data === undefined) && !coverLoading) {
return <StateIndicator state="loading" />;
}

Expand Down
1 change: 1 addition & 0 deletions chat2db-client/src/i18n/en-us/setting.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ export default {
'setting.tab.aiType.baichuan': 'BaiChuan AI',
'setting.tab.aiType.wenxin': 'WenXin AI',
'setting.tab.aiType.tongyiqianwen': 'TongYiQianWen AI',
'setting.tab.aiType.ollama': 'Ollama AI',
'setting.tab.aiType.custom.tips': 'The API format is consistent with the OpenAI API format',
'setting.label.serviceAddress': 'Service Address',
'setting.button.apply': 'Apply',
Expand Down
1 change: 1 addition & 0 deletions chat2db-client/src/i18n/zh-cn/setting.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ export default {
'setting.tab.aiType.baichuan': '百川',
'setting.tab.aiType.wenxin': '文心一言',
'setting.tab.aiType.tongyiqianwen': '通义千问',
'setting.tab.aiType.ollama': 'Ollama AI',
'setting.tab.aiType.custom.tips': '接口格式与OpenAI接口格式一致',
'setting.label.serviceAddress': '服务地址',
'setting.button.apply': '应用',
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import React, { useState, useEffect, useRef } from 'react';
import i18n from '@/i18n';
import { Input, Dropdown, Modal } from 'antd';
import { Input, Dropdown, Modal, Form } from 'antd';
import Iconfont from '@/components/Iconfont';
import LoadingContent from '@/components/Loading/LoadingContent';
import historyServer from '@/service/history';
Expand All @@ -20,6 +20,7 @@ const SaveList = () => {
const saveBoxListRef = useRef<any>(null);
const consoleList = useWorkspaceStore((state) => state.savedConsoleList);
const [editData, setEditData] = useState<any>(null);
const [form] = Form.useForm();

useEffect(() => {
getSavedConsoleList();
Expand Down Expand Up @@ -173,27 +174,28 @@ const SaveList = () => {
title={i18n('common.text.rename')}
open={!!editData}
onOk={() => {
const params: any = {
id: editData.id,
name: editData.name,
};
historyServer.updateSavedConsole(params).then(() => {

getSavedConsoleList();
setEditData(null);
form.validateFields().then((values) => {
const params: any = {
id: editData.id,
name: values.name,
};
historyServer.updateSavedConsole(params).then(() => {
getSavedConsoleList();
setEditData(null);
form.resetFields();
});
});
}}
onCancel={() => setEditData(null)}
onCancel={() => {
setEditData(null);
form.resetFields();
}}
>
<Input
value={editData?.name}
onChange={(e) => {
setEditData({
...editData,
name: e.target.value,
});
}}
/>
<Form form={form} initialValues={{ name: editData?.name }}>
<Form.Item name="name" rules={[{ required: true, message: 'Please enter name' }]}>
<Input />
</Form.Item>
</Form>
</Modal>
</>
);
Expand Down
3 changes: 0 additions & 3 deletions chat2db-client/src/store/setting/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,3 @@ export const setHoldingService = (holdingService: boolean) => {
useSettingStore.setState({ holdingService });
}




1 change: 1 addition & 0 deletions chat2db-client/src/typings/ai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ export enum AIType {
OPENAI = 'OPENAI',
AZUREAI = 'AZUREAI',
RESTAI = 'RESTAI',
OLLAMAAI = 'OLLAMAAI',
}

export interface IRemainingUse {
Expand Down
3 changes: 3 additions & 0 deletions chat2db-client/src/typings/setting.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,4 +9,7 @@ export interface IAiConfig {
stream?: boolean;
secretKey?:string;
model?: string;
ollamaApiHost?: string;
ollamaModel?: string;
content?: string;
}
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,11 @@ public enum AiSqlSourceEnum implements BaseEnum<String> {
*/
FASTCHATAI("FAST CHAT AI"),

/**
* OLLAMA AI
*/
OLLAMAAI("OLLAMA AI"),

;

final String description;
Expand Down Expand Up @@ -91,4 +96,9 @@ public String getCode() {
return this.name();
}

/**
 * Human-readable label for this AI source (set via the enum constructor).
 */
@Override
public String getDescription() {
    return description;
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -71,4 +71,14 @@ public class ChatGptConfig {
* deploymentId of the deployed model, default gpt-3.5-turbo
*/
private String azureDeploymentId;

/**
* Ollama API endpoint, default http://localhost:11434
*/
private String ollamaApiHost;

/**
* Ollama model name, e.g., qwen3-coder, deepseek-v3.1
*/
private String ollamaModel;
}
Loading