Unverified commit cb2cbf50 authored by balibabu, committed by GitHub

feat: support Xinference (#319)

### What problem does this PR solve?

Support Xorbits Inference (Xinference) as a model provider.

Issue link: #299

### Type of change


- [x] New Feature (non-breaking change which adds functionality)
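
For context, registering an Xinference-served model through the new dialog ends up submitting a body roughly like the one below. This is a hedged sketch assembled from the hunks in this diff: `llm_factory`, `llm_name`, and `model_type` appear in the OllamaModal changes, while the `api_base` field name, the sample model UID, and the endpoint URL are assumptions (Xinference's OpenAI-compatible endpoint usually listens on port 9997).

```typescript
// Hypothetical IAddLlmRequestBody payload for an Xinference model (illustrative only).
const addXinferenceLlm = {
  llm_factory: 'Xinference',            // factory name passed through llmFactory
  llm_name: 'qwen-chat-7b',             // the model UID shown by Xinference
  model_type: 'chat',                   // or 'embedding', per the model-type select
  api_base: 'http://localhost:9997/v1', // assumed field name for the "Base url" input
};
```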
parent 4fa768e7
<svg t="1712735583481" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="6804"
width="22" height="22">
<path
d="M512 0a512 512 0 1 0 0 1024A512 512 0 0 0 512 0z m0 910.222222A398.222222 398.222222 0 1 1 512 113.777778a398.222222 398.222222 0 0 1 0 796.444444z"
fill="#ABEFC6" p-id="6805"></path>
d="M 512 0 a 512 512 0 1 0 0 1024 A 512 512 0 0 0 512 0 z m 0 990.2 A 438.2 438.2 0 1 1 512 33.8 a 438.2 438.2 0 0 1 0 956.4 z"
fill="#abefc6" p-id="6805"></path>
<path
d="M448.056889 584.305778L322.389333 458.638222 243.541333 537.543111l205.596445 205.539556h0.056889l0.512 0.512 325.859555-325.859556-80.440889-80.440889z"
fill="#17B26A" p-id="6806"></path>
<path d="M 448.1 584.3 L 322.4 458.6 L 243.5 537.5 l 205.6 205.5 h 0.1 l 0.5 0.5 l 325.9 -325.9 l -80.4 -80.4 z"
fill="#17b26a" p-id="6806"></path>
</svg>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<svg id="_图层_1" data-name="图层 1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"
viewBox="0 0 283.46 283.46">
<defs>
<style>
.cls-1 {
fill: url(#_未命名的渐变_5-2);
}
.cls-2 {
fill: url(#_未命名的渐变_9);
}
.cls-3 {
fill: url(#_未命名的渐变_5);
}
</style>
<linearGradient id="_未命名的渐变_5" data-name="未命名的渐变 5" x1="27.03" y1="287.05" x2="253.15" y2="1.14"
gradientUnits="userSpaceOnUse">
<stop offset="0" stop-color="#e9a85e" />
<stop offset="1" stop-color="#f52b76" />
</linearGradient>
<linearGradient id="_未命名的渐变_5-2" data-name="未命名的渐变 5" x1="25.96" y1="286.21" x2="252.09" y2=".3"
xlink:href="#_未命名的渐变_5" />
<linearGradient id="_未命名的渐变_9" data-name="未命名的渐变 9" x1="-474.33" y1="476.58" x2="-160.37" y2="476.58"
gradientTransform="translate(669.07 -75.9) rotate(33.75)" gradientUnits="userSpaceOnUse">
<stop offset="0" stop-color="#6a0cf5" />
<stop offset="1" stop-color="#ab66f3" />
</linearGradient>
</defs>
<g>
<path class="cls-3"
d="M96.16,145.42c7.71,8.57,16.96,16.66,27.5,23.7,8.96,5.99,18.29,10.89,27.66,14.64,17.56-16.62,32.16-36.17,43.09-57.84L257.92,0l-110.84,87.19c-20.47,16.1-37.72,35.87-50.92,58.23Z" />
<path class="cls-1"
d="M87.08,223.86c-7.97-5.33-15.5-10.92-22.59-16.7l-38.49,76.31,69.17-54.4c-2.71-1.69-5.41-3.41-8.09-5.2Z" />
</g>
<path class="cls-2"
d="M229.81,101.27c20.81,27.65,26.93,58.36,12.79,79.51-20.63,30.88-76.59,29.69-124.98-2.64-48.39-32.34-70.89-83.58-50.25-114.46,14.14-21.15,44.86-27.25,78.36-18.6C87.76,20.46,32.59,22.86,11.22,54.85c-26.86,40.19,9.8,111.82,81.88,159.99,72.08,48.17,152.29,54.64,179.15,14.45,21.38-31.99,2.49-83.88-42.44-128.02Z" />
</svg>
\ No newline at end of file
@@ -397,13 +397,14 @@ export default {
upgrade: 'Upgrade',
addLlmTitle: 'Add LLM',
modelName: 'Model name',
modelUid: 'Model UID',
modelNameMessage: 'Please input your model name!',
modelType: 'Model type',
modelTypeMessage: 'Please input your model type!',
addLlmBaseUrl: 'Base url',
baseUrlNameMessage: 'Please input your base url!',
vision: 'Does it support Vision?',
ollamaLink: 'How to integrate Ollama',
ollamaLink: 'How to integrate {{name}}',
},
message: {
registered: 'Registered!',
@@ -382,13 +382,14 @@ export default {
upgrade: '升级',
addLlmTitle: '添加 LLM',
modelName: '模型名称',
modelUid: '模型UID',
modelType: '模型类型',
addLlmBaseUrl: '基础 Url',
vision: '是否支持 Vision',
modelNameMessage: '请输入模型名称!',
modelTypeMessage: '请输入模型类型!',
baseUrlNameMessage: '请输入基础 Url!',
ollamaLink: '如何集成 Ollama',
ollamaLink: '如何集成 {{name}}',
},
message: {
registered: '注册成功',
@@ -14,3 +14,5 @@ export const UserSettingIconMap = {
};
export * from '@/constants/setting';
export const LocalLlmFactories = ['Ollama', 'Xinference'];
@@ -132,6 +132,7 @@ export const useSelectModelProvidersLoading = () => {
export const useSubmitOllama = () => {
const loading = useOneNamespaceEffectsLoading('settingModel', ['add_llm']);
const [selectedLlmFactory, setSelectedLlmFactory] = useState<string>('');
const addLlm = useAddLlm();
const {
visible: llmAddingVisible,
@@ -149,11 +150,17 @@ export const useSubmitOllama = () => {
[hideLlmAddingModal, addLlm],
);
const handleShowLlmAddingModal = (llmFactory: string) => {
setSelectedLlmFactory(llmFactory);
showLlmAddingModal();
};
return {
llmAddingLoading: loading,
onLlmAddingOk,
llmAddingVisible,
hideLlmAddingModal,
showLlmAddingModal,
showLlmAddingModal: handleShowLlmAddingModal,
selectedLlmFactory,
};
};
@@ -25,6 +25,7 @@ import {
} from 'antd';
import { useCallback } from 'react';
import SettingTitle from '../components/setting-title';
import { isLocalLlmFactory } from '../utils';
import ApiKeyModal from './api-key-modal';
import {
useSelectModelProvidersLoading,
@@ -43,6 +44,7 @@ const IconMap = {
'ZHIPU-AI': 'zhipu',
文心一言: 'wenxin',
Ollama: 'ollama',
Xinference: 'xinference',
};
const LlmIcon = ({ name }: { name: string }) => {
@@ -89,7 +91,7 @@ const ModelCard = ({ item, clickApiKey }: IModelCardProps) => {
<Col span={12} className={styles.factoryOperationWrapper}>
<Space size={'middle'}>
<Button onClick={handleApiKeyClick}>
{item.name === 'Ollama' ? t('addTheModel') : 'API-Key'}
{isLocalLlmFactory(item.name) ? t('addTheModel') : 'API-Key'}
<SettingOutlined />
</Button>
<Button onClick={handleShowMoreClick}>
@@ -147,12 +149,13 @@ const UserSettingModel = () => {
showLlmAddingModal,
onLlmAddingOk,
llmAddingLoading,
selectedLlmFactory,
} = useSubmitOllama();
const handleApiKeyClick = useCallback(
(llmFactory: string) => {
if (llmFactory === 'Ollama') {
showLlmAddingModal();
if (isLocalLlmFactory(llmFactory)) {
showLlmAddingModal(llmFactory);
} else {
showApiKeyModal({ llm_factory: llmFactory });
}
@@ -161,8 +164,8 @@ const UserSettingModel = () => {
);
const handleAddModel = (llmFactory: string) => () => {
if (llmFactory === 'Ollama') {
showLlmAddingModal();
if (isLocalLlmFactory(llmFactory)) {
showLlmAddingModal(llmFactory);
} else {
handleApiKeyClick(llmFactory);
}
@@ -252,6 +255,7 @@ const UserSettingModel = () => {
hideModal={hideLlmAddingModal}
onOk={onLlmAddingOk}
loading={llmAddingLoading}
llmFactory={selectedLlmFactory}
></OllamaModal>
</>
);
@@ -13,7 +13,8 @@ const OllamaModal = ({
hideModal,
onOk,
loading,
}: IModalProps<IAddLlmRequestBody>) => {
llmFactory,
}: IModalProps<IAddLlmRequestBody> & { llmFactory: string }) => {
const [form] = Form.useForm<FieldType>();
const { t } = useTranslate('setting');
@@ -28,7 +29,7 @@ const OllamaModal = ({
const data = {
...omit(values, ['vision']),
model_type: modelType,
llm_factory: 'Ollama',
llm_factory: llmFactory,
};
console.info(data);
@@ -37,7 +38,7 @@ const OllamaModal = ({
return (
<Modal
title={t('addLlmTitle')}
title={t('addLlmTitle', { name: llmFactory })}
open={visible}
onOk={handleOk}
onCancel={hideModal}
@@ -46,11 +47,11 @@ const OllamaModal = ({
return (
<Flex justify={'space-between'}>
<a
href="https://github.com/infiniflow/ragflow/blob/main/docs/ollama.md"
href={`https://github.com/infiniflow/ragflow/blob/main/docs/${llmFactory.toLowerCase()}.md`}
target="_blank"
rel="noreferrer"
>
{t('ollamaLink')}
{t('ollamaLink', { name: llmFactory })}
</a>
<Space>{originNode}</Space>
</Flex>
@@ -76,7 +77,7 @@ const OllamaModal = ({
</Select>
</Form.Item>
<Form.Item<FieldType>
label={t('modelName')}
label={t(llmFactory === 'Xinference' ? 'modelUid' : 'modelName')}
name="llm_name"
rules={[{ required: true, message: t('modelNameMessage') }]}
>
import { LocalLlmFactories } from './constants';
export const isLocalLlmFactory = (llmFactory: string) =>
LocalLlmFactories.some((x) => x === llmFactory);
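
For reference, a minimal sketch of how this helper is meant to be consumed, mirroring the `handleApiKeyClick` / `handleAddModel` hunks above; the wrapper function and its callback parameters are illustrative stand-ins for `showLlmAddingModal` and `showApiKeyModal`, not code from this commit.

```typescript
import { isLocalLlmFactory } from '../utils'; // same relative path as in the component hunk

// Decide which dialog to open for a given provider card (sketch).
const openProviderDialog = (
  llmFactory: string,
  showLlmAddingModal: (factory: string) => void,
  showApiKeyModal: (payload: { llm_factory: string }) => void,
) => {
  if (isLocalLlmFactory(llmFactory)) {
    showLlmAddingModal(llmFactory); // 'Ollama' or 'Xinference': collect model UID/name and base URL
  } else {
    showApiKeyModal({ llm_factory: llmFactory }); // hosted providers only need an API key
  }
};
```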