Skip to content

Commit

Permalink
fix: error handling when active model service on certain model card is not created yet
Browse files Browse the repository at this point in the history
  • Loading branch information
lizable committed Jan 6, 2025
1 parent 5de6654 commit c18dd4b
Show file tree
Hide file tree
Showing 2 changed files with 166 additions and 112 deletions.
122 changes: 122 additions & 0 deletions react/src/components/ChatContent.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
import { useTanQuery } from '../hooks/reactQueryAlias';
import { ChatContentEndpointDetailQuery } from './__generated__/ChatContentEndpointDetailQuery.graphql';
import { Model } from './lablupTalkativotUI/ChatUIModal';
import LLMChatCard from './lablupTalkativotUI/LLMChatCard';
import { ReloadOutlined } from '@ant-design/icons';
import { Alert, Button } from 'antd';
import graphql from 'babel-plugin-relay/macro';
import _ from 'lodash';
import React from 'react';
import { useTranslation } from 'react-i18next';
import { useLazyLoadQuery } from 'react-relay/hooks';

// Props for ChatContent: identifies the target model-service endpoint and
// where its OpenAI-compatible API is served.
interface ChatContentProps {
endpointId: string; // endpoint UUID; passed as $endpointId to the token-list GraphQL query
endpointUrl: string; // base URL of the running model service
basePath: string; // path prefix joined with '/models' and used as the chat base URL
}

/**
 * Chat panel for a single healthy model-service endpoint.
 *
 * Fetches the endpoint's access tokens via Relay, picks the newest valid one,
 * queries the service's `/models` route with it, and renders an LLMChatCard.
 * Shows a "cannot find model" alert with a Refresh action when the model list
 * is empty or the fetch failed.
 */
const ChatContent: React.FC<ChatContentProps> = ({
  endpointId,
  endpointUrl,
  basePath,
}) => {
  const { t } = useTranslation();

  // 'network-only' bypasses the Relay store so a freshly issued token is
  // always picked up when this panel mounts.
  const { endpoint_token_list } =
    useLazyLoadQuery<ChatContentEndpointDetailQuery>(
      graphql`
        query ChatContentEndpointDetailQuery(
          $endpointId: UUID!
          $tokenListOffset: Int!
          $tokenListLimit: Int!
        ) {
          endpoint_token_list(
            limit: $tokenListLimit
            offset: $tokenListOffset
            endpoint_id: $endpointId
          ) {
            total_count
            items {
              id
              token
              endpoint_id
              created_at
              valid_until
            }
          }
        }
      `,
      {
        tokenListLimit: 100,
        tokenListOffset: 0,
        // endpointId is already typed string; no assertion needed.
        endpointId: endpointId,
      },
      {
        fetchPolicy: 'network-only',
      },
    );

  // Token with the latest expiry; '' when the endpoint has no tokens yet.
  const newestValidToken =
    _.orderBy(endpoint_token_list?.items, ['valid_until'], ['desc'])[0]
      ?.token ?? '';

  const {
    data: modelsResult,
    // error,
    refetch,
  } = useTanQuery<{
    data: Array<Model>;
  }>({
    queryKey: ['models', endpointUrl],
    queryFn: () => {
      return fetch(new URL(basePath + '/models', endpointUrl).toString(), {
        headers: {
          Authorization: `BackendAI ${newestValidToken}`,
        },
      })
        .then((res) => res.json())
        .catch((err) => {
          // Best-effort: a failed fetch resolves to undefined so the
          // "cannot find model" alert below renders instead of crashing.
          console.log(err);
        });
    },
  });

  return (
    <LLMChatCard
      endpointId={endpointId || ''}
      baseURL={new URL(basePath, endpointUrl).toString()}
      models={_.map(modelsResult?.data, (m) => ({
        id: m.id,
        name: m.id,
      }))}
      apiKey={newestValidToken}
      fetchOnClient
      style={{ flex: 1 }}
      allowCustomModel={false}
      alert={
        _.isEmpty(modelsResult?.data) && (
          <Alert
            type="warning"
            showIcon
            message={t('chatui.CannotFindModel')}
            action={
              <Button
                icon={<ReloadOutlined />}
                onClick={() => {
                  refetch();
                }}
              >
                {t('button.Refresh')}
              </Button>
            }
          />
        )
      }
      // BUG FIX: guard the element access too — with an empty/undefined model
      // list, `data?.[0].id` threw a TypeError before falling back to 'custom'.
      modelId={modelsResult?.data?.[0]?.id ?? 'custom'}
      modelToken={newestValidToken}
    />
  );
};

export default ChatContent;
156 changes: 44 additions & 112 deletions react/src/components/ModelCardChat.tsx
Original file line number Diff line number Diff line change
@@ -1,15 +1,10 @@
import { useUpdatableState } from '../hooks';
import { useTanQuery } from '../hooks/reactQueryAlias';
import { ModelCardChatEndpointDetailQuery } from './__generated__/ModelCardChatEndpointDetailQuery.graphql';
import ChatContent from './ChatContent';
import { ModelCardChatQuery } from './__generated__/ModelCardChatQuery.graphql';
import { Model } from './lablupTalkativotUI/ChatUIModal';
import LLMChatCard from './lablupTalkativotUI/LLMChatCard';
import { ReloadOutlined } from '@ant-design/icons';
import { Alert, Button, theme } from 'antd/lib';
import { Alert, Card, theme } from 'antd/lib';
import graphql from 'babel-plugin-relay/macro';
import dayjs from 'dayjs';
import _ from 'lodash';
import React, { useState } from 'react';
import React from 'react';
import { useTranslation } from 'react-i18next';
import { useLazyLoadQuery } from 'react-relay';

Expand All @@ -27,13 +22,6 @@ const ModelCardChat: React.FC<ModelCardChatProps> = ({
const { t } = useTranslation();
const { token } = theme.useToken();
const [fetchKey, updateFetchKey] = useUpdatableState('first');
const [paginationState] = useState<{
current: number;
pageSize: number;
}>({
current: 1,
pageSize: 100,
});

const { endpoint_list } = useLazyLoadQuery<ModelCardChatQuery>(
graphql`
Expand Down Expand Up @@ -74,104 +62,48 @@ const ModelCardChat: React.FC<ModelCardChatProps> = ({
return item?.status == 'HEALTHY';
});

const { endpoint_token_list } =
useLazyLoadQuery<ModelCardChatEndpointDetailQuery>(
graphql`
query ModelCardChatEndpointDetailQuery(
$endpointId: UUID!
$tokenListOffset: Int!
$tokenListLimit: Int!
) {
endpoint_token_list(
limit: $tokenListLimit
offset: $tokenListOffset
endpoint_id: $endpointId
) {
total_count
items {
id
token
endpoint_id
created_at
valid_until
}
}
}
`,
{
tokenListLimit: 100,
tokenListOffset: 0,
endpointId: healthyEndpoint[0]?.endpoint_id || '',
},
{
fetchPolicy: 'network-only',
fetchKey,
},
);

const newestValidToken = _.orderBy(
endpoint_token_list?.items,
['valid_until'],
['desc'],
)[0]?.token;

const {
data: modelsResult,
// error,
refetch,
} = useTanQuery<{
data: Array<Model>;
}>({
queryKey: ['models', healthyEndpoint[0]?.url],
queryFn: () => {
return fetch(
new URL(basePath + '/models', healthyEndpoint[0]?.url || '').toString(),
{
headers: {
Authorization: `BackendAI ${newestValidToken}`,
},
// FIXME: temporally render chat UI only if at least one endpoint is healthy.
return healthyEndpoint.length > 0 ? (
<ChatContent
endpointId={healthyEndpoint[0]?.endpoint_id as string}
endpointUrl={healthyEndpoint[0]?.url as string}
basePath={basePath}
/>
) : (
<Card
style={{
height: '100%',
width: '100%',
display: 'flex',
flexDirection: 'column',
minHeight: '50vh',
}}
styles={{
body: {
backgroundColor: token.colorFillQuaternary,
borderRadius: 0,
flex: 1,
display: 'flex',
flexDirection: 'column',
padding: 0,
height: '50%',
position: 'relative',
},
)
.then((res) => res.json())
.catch((err) => {
console.log(err);
});
},
});

return (
<LLMChatCard
endpointId={endpoint_list?.items[0]?.endpoint_id || ''}
baseURL={new URL(basePath, healthyEndpoint[0]?.url || '').toString()}
models={_.map(modelsResult?.data, (m) => ({
id: m.id,
name: m.id,
}))}
apiKey={newestValidToken}
fetchOnClient
style={{ flex: 1 }}
allowCustomModel={false}
alert={
_.isEmpty(modelsResult?.data) && (
<Alert
type="warning"
showIcon
message={t('chatui.CannotFindModel')}
action={
<Button
icon={<ReloadOutlined />}
onClick={() => {
refetch();
}}
>
{t('button.Refresh')}
</Button>
}
/>
)
}
modelId={modelsResult?.data?.[0].id ?? 'custom'}
modelToken={newestValidToken}
actions: {
paddingLeft: token.paddingContentHorizontal,
paddingRight: token.paddingContentHorizontal,
},
header: {
zIndex: 1,
},
}}
actions={[
<Alert
message="Chat UI is not provided yet. Why don't you create a new one?"
type="warning"
showIcon
/>,
]}
/>
);
};
Expand Down

0 comments on commit c18dd4b

Please sign in to comment.