Add model redeploying status #3513

Merged
@@ -8,7 +8,7 @@ import {
} from '@patternfly/react-icons';
import { InferenceServiceKind } from '~/k8sTypes';
import { InferenceServiceModelState } from '~/pages/modelServing/screens/types';
import { getInferenceServiceActiveModelState, getInferenceServiceStatusMessage } from './utils';
import { getInferenceServiceModelState, getInferenceServiceStatusMessage } from './utils';
import { useModelStatus } from './useModelStatus';

type InferenceServiceStatusProps = {
@@ -30,7 +30,7 @@ const InferenceServiceStatus: React.FC<InferenceServiceStatusProps> = ({

const state = modelStatus?.failedToSchedule
? 'FailedToLoad'
: getInferenceServiceActiveModelState(inferenceService);
: getInferenceServiceModelState(inferenceService);

const statusIcon = () => {
switch (state) {
@@ -3,7 +3,7 @@ import { InferenceServiceKind } from '~/k8sTypes';
import { getRoute } from '~/api';
import { getUrlFromKserveInferenceService } from '~/pages/modelServing/screens/projects/utils';
import { InferenceServiceModelState } from '~/pages/modelServing/screens/types';
import { getInferenceServiceActiveModelState } from './utils';
import { getInferenceServiceModelState } from './utils';

const useRouteForInferenceService = (
inferenceService: InferenceServiceKind,
@@ -17,7 +17,7 @@
const routeName = inferenceService.metadata.name;
const routeNamespace = inferenceService.metadata.namespace;
const kserveRoute = isKServe ? getUrlFromKserveInferenceService(inferenceService) : null;
const state = getInferenceServiceActiveModelState(inferenceService);
const state = getInferenceServiceModelState(inferenceService);
const kserveLoaded = state === InferenceServiceModelState.LOADED;

React.useEffect(() => {
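For context on the utils.ts change that follows: a redeploy shows up in the InferenceService status as an active model that is still serving while a target model is coming up. A minimal sketch of that shape, assuming KServe's modelStatus field names; the object and the literal state strings are illustrative, not taken from this PR:

// Illustration only (not part of this PR): the status shape that signals a redeploy.
// Field names follow KServe's InferenceService modelStatus; the string values are the
// states the dashboard's InferenceServiceModelState enum is assumed to map to.
const redeployingStatus = {
  modelStatus: {
    states: {
      activeModelState: 'Loaded', // previous revision is still serving traffic
      targetModelState: 'Loading', // replacement revision is still coming up
    },
  },
};
// With the utils.ts change below, getInferenceServiceStatusMessage reports this
// combination as 'Redeploying' instead of falling through to 'Loaded'.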
frontend/src/pages/modelServing/screens/global/utils.ts (26 changes: 19 additions & 7 deletions)
@@ -3,24 +3,36 @@ import { getDisplayNameFromK8sResource } from '~/concepts/k8s/utils';
import { InferenceServiceModelState, ModelStatus } from '~/pages/modelServing/screens/types';
import { asEnumMember } from '~/utilities/utils';

export const getInferenceServiceActiveModelState = (
export const getInferenceServiceModelState = (
is: InferenceServiceKind,
): InferenceServiceModelState =>
asEnumMember(is.status?.modelStatus?.states?.activeModelState, InferenceServiceModelState) ||
asEnumMember(is.status?.modelStatus?.states?.targetModelState, InferenceServiceModelState) ||
asEnumMember(is.status?.modelStatus?.states?.activeModelState, InferenceServiceModelState) ||
InferenceServiceModelState.UNKNOWN;

export const getInferenceServiceStatusMessage = (is: InferenceServiceKind): string => {
const activeModelState = is.status?.modelStatus?.states?.activeModelState;
const targetModelState = is.status?.modelStatus?.states?.targetModelState;

const failedToLoad = InferenceServiceModelState.FAILED_TO_LOAD;
const isFailedToLoad = activeModelState === failedToLoad || targetModelState === failedToLoad;
const stateMessage = targetModelState || activeModelState || 'Unknown';

if (
activeModelState === InferenceServiceModelState.FAILED_TO_LOAD ||
targetModelState === InferenceServiceModelState.FAILED_TO_LOAD
) {
const lastFailureMessage = is.status?.modelStatus?.lastFailureInfo?.message;
return lastFailureMessage || stateMessage;
}

const lastFailureMessage = is.status?.modelStatus?.lastFailureInfo?.message;
const stateMessage = activeModelState ?? targetModelState ?? 'Unknown';
if (
activeModelState === InferenceServiceModelState.LOADED &&
(targetModelState === InferenceServiceModelState.LOADING ||
targetModelState === InferenceServiceModelState.PENDING)
) {
return 'Redeploying';
Contributor:

@emilys314 did you get a response yet on the suggestion for the new text? If not, we can always do a follow-up to update the text if everything else is good to go with the PR.

Should add unit tests for this util function.

Contributor Author:

No updates on the wording; I believe Katie is out until next week.

Sure, I'll add tests.

}

return isFailedToLoad ? lastFailureMessage ?? stateMessage : stateMessage;
return stateMessage;
};

export const getInferenceServiceProjectDisplayName = (
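The reviewer above asked for unit tests for this util. A minimal sketch of what a test for the new 'Redeploying' branch could look like, assuming the project's Jest setup; the mockInferenceService factory and the cast it relies on are illustrative, not taken from this PR:

import { InferenceServiceKind } from '~/k8sTypes';
import { InferenceServiceModelState } from '~/pages/modelServing/screens/types';
import { getInferenceServiceStatusMessage } from '~/pages/modelServing/screens/global/utils';

// Hypothetical factory: builds the smallest object the util needs; the cast is a
// test-only shortcut, not necessarily how the project's existing mocks are written.
const mockInferenceService = (
  activeModelState?: InferenceServiceModelState,
  targetModelState?: InferenceServiceModelState,
  lastFailureMessage?: string,
): InferenceServiceKind =>
  ({
    status: {
      modelStatus: {
        states: { activeModelState, targetModelState },
        lastFailureInfo: lastFailureMessage ? { message: lastFailureMessage } : undefined,
      },
    },
  } as unknown as InferenceServiceKind);

describe('getInferenceServiceStatusMessage', () => {
  it("returns 'Redeploying' while a loaded model has a target model still loading", () => {
    const is = mockInferenceService(
      InferenceServiceModelState.LOADED,
      InferenceServiceModelState.LOADING,
    );
    expect(getInferenceServiceStatusMessage(is)).toBe('Redeploying');
  });

  it('prefers the last failure message when either state is FailedToLoad', () => {
    const is = mockInferenceService(
      InferenceServiceModelState.LOADED,
      InferenceServiceModelState.FAILED_TO_LOAD,
      'example failure message',
    );
    expect(getInferenceServiceStatusMessage(is)).toBe('example failure message');
  });
});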
@@ -20,7 +20,7 @@ import { isModelMesh } from '~/pages/modelServing/utils';
import { getPodsForKserve, getPodsForModelMesh } from '~/api';
import {
checkModelStatus,
getInferenceServiceActiveModelState,
getInferenceServiceModelState,
} from '~/pages/modelServing/screens/global/utils';
import { InferenceServiceModelState, ModelStatus } from '~/pages/modelServing/screens/types';
import { InferenceServiceKind, ServingRuntimeKind } from '~/k8sTypes';
@@ -58,7 +58,7 @@ const DeployedModelsGallery: React.FC<DeployedModelsGalleryProps> = ({
const updateServiceState = (inferenceService: InferenceServiceKind, status?: ModelStatus) => {
const state = status?.failedToSchedule
? InferenceServiceModelState.FAILED_TO_LOAD
: getInferenceServiceActiveModelState(inferenceService);
: getInferenceServiceModelState(inferenceService);

setInferenceServiceStates((prev) => {
const states = { ...prev };