apply lint fixes and additional tests
LinoyBitan1 committed Jan 8, 2025
1 parent 0835f15 commit 3c945fe
Showing 28 changed files with 655 additions and 728 deletions.
47 changes: 1 addition & 46 deletions frontend/src/__mocks__/mockKserveMetricsConfigMap.ts
@@ -271,21 +271,7 @@ export const MOCK_NIM_METRICS_CONFIG_MISSING_QUERY = `{
}
]
},
{
"title": "Current running, waiting, and max requests count",
"type": "CURRENT_REQUESTS",
"queries": [
{
"title": "Requests waiting",
"query": "num_requests_waiting{namespace='tomer-test-2', pod=~'nim-deploy-predictor-.*'}"
},
{
"title": "Requests running",
"query": "num_requests_running{namespace='tomer-test-2', pod=~'nim-deploy-predictor-.*'}"
}
]
},
{
"title": "Tokens count",
"type": "TOKENS_COUNT",
"queries": [
@@ -294,36 +280,6 @@ export const MOCK_NIM_METRICS_CONFIG_MISSING_QUERY = `{
"query": "round(rate(prompt_tokens_total{namespace='tomer-test-2', pod=~'nim-deploy-predictor-.*'}[1m]))"
}
]
},
{
"title": "Time to first token",
"type": "TIME_TO_FIRST_TOKEN",
"queries": [
{
"title": "Time to first token",
"query": "rate(time_to_first_token_seconds_sum{namespace='tomer-test-2', pod=~'nim-deploy-predictor-.*'}[1m])"
}
]
},
{
"title": "Time per output token",
"type": "TIME_PER_OUTPUT_TOKEN",
"queries": [
{
"title": "Time per output token",
"query": "rate(time_per_output_token_seconds_sum{namespace='tomer-test-2', pod=~'nim-deploy-predictor-.*'}[1m])"
}
]
},
{
"title": "Requests outcomes",
"type": "REQUEST_OUTCOMES",
"queries": [
{
"title": "Number of successful incoming requests",
"query": "round(sum(increase(request_success_total{namespace='tomer-test-2', pod=~'nim-deploy-predictor-.*'}[5m])))"
}
]
}
]
}`;
@@ -341,7 +297,6 @@ export const mockKserveMetricsConfigMap = ({
return mockConfigMap({ data, namespace, name: `${modelName}-metrics-dashboard` });
};


export const mockNimMetricsConfigMap = ({
namespace = 'test-project',
modelName = 'test-inference-service',
@@ -353,4 +308,4 @@ export const mockNimMetricsConfigMap = ({
supported: String(supported),
};
return mockConfigMap({ data, namespace, name: `${modelName}-metrics-dashboard` });
};
};
7 changes: 2 additions & 5 deletions frontend/src/__tests__/cypress/cypress/pages/modelMetrics.ts
@@ -59,12 +59,11 @@ class ModelMetricsNim extends ModelMetricsGlobal {

findTab() {
return {
nimTab: cy.findByTestId('nim-tab')
}
nimTab: cy.findByTestId('nim-tab'),
};
}
}


class ModelMetricsKserve extends ModelMetricsPerformance {
findKserveAreaDisabledCard() {
return cy.findByTestId('kserve-metrics-disabled');
@@ -79,7 +78,6 @@ class ModelMetricsKserve extends ModelMetricsPerformance {
}
}


class ModelMetricsKserveNim extends ModelMetricsNim {
findKserveAreaDisabledCard() {
return cy.findByTestId('kserve-metrics-disabled');
@@ -94,7 +92,6 @@ class ModelMetricsKserveNim extends ModelMetricsNim {
}
}


class ModelMetricsBias extends ModelMetricsGlobal {
visit(project: string, model: string, disableA11y = false) {
cy.visitWithLogin(`/modelServing/${project}/metrics/${model}/bias`);
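A minimal usage sketch for the reworked findTab() helper above, assuming the modelMetricsKserveNim page-object instance referenced in the spec further down is exported from this module (the instance name is taken from those tests, not confirmed by this hunk):

// Hypothetical Cypress usage; asserts the NIM tab located by findTab() is visible.
modelMetricsKserveNim.findTab().nimTab.should('be.visible');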
@@ -205,10 +205,10 @@ const initIntercepts = ({
},
isTrustyAIInstalled
? mockTrustyAIServiceForDbK8sResource({
isAvailable: isTrustyAIAvailable,
// If you're already installed for the test, it doesn't matter when
creationTimestamp: new Date('1970-01-01').toISOString(),
})
isAvailable: isTrustyAIAvailable,
// If you're already installed for the test, it doesn't matter when
creationTimestamp: new Date('1970-01-01').toISOString(),
})
: { statusCode: 404, body: mock404Error({}) },
);
cy.interceptK8s(RouteModel, mockRouteK8sResource({ name: 'trustyai-service' }));
@@ -786,25 +786,8 @@ describe('KServe performance metrics', () => {
});
});



//Nim Metrics Tests
describe('KServe NIM metrics', () => {

it('should inform user when area disabled', () => {
initIntercepts({
disableTrustyBiasMetrics: false,
disablePerformanceMetrics: false,
disableNIMModelServing: false,
disableKServeMetrics: true,
hasServingData: false,
hasBiasData: false,
inferenceServices: [mockInferenceServiceK8sResource({ isModelMesh: false })],
});
modelMetricsKserveNim.visit('test-project', 'test-inference-service');
modelMetricsKserveNim.findKserveAreaDisabledCard().should('be.visible');
});

it('should show error when ConfigMap is missing', () => {
initIntercepts({
disableTrustyBiasMetrics: false,
@@ -846,7 +829,6 @@
modelMetricsKserveNim.findUnsupportedRuntimeCard().should('be.visible');
});


it('should handle a malformed graph definition gracefully', () => {
initIntercepts({
disableTrustyBiasMetrics: false,
@@ -867,7 +849,6 @@
modelMetricsKserveNim.findUnknownErrorCard().should('be.visible');
});


it('should display only 2 graphs, when the config specifies', () => {
initIntercepts({
disableTrustyBiasMetrics: false,
@@ -879,14 +860,13 @@
inferenceServices: [mockInferenceServiceK8sResource({ isModelMesh: false })],
});

cy.interceptK8s(
ConfigMapModel,
mockNimMetricsConfigMap({ config: MOCK_NIM_METRICS_CONFIG_3 }),
);
cy.interceptK8s(ConfigMapModel, mockNimMetricsConfigMap({ config: MOCK_NIM_METRICS_CONFIG_3 }));

modelMetricsKserveNim.visit('test-project', 'test-inference-service');
modelMetricsKserveNim.getMetricsChart('GPU cache usage over time').shouldHaveData();
modelMetricsKserveNim.getMetricsChart('Current running, waiting, and max requests count').shouldHaveData();
modelMetricsKserveNim
.getMetricsChart('Current running, waiting, and max requests count')
.shouldHaveData();
modelMetricsKserveNim.getAllMetricsCharts().should('have.length', 2);
});

@@ -907,13 +887,9 @@
);

modelMetricsKserveNim.visit('test-project', 'test-inference-service');
modelMetricsKserveNim.getAllMetricsCharts().should('have.length', 6);
modelMetricsKserveNim.getAllMetricsCharts().should('have.length', 2);
modelMetricsKserveNim.getMetricsChart('GPU cache usage over time').shouldHaveData();
modelMetricsKserveNim.getMetricsChart('Current running, waiting, and max requests count').shouldHaveData();
modelMetricsKserveNim.getMetricsChart('Tokens count').shouldHaveData();
modelMetricsKserveNim.getMetricsChart('Time to first token').shouldHaveData();
modelMetricsKserveNim.getMetricsChart('Time per output token').shouldHaveData();
modelMetricsKserveNim.getMetricsChart('Requests outcomes').shouldHaveData();
});

it('charts should not error out if a query is missing and there is no data', () => {
@@ -933,13 +909,9 @@
);

modelMetricsKserveNim.visit('test-project', 'test-inference-service');
modelMetricsKserveNim.getAllMetricsCharts().should('have.length', 6);
modelMetricsKserveNim.getAllMetricsCharts().should('have.length', 2);
modelMetricsKserveNim.getMetricsChart('GPU cache usage over time').shouldHaveNoData();
modelMetricsKserveNim.getMetricsChart('Current running, waiting, and max requests count').shouldHaveNoData();
modelMetricsKserveNim.getMetricsChart('Tokens count').shouldHaveNoData();
modelMetricsKserveNim.getMetricsChart('Time to first token').shouldHaveNoData();
modelMetricsKserveNim.getMetricsChart('Time per output token').shouldHaveNoData();
modelMetricsKserveNim.getMetricsChart('Requests outcomes').shouldHaveNoData();
});

it('charts should show data when serving data is available', () => {
@@ -957,7 +929,9 @@
modelMetricsKserveNim.visit('test-project', 'test-inference-service');
modelMetricsKserveNim.getAllMetricsCharts().should('have.length', 6);
modelMetricsKserveNim.getMetricsChart('GPU cache usage over time').shouldHaveData();
modelMetricsKserveNim.getMetricsChart('Current running, waiting, and max requests count').shouldHaveData();
modelMetricsKserveNim
.getMetricsChart('Current running, waiting, and max requests count')
.shouldHaveData();
modelMetricsKserveNim.getMetricsChart('Tokens count').shouldHaveData();
modelMetricsKserveNim.getMetricsChart('Time to first token').shouldHaveData();
modelMetricsKserveNim.getMetricsChart('Time per output token').shouldHaveData();
@@ -979,7 +953,9 @@

modelMetricsKserveNim.visit('test-project', 'test-inference-service');
modelMetricsKserveNim.getMetricsChart('GPU cache usage over time').shouldHaveNoData();
modelMetricsKserveNim.getMetricsChart('Current running, waiting, and max requests count').shouldHaveNoData();
modelMetricsKserveNim
.getMetricsChart('Current running, waiting, and max requests count')
.shouldHaveNoData();
modelMetricsKserveNim.getMetricsChart('Tokens count').shouldHaveNoData();
modelMetricsKserveNim.getMetricsChart('Time to first token').shouldHaveNoData();
modelMetricsKserveNim.getMetricsChart('Time per output token').shouldHaveNoData();
1 change: 1 addition & 0 deletions frontend/src/api/index.ts
@@ -39,6 +39,7 @@ export * from './prometheus/pvcs';
export * from './prometheus/serving';
export * from './prometheus/distributedWorkloads';
export * from './prometheus/kservePerformanceMetrics';
export * from './prometheus/NimPerformanceMetrics';

// Network error handling
export * from './errorUtils';