[Index management] Refactor api_integration tests for create inference endpoint (#187521)

## Summary
* delete the underlying trained model during `after all` cleanup (a sketch of the resulting hook follows below)
* handle request timeout errors when creating the inference endpoint (see the hedged sketch after the `api.ts` diff)

Tested against QA deployment and locally.
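
For context, here is a minimal sketch of the resulting `after` hook, using only the FTR `ml` service helpers that appear in the diff below; identifiers (`inferenceId`, `taskType`, `modelId`, `log`) mirror the test file.

```ts
// Sketch of the after-all cleanup this change introduces: remove the inference
// endpoint, the underlying trained model, and any ML saved objects left behind.
after(async () => {
  try {
    log.debug(`Deleting inference endpoint`);
    await ml.api.deleteInferenceEndpoint(inferenceId, taskType);
    log.debug(`Deleting underlying trained model`);
    await ml.api.deleteTrainedModelES(modelId);
    await ml.testResources.cleanMLSavedObjects();
  } catch (err) {
    log.debug('[Cleanup error] Error deleting trained model or saved ml objects');
    throw err;
  }
});
```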

---------

Co-authored-by: kibanamachine <[email protected]>
saarikabhasi and kibanamachine authored Jul 10, 2024
1 parent 696bb88 commit 46b2154
Showing 5 changed files with 82 additions and 68 deletions.
@@ -6,6 +6,7 @@
*/

import expect from '@kbn/expect';
import { InferenceAPIConfigResponse } from '@kbn/ml-trained-models-utils';
import { FtrProviderContext } from '../../../ftr_provider_context';

const API_BASE_PATH = '/api/index_management';
@@ -17,46 +18,48 @@ export default function ({ getService }: FtrProviderContext) {
const inferenceId = 'my-elser-model';
const taskType = 'sparse_embedding';
const service = 'elser';
const modelId = '.elser_model_2';

describe('Inference endpoints', function () {
before(async () => {
log.debug(`Creating inference endpoint`);
try {
await ml.api.createInferenceEndpoint(inferenceId, taskType, {
service,
service_settings: {
num_allocations: 1,
num_threads: 1,
},
});
} catch (err) {
log.debug('[Setup error] Error creating inference endpoint');
throw err;
}
});

after(async () => {
// Cleanup inference endpoints created for testing purposes
try {
log.debug(`Deleting inference endpoint`);
await ml.api.deleteInferenceEndpoint(inferenceId, taskType);
log.debug(`Deleting underlying trained model`);
await ml.api.deleteTrainedModelES(modelId);
await ml.testResources.cleanMLSavedObjects();
} catch (err) {
log.debug('[Cleanup error] Error deleting inference endpoint');
log.debug('[Cleanup error] Error deleting trained model or saved ml objects');
throw err;
}
});

describe('get inference endpoints', () => {
it('returns the existing inference endpoints', async () => {
const { body: inferenceEndpoints } = await supertest
.get(`${API_BASE_PATH}/inference/all`)
.set('kbn-xsrf', 'xxx')
.set('x-elastic-internal-origin', 'xxx')
.expect(200);

expect(inferenceEndpoints).to.be.ok();
expect(inferenceEndpoints[0].model_id).to.eql(inferenceId);
it('create inference endpoint', async () => {
log.debug(`create inference endpoint`);
await ml.api.createInferenceEndpoint(inferenceId, taskType, {
service,
service_settings: {
num_allocations: 1,
num_threads: 1,
model_id: modelId,
},
});
});
it('get all inference endpoints and confirm inference endpoint exist', async () => {
const { body: inferenceEndpoints } = await supertest
.get(`${API_BASE_PATH}/inference/all`)
.set('kbn-xsrf', 'xxx')
.set('x-elastic-internal-origin', 'xxx')
.expect(200);

expect(inferenceEndpoints).to.be.ok();
expect(
inferenceEndpoints.some(
(endpoint: InferenceAPIConfigResponse) => endpoint.model_id === inferenceId
)
).to.be(true);
});
it('can delete inference endpoint', async () => {
log.debug(`Deleting inference endpoint`);
await ml.api.deleteInferenceEndpoint(inferenceId, taskType);
log.debug('> Inference endpoint deleted');
});
});
}
14 changes: 9 additions & 5 deletions x-pack/test/functional/services/ml/api.ts
@@ -247,18 +247,22 @@ export function MachineLearningAPIProvider({ getService }: FtrProviderContext) {
log.debug(`Inference endpoint '${inferenceId}' already exists. Nothing to create.`);
return;
}
const { body, status } = await esSupertest
.put(`/_inference/${taskType}/${inferenceId}`)
const response = await kbnSupertest
.put(`/internal/ml/_inference/${taskType}/${inferenceId}`)
.set(getCommonRequestHeader('1'))
.send(requestBody);
this.assertResponseStatusCode(200, status, body);

return body;
this.assertResponseStatusCode(200, response.status, response.body);
log.debug('> Inference endpoint created');
return response;
},

async deleteInferenceEndpoint(inferenceId: string, taskType: string) {
const { body, status } = await esSupertest.delete(`/_inference/${taskType}/${inferenceId}`);
this.assertResponseStatusCode(200, status, body);

expect(body)
.to.have.property('acknowledged')
.eql(true, 'Response for delete inference endpoint should be acknowledged');
return body;
},

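The request-timeout handling mentioned in the summary is not visible in the hunks above. The sketch below is a hypothetical illustration of how a create call could tolerate a request timeout while the ELSER model is still downloading or allocating: only `ml.api.createInferenceEndpoint` and `log` come from the FTR services shown in the diff; the wrapper function and the message-based timeout detection are assumptions, not code from this commit.

```ts
// Hypothetical sketch (not the code from this commit): treat an HTTP request
// timeout on endpoint creation as "model still provisioning" instead of
// failing the test outright. The timeout detection is an assumption.
async function createInferenceEndpointTolerantOfTimeout(
  ml: {
    api: {
      createInferenceEndpoint: (id: string, task: string, body: object) => Promise<unknown>;
    };
  },
  log: { debug: (msg: string) => void },
  inferenceId: string,
  taskType: string,
  requestBody: object
): Promise<void> {
  try {
    await ml.api.createInferenceEndpoint(inferenceId, taskType, requestBody);
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);
    // ELSER model download/allocation can exceed the request timeout, so a
    // timeout here does not necessarily mean the endpoint was not created.
    if (!/timed?\s?out/i.test(message)) {
      throw err;
    }
    log.debug('Create inference endpoint request timed out; endpoint may still be initializing');
  }
}
```

A later assertion, such as the "get all inference endpoints" test above, can then confirm that the endpoint actually exists.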
3 changes: 2 additions & 1 deletion x-pack/test/tsconfig.json
@@ -171,6 +171,7 @@
"@kbn/alerting-comparators",
"@kbn/alerting-state-types",
"@kbn/reporting-server",
"@kbn/data-quality-plugin"
"@kbn/data-quality-plugin",
"@kbn/ml-trained-models-utils"
]
}
@@ -6,6 +6,7 @@
*/

import expect from '@kbn/expect';
import { InferenceAPIConfigResponse } from '@kbn/ml-trained-models-utils';
import { InternalRequestHeader, RoleCredentials } from '../../../../shared/services';
import { FtrProviderContext } from '../../../ftr_provider_context';

@@ -17,57 +18,61 @@ export default function ({ getService }: FtrProviderContext) {
const inferenceId = 'my-elser-model';
const taskType = 'sparse_embedding';
const service = 'elser';

const modelId = '.elser_model_2';
const svlCommonApi = getService('svlCommonApi');
const svlUserManager = getService('svlUserManager');
const supertestWithoutAuth = getService('supertestWithoutAuth');
let roleAuthc: RoleCredentials;
let internalReqHeader: InternalRequestHeader;

// FLAKY: https://github.com/elastic/kibana/issues/185216
describe.skip('Inference endpoints', function () {
// test adds new trained model '.elser_model_2_linux-x86_64', but does not clean it. Follow up tests are affected
this.tags(['failsOnMKI']);
describe('Inference endpoints', function () {
before(async () => {
roleAuthc = await svlUserManager.createApiKeyForRole('admin');
internalReqHeader = svlCommonApi.getInternalRequestHeader();
log.debug(`Creating inference endpoint`);
try {
await ml.api.createInferenceEndpoint(inferenceId, taskType, {
service,
service_settings: {
num_allocations: 1,
num_threads: 1,
},
});
} catch (err) {
log.debug('[Setup error] Error creating inference endpoint');
throw err;
}
});

after(async () => {
// Cleanup inference endpoints created for testing purposes
try {
log.debug(`Deleting inference endpoint`);
await ml.api.deleteInferenceEndpoint(inferenceId, taskType);
log.debug(`Deleting underlying trained model`);
await ml.api.deleteTrainedModelES(modelId);
await ml.testResources.cleanMLSavedObjects();
} catch (err) {
log.debug('[Cleanup error] Error deleting inference endpoint');
log.debug('[Cleanup error] Error deleting trained model and saved ml objects');
throw err;
}
await svlUserManager.invalidateApiKeyForRole(roleAuthc);
});

describe('get inference endpoints', () => {
it('returns the existing inference endpoints', async () => {
const { body: inferenceEndpoints } = await supertestWithoutAuth
.get(`${API_BASE_PATH}/inference/all`)
.set(internalReqHeader)
.set(roleAuthc.apiKeyHeader)
.expect(200);

expect(inferenceEndpoints).to.be.ok();
expect(inferenceEndpoints[0].model_id).to.eql(inferenceId);
it('create inference endpoint', async () => {
log.debug(`create inference endpoint`);
await ml.api.createInferenceEndpoint(inferenceId, taskType, {
service,
service_settings: {
num_allocations: 1,
num_threads: 1,
model_id: modelId,
},
});
});
it('get all inference endpoints and confirm inference endpoint exist', async () => {
const { body: inferenceEndpoints } = await supertestWithoutAuth
.get(`${API_BASE_PATH}/inference/all`)
.set(internalReqHeader)
.set(roleAuthc.apiKeyHeader)
.expect(200);

expect(inferenceEndpoints).to.be.ok();
expect(
inferenceEndpoints.some(
(endpoint: InferenceAPIConfigResponse) => endpoint.model_id === inferenceId
)
).to.be(true);
});
it('can delete inference endpoint', async () => {
log.debug(`Deleting inference endpoint`);
await ml.api.deleteInferenceEndpoint(inferenceId, taskType);
log.debug('> Inference endpoint deleted');
});
});
}
1 change: 1 addition & 0 deletions x-pack/test_serverless/tsconfig.json
@@ -105,5 +105,6 @@
"@kbn/config-schema",
"@kbn/features-plugin",
"@kbn/observability-ai-assistant-plugin",
"@kbn/ml-trained-models-utils",
]
}
