
Commit 3809f63

better names
davidkyle committed Oct 24, 2024
1 parent ccc4023 commit 3809f63
Showing 1 changed file with 8 additions and 8 deletions.
@@ -675,7 +675,7 @@ public void chunkedInfer(
                // Avoid filling the inference queue by executing the batches in series
                // Each batch contains up to EMBEDDING_MAX_BATCH_SIZE inference requests
                var sequentialRunner = new BatchIterator(esModel, inputType, timeout, batchedRequests);
-               sequentialRunner.doNextRequest();
+               sequentialRunner.run();
            }
        } else {
            listener.onFailure(notElasticsearchModelException(model));
@@ -1015,11 +1015,11 @@ class BatchIterator {
            this.timeout = timeout;
        }

-       void doNextRequest() {
-           inferenceExecutor.execute(() -> inferOnBatch(requestAndListeners.get(index.get())));
+       void run() {
+           inferenceExecutor.execute(() -> inferBatchAndRunAfter(requestAndListeners.get(index.get())));
        }

-       private void inferOnBatch(EmbeddingRequestChunker.BatchRequestAndListener batch) {
+       private void inferBatchAndRunAfter(EmbeddingRequestChunker.BatchRequestAndListener batch) {
            var inferenceRequest = buildInferenceRequest(
                esModel.mlNodeDeploymentId(),
                EmptyConfigUpdate.INSTANCE,
@@ -1034,14 +1034,14 @@ private void inferOnBatch(EmbeddingRequestChunker.BatchRequestAndListener batch)
            );

            // schedule the next request once the results have been processed
-           var scheduleNextListener = ActionListener.runAfter(mlResultsListener, () -> {
+           var runNextListener = ActionListener.runAfter(mlResultsListener, () -> {
                if (index.incrementAndGet() < requestAndListeners.size()) {
-                   doNextRequest();
+                   run();
                }
            });

-           var maybeDeployListener = scheduleNextListener.delegateResponse(
-               (l, exception) -> maybeStartDeployment(esModel, exception, inferenceRequest, scheduleNextListener)
+           var maybeDeployListener = runNextListener.delegateResponse(
+               (l, exception) -> maybeStartDeployment(esModel, exception, inferenceRequest, runNextListener)
            );

            client.execute(InferModelAction.INSTANCE, inferenceRequest, maybeDeployListener);
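
Note on the pattern behind the rename: run() submits the batch at the current index to the inference executor, and inferBatchAndRunAfter wraps the ML results listener with ActionListener.runAfter so the next batch is only scheduled once the current one has been processed, keeping the batches strictly in series and avoiding filling the inference queue. The snippet below is a minimal standalone sketch of that pattern in plain Java, not the real implementation: SequentialBatchRunner, its constructor arguments, and SequentialBatchRunnerDemo are invented names that stand in for BatchIterator, the ActionListener plumbing, and the InferModelAction client call.

import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiConsumer;

// Hypothetical sketch of the BatchIterator pattern: each batch is submitted to the
// executor, and the completion callback schedules the next batch, so at most one
// batch is in flight at a time.
class SequentialBatchRunner<T> {
    private final ExecutorService executor;
    private final List<T> batches;
    private final BiConsumer<T, Runnable> inferOnBatch; // (batch, onDone) -> process batch, then call onDone
    private final AtomicInteger index = new AtomicInteger();

    SequentialBatchRunner(ExecutorService executor, List<T> batches, BiConsumer<T, Runnable> inferOnBatch) {
        this.executor = executor;
        this.batches = batches;
        this.inferOnBatch = inferOnBatch;
    }

    // Mirrors run(): submit the batch at the current index to the executor.
    void run() {
        executor.execute(() -> inferBatchAndRunAfter(batches.get(index.get())));
    }

    // Mirrors inferBatchAndRunAfter(): process one batch, then schedule the next
    // batch only after this one's completion callback has fired.
    private void inferBatchAndRunAfter(T batch) {
        inferOnBatch.accept(batch, () -> {
            if (index.incrementAndGet() < batches.size()) {
                run();
            }
        });
    }
}

class SequentialBatchRunnerDemo {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        List<String> batches = List.of("batch-0", "batch-1", "batch-2");
        CountDownLatch allDone = new CountDownLatch(batches.size());

        SequentialBatchRunner<String> runner = new SequentialBatchRunner<>(executor, batches, (batch, onDone) -> {
            System.out.println("inferring " + batch);
            allDone.countDown();
            onDone.run(); // in the real change this is the ActionListener.runAfter hook
        });

        runner.run();
        allDone.await();     // wait until every batch has been processed
        executor.shutdown();
    }
}

In the actual change the next batch is triggered via ActionListener.runAfter, so it runs whether the current batch succeeds or fails; the sketch collapses that into a single onDone callback invoked by the caller.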
