From 0b53ade8de2f3b6dae52d92f9fe9f45690eb8bcb Mon Sep 17 00:00:00 2001
From: Florian Reifschneider
Date: Mon, 2 Sep 2024 15:35:18 -0700
Subject: [PATCH 1/2] Turns class field functions into class methods

---
 src/client.js | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/src/client.js b/src/client.js
index 5be9e7f..f183dc3 100644
--- a/src/client.js
+++ b/src/client.js
@@ -107,7 +107,7 @@ class MistralClient {
    * @param {*} formData
    * @return {Promise<*>}
    */
-  _request = async function(method, path, request, signal, formData = null) {
+  async _request(method, path, request, signal, formData = null) {
     const url = `${this.endpoint}/${path}`;
     const options = {
       method: method,
@@ -207,7 +207,7 @@ class MistralClient {
    * @param {*} responseFormat
    * @return {Promise}
    */
-  _makeChatCompletionRequest = function(
+  _makeChatCompletionRequest(
     model,
     messages,
     tools,
@@ -253,7 +253,7 @@ class MistralClient {
    * @param {*} stream
    * @return {Promise}
    */
-  _makeCompletionRequest = function(
+  _makeCompletionRequest(
     model,
     prompt,
     suffix,
@@ -285,7 +285,7 @@ class MistralClient {
    * Returns a list of the available models
    * @return {Promise}
    */
-  listModels = async function() {
+  async listModels() {
     const response = await this._request('get', 'v1/models');
     return response;
   };
@@ -317,7 +317,7 @@ class MistralClient {
    * default timeout signal
    * @return {Promise}
    */
-  chat = async function(
+  async chat(
     {
       model,
       messages,
@@ -383,7 +383,7 @@ class MistralClient {
    * default timeout signal
    * @return {Promise}
    */
-  chatStream = async function* (
+  async * chatStream(
     {
       model,
       messages,
@@ -446,7 +446,7 @@ class MistralClient {
    * e.g. ['What is the best French cheese?']
    * @return {Promise}
    */
-  embeddings = async function({model, input}) {
+  async embeddings({model, input}) {
     const request = {
       model: model,
       input: input,
@@ -478,7 +478,7 @@ class MistralClient {
    * default timeout signal
    * @return {Promise}
    */
-  completion = async function(
+  async completion(
     {model, prompt, suffix, temperature, maxTokens, topP, randomSeed, stop},
     {signal} = {},
   ) {
@@ -525,7 +525,7 @@ class MistralClient {
    * default timeout signal
    * @return {Promise}
    */
-  completionStream = async function* (
+  async * completionStream(
     {model, prompt, suffix, temperature, maxTokens, topP, randomSeed, stop},
     {signal} = {},
   ) {

From b0ad0ea3139bd80bf67bd03712abc6a8c284b786 Mon Sep 17 00:00:00 2001
From: Florian Reifschneider
Date: Mon, 2 Sep 2024 22:51:43 +0000
Subject: [PATCH 2/2] Fixes code styling

---
 src/client.js | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/client.js b/src/client.js
index f183dc3..a6b4ef4 100644
--- a/src/client.js
+++ b/src/client.js
@@ -383,7 +383,7 @@ class MistralClient {
    * default timeout signal
    * @return {Promise}
    */
-  async * chatStream(
+  async* chatStream(
     {
       model,
       messages,
@@ -525,7 +525,7 @@ class MistralClient {
    * default timeout signal
    * @return {Promise}
    */
-  async * completionStream(
+  async* completionStream(
     {model, prompt, suffix, temperature, maxTokens, topP, randomSeed, stop},
     {signal} = {},
   ) {