-
Notifications
You must be signed in to change notification settings - Fork 0
/
main.js
70 lines (57 loc) · 1.87 KB
/
main.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
import { config } from 'dotenv';
import OpenAI from 'openai';
import chalk from 'chalk';
import {
enterMessage,
getMaxTokens,
getSystemMessage,
getModel,
} from './utils.js'
/**
 * 23.10.03
 *
 * @todo retain chat context — FINISHED
 *   Note: the retained history also counts against the token limit.
 *
 * @todo fine-tune the inquirer prompt types for a better user
 *   experience — FINISHED
 *   The current inquirer.js implementation is decent, so there is
 *   little need to fall back to its lower-level native APIs.
 */
// Load environment variables from .env into process.env.
// Must run before the API key is read from process.env below.
config();
// Shared OpenAI API client, authenticated with the API_KEY env variable.
const client = new OpenAI({
  apiKey: process.env.API_KEY,
});
/**
 * Interactive chat loop.
 *
 * Prompts the user for a model and a max-token limit once, then repeatedly
 * asks for a system message and a user message, sends the exchange to the
 * OpenAI chat-completions API, and prints the assistant's reply. Typing
 * "exit" ends the session. Conversation history is retained across turns
 * and resent on every request, so long sessions consume the token budget.
 *
 * @returns {Promise<void>} resolves when the user exits the loop
 */
async function main() {
  console.clear();
  const selectedModel = await getModel();
  const temperature = 0.3; // fixed, fairly deterministic sampling
  console.clear();
  const max_tokens = await getMaxTokens();
  console.clear();
  const conversationHistory = [];
  while (true) {
    const system_message = await getSystemMessage();
    const userMessage = await enterMessage();
    if (userMessage === 'exit') {
      console.log("Exiting program");
      break;
    }
    // Record the user turn once; it reaches the API via the spread below.
    conversationHistory.push({ role: "user", content: userMessage });
    try {
      const completion = await client.chat.completions.create({
        model: selectedModel,
        messages: [
          { role: "system", content: system_message },
          // BUG FIX: the current user message was previously appended
          // again after the history, so the model received it twice per
          // request (doubling its weight and wasting tokens).
          ...conversationHistory,
        ],
        temperature,
        max_tokens,
      });
      const reply = completion.choices[0].message.content;
      // Retain the assistant turn so subsequent requests keep context.
      conversationHistory.push({ role: "assistant", content: reply });
      console.log(`\n${chalk.redBright('Assistant: ')}${reply}\n`);
    } catch (err) {
      // Roll back the failed user turn so the stored history matches
      // what the model has actually seen, then keep the loop alive.
      conversationHistory.pop();
      console.log(`\n${chalk.redBright('Error: ')}${err.message}\n`);
    }
  }
}
main();