From c6547f60e5dd12d7d0f79dea4382d7e9cc6d4270 Mon Sep 17 00:00:00 2001
From: "Pavelka, Roman (ADV D EU CZ PDS1 CIO 1)"
Date: Tue, 31 Oct 2023 14:16:53 +0100
Subject: [PATCH 1/2] Work on multiline input

---
 cli.py | 39 +++++++++++++++++++++++++++++++++++++--
 1 file changed, 37 insertions(+), 2 deletions(-)

diff --git a/cli.py b/cli.py
index e077de5..2401305 100755
--- a/cli.py
+++ b/cli.py
@@ -1,14 +1,34 @@
 #!/usr/bin/env python3
+import argparse
 import os
 import readline
 
 import core
 
 
+def check_exit(user_input):
+    return user_input in ('q', 'x', 'quit', 'exit')
+
+
 def cli_input():
     user_input = input("> ")
-    if user_input in ('q', 'x', 'quit', 'exit'):
+    if check_exit(user_input):
+        return None
+    return user_input
+
+
+def cli_input_multiline():
+    user_input = []
+    while True:
+        line = input("> ")
+        if line == "SEND":
+            break
+        user_input.append(line)
+    user_input = '\n'.join(user_input)
+
+    if check_exit(user_input):
         return None
+
     return user_input
 
 
@@ -17,4 +37,19 @@ def cli_output(msg, info):
     print(info)
 
 
-core.GptCore(cli_input, cli_output).main()
+def main():
+    parser = argparse.ArgumentParser(description='')
+    parser.add_argument('-m', '--multiline', action='store_true',
+                        help='Multiline mode, input SEND when you are happy.')
+    args = parser.parse_args()
+
+    if args.multiline:
+        input_f = cli_input_multiline
+    else:
+        input_f = cli_input
+
+    core.GptCore(input_f, cli_output).main()
+
+
+if __name__ == "__main__":
+    main()

From fe3bdf85ac9aa60b208955866e579a35099e3f12 Mon Sep 17 00:00:00 2001
From: "Pavelka, Roman (ADV D EU CZ PDS1 CIO 1)"
Date: Wed, 1 Nov 2023 16:24:01 +0100
Subject: [PATCH 2/2] Improve documentation, comments and messages

---
 README.md |  3 +++
 cli.py    |  4 ++--
 core.py   | 28 ++++++++++++++++++++++++++++
 test.py   | 28 ++++++++++++++++++++++++++++
 4 files changed, 61 insertions(+), 2 deletions(-)
 create mode 100755 test.py

diff --git a/README.md b/README.md
index bbe0a77..6d33e10 100644
--- a/README.md
+++ b/README.md
@@ -36,6 +36,9 @@ python cli.py
 
 The CLI client will prompt you to enter your input. The response from ChatGPT will be printed in the console.
 
+You can also enable multiline mode with the `-m` or `--multiline` option. In this mode, you can enter multiple
+lines and type "SEND" when you are done.
+
 Quit with either `q`, `x`, `exit` or `quit` as the input.
 
 ### GUI Client (WIP)
diff --git a/cli.py b/cli.py
index 2401305..b7678b7 100755
--- a/cli.py
+++ b/cli.py
@@ -38,9 +38,9 @@ def cli_output(msg, info):
 
 
 def main():
-    parser = argparse.ArgumentParser(description='')
+    parser = argparse.ArgumentParser(description="Interact with OpenAI's GPT-4 model.")
     parser.add_argument('-m', '--multiline', action='store_true',
-                        help='Multiline mode, input SEND when you are happy.')
+                        help='Enable multiline input mode. Input "SEND" when you are done.')
     args = parser.parse_args()
 
     if args.multiline:
diff --git a/core.py b/core.py
index 9afd782..88414fb 100644
--- a/core.py
+++ b/core.py
@@ -12,6 +12,22 @@
 
 
 class GptCore:
+    """
+    A class to interact with OpenAI's GPT-4 model.
+
+    Attributes
+    ----------
+    input : function
+        a function to get user input, takes no arguments, returns str or None
+    output : function
+        a function to output the model's response and info, takes str and Info
+        object, returns None
+
+    Methods
+    -------
+    main():
+        The main loop to interact with the model.
+    """
     def __init__(self, input, output):
         self.input = input
         self.output = output
@@ -42,6 +58,18 @@ def main(self):
 
 @dataclass
 class Info:
+    """
+    A class to represent the information about the interaction with the model.
+
+    Attributes
+    ----------
+    prompt_tokens : int
+        the number of tokens in the prompt
+    completion_tokens : int
+        the number of tokens in the completion
+    price : float
+        the total price of the interaction
+    """
     prompt_tokens: int
     completion_tokens: int
     price: float
diff --git a/test.py b/test.py
new file mode 100755
index 0000000..9ad9b91
--- /dev/null
+++ b/test.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python3
+import openai
+import os
+import sys
+
+MODEL = "gpt-4"
+
+os.chdir(os.path.dirname(__file__))
+
+with open('.api_key', 'r') as f:
+    openai.api_key = f.read().strip()
+
+prompt = " ".join(sys.argv[1:])
+
+# A system message can provide further control over tone and task.
+# A multi-turn conversation can also be sent, as in the commented lines below.
+messages = [
+#    {"role": "system", "content": "You are a helpful assistant."},
+    {"role": "user", "content": prompt},
+#    {"role": "user", "content": "Knock knock."},
+#    {"role": "assistant", "content": "Who's there?"},
+#    {"role": "user", "content": "Orange."},  # And the model would proceed with "Orange who?"
+]
+
+response = openai.ChatCompletion.create(
+    model=MODEL, messages=messages, temperature=0.1)
+
+print(response.choices[0]["message"]["content"].strip())