Switch to kwargs only
NivekT committed Feb 9, 2024
1 parent eda1b54 commit a42db4e
Showing 2 changed files with 42 additions and 33 deletions.
53 changes: 29 additions & 24 deletions examples/notebooks/remote/Logging.ipynb
@@ -65,19 +65,24 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"Passing arguments with keyword is STRONGLY recommended. Logging is done in the background without blocking your response."
"Passing arguments with keyword is REQUIRED. Logging is done in the background without blocking your response."
]
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 5,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"The first president of the United States was George Washington.\n"
"ename": "TypeError",
"evalue": "create() takes 1 argument(s) but 2 were given",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)",
"Cell \u001b[0;32mIn[5], line 10\u001b[0m\n\u001b[1;32m 4\u001b[0m messages \u001b[38;5;241m=\u001b[39m [\n\u001b[1;32m 5\u001b[0m {\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mrole\u001b[39m\u001b[38;5;124m\"\u001b[39m: \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124msystem\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcontent\u001b[39m\u001b[38;5;124m\"\u001b[39m: \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mYou are a helpful assistant.\u001b[39m\u001b[38;5;124m\"\u001b[39m},\n\u001b[1;32m 6\u001b[0m {\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mrole\u001b[39m\u001b[38;5;124m\"\u001b[39m: \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124muser\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcontent\u001b[39m\u001b[38;5;124m\"\u001b[39m: \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mWho was the first president?\u001b[39m\u001b[38;5;124m\"\u001b[39m},\n\u001b[1;32m 7\u001b[0m ]\n\u001b[1;32m 9\u001b[0m \u001b[38;5;66;03m# Passing arguments with keyword is STRONGLY recommended\u001b[39;00m\n\u001b[0;32m---> 10\u001b[0m response \u001b[38;5;241m=\u001b[39m \u001b[43mopenai\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mchat\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcompletions\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcreate\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmessages\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmessages\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 11\u001b[0m message \u001b[38;5;241m=\u001b[39m response\u001b[38;5;241m.\u001b[39mchoices[\u001b[38;5;241m0\u001b[39m]\u001b[38;5;241m.\u001b[39mmessage\u001b[38;5;241m.\u001b[39mcontent\n\u001b[1;32m 13\u001b[0m \u001b[38;5;28mprint\u001b[39m(message)\n",
"File \u001b[0;32m~/miniconda3/envs/ptools/lib/python3.11/site-packages/openai/_utils/_utils.py:246\u001b[0m, in \u001b[0;36mrequired_args.<locals>.inner.<locals>.wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 244\u001b[0m given_params\u001b[38;5;241m.\u001b[39madd(positional[i])\n\u001b[1;32m 245\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mIndexError\u001b[39;00m:\n\u001b[0;32m--> 246\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mTypeError\u001b[39;00m(\n\u001b[1;32m 247\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mfunc\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__name__\u001b[39m\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m() takes \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mlen\u001b[39m(positional)\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m argument(s) but \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mlen\u001b[39m(args)\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m were given\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 248\u001b[0m ) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 250\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m key \u001b[38;5;129;01min\u001b[39;00m kwargs\u001b[38;5;241m.\u001b[39mkeys():\n\u001b[1;32m 251\u001b[0m given_params\u001b[38;5;241m.\u001b[39madd(key)\n",
"\u001b[0;31mTypeError\u001b[0m: create() takes 1 argument(s) but 2 were given"
]
}
],
@@ -90,8 +95,8 @@
" {\"role\": \"user\", \"content\": \"Who was the first president?\"},\n",
"]\n",
"\n",
"# Passing arguments with keyword is STRONGLY recommended\n",
"response = openai.chat.completions.create(model=model, messages=messages)\n",
"# Passing arguments with keyword is REQUIRED\n",
"response = openai.chat.completions.create(model, messages=messages)\n",
"message = response.choices[0].message.content\n",
"\n",
"print(message)"
@@ -150,21 +155,28 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"### Logging with main async client"
"### Logging with async client instance\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "markdown",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"(1, 2)\n"
]
}
],
"source": [
"### Logging with single async client"
"def test_fn(*args):\n",
" print(args)\n",
"\n",
"\n",
"test_fn(1,2)"
]
},
{
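The `test_fn` cell added above motivates the kwargs-only switch: positional arguments arrive only as an anonymous tuple, so (unlike `json.dumps(kwargs)` in `logger.py` below) there are no parameter names under which to record them.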
@@ -181,13 +193,6 @@
"outputs": [],
"source": []
},
- {
-  "cell_type": "code",
-  "execution_count": null,
-  "metadata": {},
-  "outputs": [],
-  "source": []
- },
{
"cell_type": "code",
"execution_count": null,
22 changes: 13 additions & 9 deletions prompttools/logger/logger.py
@@ -40,30 +40,34 @@ def add_feedback(self, log_id, metric_name, value):
         self.feedback_queue.put({"log_id": log_id, "key": metric_name, "value": value})
 
     def add_to_queue(
-        self, hegel_model: str, result: dict, input_parameters: dict, latency: float, log_id: str, other_args
+        self,
+        hegel_model: str,
+        result: dict,
+        input_parameters: dict,
+        latency: float,
+        log_id: str,
     ):
-        # TODO: Deal with other_args
         self.data_queue.put(
             {
                 "hegel_model": hegel_model,
                 "result": result,
-                "input_parameters": input_parameters,  # TODO: Need to record `*args`
+                "input_parameters": input_parameters,
                 "latency": latency,
                 "log_id": log_id,
             }
         )
 
-    def execute_and_add_to_queue(self, callable_func, *args, **kwargs):
+    def execute_and_add_to_queue(self, callable_func, **kwargs):
         if "hegel_model" in kwargs:
             hegel_model = kwargs["hegel_model"]
             del kwargs["hegel_model"]
         else:
             hegel_model = None
         start = perf_counter()
-        result = callable_func(*args, **kwargs)
+        result = callable_func(**kwargs)
         latency = perf_counter() - start
         log_id = str(uuid.uuid4())
-        self.add_to_queue(hegel_model, result.model_dump_json(), json.dumps(kwargs), latency, log_id, args)
+        self.add_to_queue(hegel_model, result.model_dump_json(), json.dumps(kwargs), latency, log_id)
         result.log_id = log_id
         return result
 
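A hedged usage sketch of the keyword-only path above (the `logger` instance name and the `hegel_model` value are hypothetical; the popping of `hegel_model` from `kwargs` and the `log_id` attribute come from the diff itself):

```python
# Hypothetical sketch: `hegel_model` is consumed by the logger itself,
# and every remaining kwarg is forwarded to callable_func by name.
response = logger.execute_and_add_to_queue(
    openai.chat.completions.create,  # callable_func
    hegel_model="my-model-label",    # popped from kwargs before the call
    model="gpt-3.5-turbo",           # forwarded as create(model=..., messages=...)
    messages=[{"role": "user", "content": "Who was the first president?"}],
)
print(response.log_id)  # attached by the logger after the call returns
```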
@@ -120,7 +124,7 @@ def send_feedback_to_remote(self, feedback_data):
 
 
 def logging_wrapper(original_fn):
-    def wrapped_function(*args, **kwargs):
+    def wrapped_function(**kwargs):
         # Call the original function with the provided arguments
 
         if "hegel_model" in kwargs:
@@ -129,10 +133,10 @@ def wrapped_function(*args, **kwargs):
         else:
             hegel_model = None
         start = perf_counter()
-        result = original_fn(*args, **kwargs)
+        result = original_fn(**kwargs)
         latency = perf_counter() - start
         log_id = str(uuid.uuid4())
-        sender.add_to_queue(hegel_model, result.model_dump_json(), json.dumps(kwargs), latency, log_id, args)
+        sender.add_to_queue(hegel_model, result.model_dump_json(), json.dumps(kwargs), latency, log_id)
         result.log_id = log_id
         return result
 
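After this change the wrapper itself enforces the keyword-only contract: `wrapped_function(**kwargs)` has no `*args`, so any positional argument raises a `TypeError` before the wrapped callable runs (the notebook traceback above happens to be raised by the openai client's own argument checking, but the effect is the same). A minimal self-contained sketch; `fake_create` is a hypothetical stand-in:

```python
# Minimal sketch of the kwargs-only enforcement, independent of prompttools.
def logging_wrapper(original_fn):
    def wrapped_function(**kwargs):  # no *args: positional calls fail early
        return original_fn(**kwargs)
    return wrapped_function


@logging_wrapper
def fake_create(model=None, messages=None):  # hypothetical stand-in
    return (model, messages)


fake_create(model="m", messages=[])  # OK
fake_create("m", messages=[])        # TypeError: takes 0 positional arguments
```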
