diff --git a/notebook/distributed_debate.ipynb b/notebook/distributed_debate.ipynb new file mode 100644 index 000000000..2b2b5a1da --- /dev/null +++ b/notebook/distributed_debate.ipynb @@ -0,0 +1,317 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "478efc38-ac99-40a7-9e13-b72840f14e19", + "metadata": {}, + "source": [ + "# Distributed debate" + ] + }, + { + "cell_type": "markdown", + "id": "2f0c5593-c810-4c93-90de-b2c389b878ab", + "metadata": { + "collapsed": true + }, + "source": [ + "This example simulates a debate competition with three participant agents, including the affirmative side (Pro), the negative side (Con), and the adjudicator (Judge). \n", + "\n", + "Pro believes that AGI can be achieved using the GPT model framework, while Con contests it. Judge listens to both sides' arguments and provides an analytical judgment on which side presented a more compelling and reasonable case.\n", + "\n", + "Messages generated by any agents can be observed by other agents in the debate.\n", + "\n", + "The full code can be found in `examples/distributed/distributed_debate.py`" + ] + }, + { + "cell_type": "markdown", + "id": "321e5966-752c-4a28-b63e-3239008d6b3a", + "metadata": {}, + "source": [ + "To install AgentScope, please follow the steps in [README.md](https://github.com/alibaba/AgentScope/blob/main/README.md#installation)." + ] + }, + { + "cell_type": "markdown", + "id": "fc97a3fc-6bed-4a0f-bf61-e977630a159c", + "metadata": {}, + "source": [ + "In this case, we need to initialize three agents. The code is as follows."
+ ] + }, + { + "cell_type": "markdown", + "id": "868f7187-fc64-4ead-87a0-143650eeda1e", + "metadata": {}, + "source": [ + "```python\n", + "def setup_server(parsed_args: argparse.Namespace) -> None:\n", + " \"\"\"Setup rpc server for participant agent\"\"\"\n", + " agentscope.init(\n", + " model_configs=\"configs/model_configs.json\",\n", + " )\n", + " with open(\n", + " \"configs/debate_agent_configs.json\",\n", + " \"r\",\n", + " encoding=\"utf-8\",\n", + " ) as f:\n", + " configs = json.load(f)\n", + " config = configs[parsed_args.role]\n", + " host = getattr(parsed_args, f\"{parsed_args.role}_host\")\n", + " port = getattr(parsed_args, f\"{parsed_args.role}_port\")\n", + " server_launcher = RpcAgentServerLauncher(\n", + " host=host,\n", + " port=port,\n", + " local_mode=False,\n", + " agent_class=RpcDialogAgent,\n", + " **config,\n", + " )\n", + " server_launcher.launch()\n", + " server_launcher.wait_until_terminate()\n", + "```" + ] + }, + { + "cell_type": "markdown", + "id": "01ca8024-fa7e-4d7f-bf35-a78511a47ab3", + "metadata": {}, + "source": [ + "To make it run on Jupyter, we run them in the background."
+ ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "7f3d7a0b-3c46-40aa-a877-f0b4d8423c14", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "/Users/qianbingchen/AgentScope/examples/distributed\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/Users/qianbingchen/miniconda3/envs/agent/lib/python3.9/site-packages/IPython/core/magics/osm.py:417: UserWarning: using dhist requires you to install the `pickleshare` library.\n", + " self.shell.db['dhist'] = compress_dhist(dhist)[-100:]\n" + ] + } + ], + "source": [ + "%cd ../examples/distributed/" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "1058a769-bedf-4832-85e2-bc943c0741ac", + "metadata": {}, + "outputs": [], + "source": [ + "import subprocess\n", + "\n", + "# Using Popen to initialize a backstage process\n", + "\n", + "proc_pro = subprocess.Popen(\n", + " ['python', 'distributed_debate.py', '--role', 'pro', '--pro-host', 'localhost', '--pro-port', '12011'],\n", + " stdout=subprocess.PIPE,\n", + " stderr=subprocess.PIPE\n", + ")\n", + "\n", + "proc_con = subprocess.Popen(\n", + " ['python', 'distributed_debate.py', '--role', 'con', '--con-host', 'localhost', '--con-port', '12012'],\n", + " stdout=subprocess.PIPE,\n", + " stderr=subprocess.PIPE\n", + ")\n", + "\n", + "proc_judge = subprocess.Popen(\n", + " ['python', 'distributed_debate.py', '--role', 'judge', '--judge-host', 'localhost', '--judge-port', '12013'],\n", + " stdout=subprocess.PIPE,\n", + " stderr=subprocess.PIPE\n", + ")\n", + "\n", + "# The above codes will return immediately and the process will run in the background\n" + ] + }, + { + "cell_type": "markdown", + "id": "b02bfd7f-967f-402b-a4e4-b2754faba736", + "metadata": {}, + "source": [ + "Now, we turn back to run the main function." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "8d4a4f80-e631-49f4-9265-cdf116cd11c3", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "/Users/qianbingchen/AgentScope/notebook\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/Users/qianbingchen/miniconda3/envs/agent/lib/python3.9/site-packages/IPython/core/magics/osm.py:417: UserWarning: using dhist requires you to install the `pickleshare` library.\n", + " self.shell.db['dhist'] = compress_dhist(dhist)[-100:]\n" + ] + } + ], + "source": [ + "%cd ../../notebook/" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "bdae9ef1-08b1-424b-ab5b-735cb854af7d", + "metadata": {}, + "outputs": [], + "source": [ + "model_configs = [\n", + " {\n", + " \"type\": \"openai\",\n", + " \"name\": \"gpt-3.5-turbo\",\n", + " \"parameters\": {\n", + " \"api_key\": \"xxx\",\n", + " \"organization_id\": \"xxx\",\n", + " \"temperature\": 0.0\n", + " }\n", + " },\n", + " {\n", + " \"type\": \"openai\",\n", + " \"name\": \"gpt-4\",\n", + " \"parameters\": {\n", + " \"api_key\": \"xxx\",\n", + " \"organization_id\": \"xxx\",\n", + " \"temperature\": 0.0\n", + " }\n", + " }\n", + "]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6391fb00-f74c-42c5-b742-56b7a773f875", + "metadata": {}, + "outputs": [], + "source": [ + "# -*- coding: utf-8 -*-\n", + "\"\"\" An example of distributed debate \"\"\"\n", + "\n", + "import argparse\n", + "import json\n", + "\n", + "import agentscope\n", + "from agentscope.msghub import msghub\n", + "from agentscope.agents.rpc_dialog_agent import RpcDialogAgent\n", + "from agentscope.message import Msg\n", + "from agentscope.utils.logging_utils import logger\n", + "\n", + "ANNOUNCEMENT = \"\"\"\n", + "Welcome to the debate on whether Artificial General Intelligence (AGI) can be achieved using the GPT model framework. This debate will consist of three rounds. 
In each round, the affirmative side will present their argument first, followed by the negative side. After both sides have presented, the adjudicator will summarize the key points and analyze the strengths of the arguments.\n", + "\n", + "The rules are as follows:\n", + "\n", + "Each side must present clear, concise arguments backed by evidence and logical reasoning.\n", + "No side may interrupt the other while they are presenting their case.\n", + "After both sides have presented, the adjudicator will have time to deliberate and will then provide a summary, highlighting the most persuasive points from both sides.\n", + "The adjudicator's summary will not declare a winner for the individual rounds but will focus on the quality and persuasiveness of the arguments.\n", + "At the conclusion of the three rounds, the adjudicator will declare the overall winner based on which side won two out of the three rounds, considering the consistency and strength of the arguments throughout the debate.\n", + "Let us begin the first round. 
The affirmative side: please present your argument for why AGI can be achieved using the GPT model framework.\n", + "\"\"\" # noqa\n", + "\n", + "\n", + "\"\"\"Setup the main debate competition process\"\"\"\n", + "\n", + "agentscope.init(\n", + " model_configs=model_configs,\n", + ")\n", + "pro_agent = RpcDialogAgent(\n", + " name=\"Pro\",\n", + " host=\"localhost\",\n", + " port=12011,\n", + " launch_server=False,\n", + ")\n", + "con_agent = RpcDialogAgent(\n", + " name=\"Con\",\n", + " host=\"localhost\",\n", + " port=12012,\n", + " launch_server=False,\n", + ")\n", + "judge_agent = RpcDialogAgent(\n", + " name=\"Judge\",\n", + " host=\"localhost\",\n", + " port=12013,\n", + " launch_server=False,\n", + ")\n", + "participants = [pro_agent, con_agent, judge_agent]\n", + "hint = Msg(name=\"System\", content=ANNOUNCEMENT)\n", + "x = None\n", + "with msghub(participants=participants, announcement=hint):\n", + " for _ in range(3):\n", + " pro_resp = pro_agent(x)\n", + " logger.chat(pro_resp.update_value())\n", + " con_resp = con_agent(pro_resp)\n", + " logger.chat(con_resp.update_value())\n", + " x = judge_agent(con_resp)\n", + " logger.chat(x.update_value())\n", + " x = judge_agent(x)\n", + " logger.chat(x.update_value())\n" + ] + }, + { + "cell_type": "markdown", + "id": "92afdcd1-244a-48b9-8d3c-ab3cdc024f62", + "metadata": {}, + "source": [ + "Finally, terminate the processes in the background."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d7946ae1-4828-4ad6-a075-204e61352b5d", + "metadata": {}, + "outputs": [], + "source": [ + "proc_pro.terminate()\n", + "proc_con.terminate()\n", + "proc_judge.terminate()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.18" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebook/distributed_dialog.ipynb b/notebook/distributed_dialog.ipynb new file mode 100644 index 000000000..61d449319 --- /dev/null +++ b/notebook/distributed_dialog.ipynb @@ -0,0 +1,252 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "d8bb3d3e-eec5-4a14-bb36-9fdf6b7d00b2", + "metadata": {}, + "source": [ + "# Distributed dialogue" + ] + }, + { + "cell_type": "markdown", + "id": "8626bd94-3a0b-4c61-85d6-b157ffc5ac25", + "metadata": {}, + "source": [ + "This example initializes an assistant agent and a user agent as separate processes and uses RPC to communicate between them. The full code can be found in `examples/distributed/distributed_dialog.py`" + ] + }, + { + "cell_type": "markdown", + "id": "605ebd1c-3222-4dce-b974-6377da37d555", + "metadata": {}, + "source": [ + "To install AgentScope, please follow the steps in [README.md](https://github.com/alibaba/AgentScope/blob/main/README.md#installation)." + ] + }, + { + "cell_type": "markdown", + "id": "710f835a-ecc8-481f-a4ab-7f0db33e68f4", + "metadata": {}, + "source": [ + "In this case, we need to initialize two agents: assistant agent and user agent." + ] + }, + { + "cell_type": "markdown", + "id": "7938f3a2-38e5-424c-a589-71b31ea7fde1", + "metadata": {}, + "source": [ + "The code for the assistant is as follows."
+ ] + }, + { + "cell_type": "markdown", + "id": "eb93b3c3-aee4-4c59-b6cd-a6b107a23b49", + "metadata": {}, + "source": [ + "```python\n", + "def setup_assistant_server(assistant_host: str, assistant_port: int) -> None:\n", + " \"\"\"Set up assistant rpc server\"\"\"\n", + " agentscope.init(\n", + " model_configs=\"configs/model_configs.json\",\n", + " )\n", + " assistant_server_launcher = RpcAgentServerLauncher(\n", + " name=\"Assitant\",\n", + " agent_class=RpcDialogAgent,\n", + " host=assistant_host,\n", + " port=assistant_port,\n", + " sys_prompt=\"You are a helpful assistant.\",\n", + " model=\"gpt-3.5-turbo\",\n", + " use_memory=True,\n", + " local_mode=False,\n", + " )\n", + " assistant_server_launcher.launch()\n", + " assistant_server_launcher.wait_until_terminate()\n", + "```" + ] + }, + { + "cell_type": "markdown", + "id": "6dc37a09-4077-4ab9-a6e9-e2afa9b87228", + "metadata": {}, + "source": [ + "To run two processes on jupyter at the same time, we put `assistant agent` in the background." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2c09ed71-ede2-4f03-923a-67a2668fcd00", + "metadata": {}, + "outputs": [], + "source": [ + "%cd ../examples/distributed/" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "c6148137-29f8-48c9-a91c-62e9b48f9e6c", + "metadata": {}, + "outputs": [], + "source": [ + "import subprocess\n", + "\n", + "# Using Popen to initialize a background process\n", + "\n", + "proc = subprocess.Popen(\n", + " ['python', 'distributed_dialog.py', '--role', 'assistant', '--assistant-host', 'localhost', '--assistant-port', '12010'],\n", + " stdout=subprocess.PIPE,\n", + " stderr=subprocess.PIPE\n", + ")\n", + "\n", + "# This code will return immediately and the process will run in the background\n" + ] + }, + { + "cell_type": "markdown", + "id": "d5229b68-8c6d-4063-bfe7-f1d836b0b74d", + "metadata": {}, + "source": [ + "Then we run the user agent in Jupyter."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c1ce5f86-4088-4f1f-a010-377f5b7550f0", + "metadata": {}, + "outputs": [], + "source": [ + "%cd ../../notebook/" + ] + }, + { + "cell_type": "markdown", + "id": "fa7ce528-5447-4168-a83a-ffcce53d2155", + "metadata": {}, + "source": [ + "Define the model_configs." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "d7deca99-21c7-4746-9288-67b9a2f733b1", + "metadata": {}, + "outputs": [], + "source": [ + "model_configs = [\n", + " {\n", + " \"type\": \"openai\",\n", + " \"name\": \"gpt-3.5-turbo\",\n", + " \"parameters\": {\n", + " \"api_key\": \"xxx\",\n", + " \"organization_id\": \"xxx\",\n", + " \"temperature\": 0.0\n", + " }\n", + " },\n", + " {\n", + " \"type\": \"openai\",\n", + " \"name\": \"gpt-4\",\n", + " \"parameters\": {\n", + " \"api_key\": \"xxx\",\n", + " \"organization_id\": \"xxx\",\n", + " \"temperature\": 0.0\n", + " }\n", + " }\n", + "]" + ] + }, + { + "cell_type": "markdown", + "id": "62914494-1a6f-409e-96ab-01eaf3dcd12e", + "metadata": {}, + "source": [ + "Run the following codes to initialize the user agent." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "85367a4c-ec23-40e1-967f-fa7a0e932661", + "metadata": {}, + "outputs": [], + "source": [ + "import time\n", + "\n", + "import agentscope\n", + "from agentscope.agents import UserAgent\n", + "from agentscope.agents.rpc_dialog_agent import RpcDialogAgent\n", + "from agentscope.utils.logging_utils import logger\n", + "\n", + "assistant_host = \"localhost\"\n", + "assistant_port = 12010\n", + "\n", + "agentscope.init(\n", + " model_configs=model_configs,\n", + ")\n", + "assistant_agent = RpcDialogAgent(\n", + " name=\"Assistant\",\n", + " host=assistant_host,\n", + " port=assistant_port,\n", + " launch_server=False,\n", + ")\n", + "user_agent = UserAgent(\n", + " name=\"User\",\n", + " require_url=False,\n", + ")\n", + "logger.info(\n", + " \"Setup successfully, have fun chatting! 
(enter 'exit' to close the \"\n", + " \"agent)\",\n", + ")\n", + "msg = user_agent()\n", + "while not msg.content.endswith(\"exit\"):\n", + " msg = assistant_agent(msg)\n", + " logger.chat(msg.update_value())\n", + " time.sleep(0.5)\n", + " msg = user_agent(msg)" + ] + }, + { + "cell_type": "markdown", + "id": "829a2053-9643-4153-a324-0762177e8c1c", + "metadata": {}, + "source": [ + "Finally, terminate the assistant agent in the background. " + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "59dc514a-c108-4d9e-95ce-bedf757c8958", + "metadata": {}, + "outputs": [], + "source": [ + "proc.terminate()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.18" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +}