
Commit

Merge pull request geekan#156 from martcpp/main
English comment translation
stellaHSR authored Aug 9, 2023
2 parents 2a4da6c + 050d55e commit 0be4911
Showing 5 changed files with 39 additions and 12 deletions.
29 changes: 22 additions & 7 deletions metagpt/environment.py
@@ -16,7 +16,10 @@


class Environment(BaseModel):
"""环境,承载一批角色,角色可以向环境发布消息,可以被其他角色观察到"""
"""环境,承载一批角色,角色可以向环境发布消息,可以被其他角色观察到
Environment, hosting a batch of roles; roles can publish messages to the environment and can be observed by other roles
"""

roles: dict[str, Role] = Field(default_factory=dict)
memory: Memory = Field(default_factory=Memory)
@@ -26,23 +29,31 @@ class Config:
arbitrary_types_allowed = True

def add_role(self, role: Role):
"""增加一个在当前环境的Role"""
"""增加一个在当前环境的角色
Add a role to the current environment
"""
role.set_env(self)
self.roles[role.profile] = role

def add_roles(self, roles: Iterable[Role]):
"""增加一批在当前环境的Role"""
"""增加一批在当前环境的角色
Add a batch of roles to the current environment
"""
for role in roles:
self.add_role(role)

def publish_message(self, message: Message):
"""向当前环境发布信息"""
"""向当前环境发布信息
Publish a message to the current environment
"""
# self.message_queue.put(message)
self.memory.add(message)
self.history += f"\n{message}"

async def run(self, k=1):
"""处理一次所有Role的运行"""
"""处理一次所有信息的运行
Process all Role runs at once
"""
# while not self.message_queue.empty():
# message = self.message_queue.get()
# rsp = await self.manager.handle(message, self)
@@ -56,9 +67,13 @@ async def run(self, k=1):
await asyncio.gather(*futures)

def get_roles(self) -> dict[str, Role]:
"""获得环境内的所有Role"""
"""获得环境内的所有角色
Process all Role runs at once
"""
return self.roles

def get_role(self, name: str) -> Role:
"""获得环境内的指定Role"""
"""获得环境内的指定角色
get all the environment roles
"""
return self.roles.get(name, None)
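Taken together, these docstrings describe a small publish/subscribe-style API. The sketch below shows one plausible way to exercise it; the Engineer import is an assumption (any concrete Role subclass would do), and UserMessage is the wrapper added later in this diff, so treat this as an illustration rather than verified project code.

import asyncio

from metagpt.environment import Environment
from metagpt.roles import Engineer  # assumption: a concrete Role subclass with this name
from metagpt.schema import UserMessage


async def main():
    env = Environment()
    env.add_role(Engineer())  # stored in env.roles under role.profile
    env.publish_message(UserMessage("Build a CLI snake game"))  # recorded in memory and history
    await env.run(k=1)  # one round: every registered role gets a chance to act
    print(list(env.get_roles()))  # profiles of all registered roles


asyncio.run(main())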
4 changes: 3 additions & 1 deletion metagpt/llm.py
@@ -14,5 +14,7 @@


async def ai_func(prompt):
"""使用LLM进行QA"""
"""使用LLM进行QA
QA with LLMs
"""
return await DEFAULT_LLM.aask(prompt)
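A minimal call site for this helper might look like the following sketch; it assumes DEFAULT_LLM is already configured (API key, model) in the project's settings.

import asyncio

from metagpt.llm import ai_func


async def main():
    # ai_func simply awaits DEFAULT_LLM.aask(prompt) and returns the answer text.
    answer = await ai_func("In one sentence, what does a publish/subscribe pattern do?")
    print(answer)


asyncio.run(main())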
4 changes: 3 additions & 1 deletion metagpt/logs.py
@@ -14,7 +14,9 @@


def define_log_level(print_level="INFO", logfile_level="DEBUG"):
"""调整日志级别到level之上"""
"""调整日志级别到level之上
Adjust the log level to the given level or above
"""
_logger.remove()
_logger.add(sys.stderr, level=print_level)
_logger.add(PROJECT_ROOT / 'logs/log.txt', level=logfile_level)
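As a usage note (a sketch, not part of the diff): calling define_log_level reconfigures both the stderr sink and the file sink. The snippet assumes the module also exposes the configured logger object, which is how the surrounding codebase appears to consume it.

from metagpt.logs import define_log_level, logger  # assumption: `logger` is exported by this module

# Make the console output as verbose as the log file.
define_log_level(print_level="DEBUG", logfile_level="DEBUG")

logger.debug("now visible on stderr")
logger.info("also written to logs/log.txt under PROJECT_ROOT")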
2 changes: 2 additions & 0 deletions metagpt/manager.py
@@ -33,6 +33,7 @@ def __init__(self, llm: LLM = LLM()):
async def handle(self, message: Message, environment):
"""
管理员处理信息,现在简单的将信息递交给下一个人
The manager processes the message; for now it simply passes the message on to the next role
:param message:
:param environment:
:return:
@@ -50,6 +51,7 @@ async def handle(self, message: Message, environment):
# chosen_role_name = self.llm.ask(self.prompt_template.format(context))

# FIXME: 现在通过简单的字典决定流向,但之后还是应该有思考过程
# For now the flow direction is determined by a simple dictionary, but a reasoning process should be added later
next_role_profile = self.role_directions[message.role]
# logger.debug(f"{next_role_profile}")
for _, role in roles.items():
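The translated FIXME describes dictionary-based routing: the sender's role name is looked up in role_directions to pick the profile of the next role. The rest of handle is truncated in this view, so the sketch below only illustrates that lookup-and-dispatch idea with hypothetical profile names; it is not the file's actual code.

# Hypothetical illustration of role_directions-style routing.
role_directions = {
    "BOSS": "Product Manager",
    "Product Manager": "Architect",
    "Architect": "Engineer",
}


def next_profile(sender_role: str) -> str:
    # The "reasoning process" mentioned in the FIXME would eventually replace this plain lookup.
    return role_directions[sender_role]


print(next_profile("Product Manager"))  # -> "Architect"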
12 changes: 9 additions & 3 deletions metagpt/schema.py
@@ -46,21 +46,27 @@ def to_dict(self) -> dict:

@dataclass
class UserMessage(Message):
"""便于支持OpenAI的消息"""
"""便于支持OpenAI的消息
Convenience message class for OpenAI-style messages
"""
def __init__(self, content: str):
super().__init__(content, 'user')


@dataclass
class SystemMessage(Message):
"""便于支持OpenAI的消息"""
"""便于支持OpenAI的消息
Convenience message class for OpenAI-style messages
"""
def __init__(self, content: str):
super().__init__(content, 'system')


@dataclass
class AIMessage(Message):
"""便于支持OpenAI的消息"""
"""便于支持OpenAI的消息
Convenience message class for OpenAI-style messages
"""
def __init__(self, content: str):
super().__init__(content, 'assistant')

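A short sketch of how the three wrappers might be used together, assuming to_dict() (visible in the hunk header above) returns an OpenAI-style {"role": ..., "content": ...} mapping; that return shape is an assumption, since the method body is not shown in this diff.

from metagpt.schema import AIMessage, SystemMessage, UserMessage

dialogue = [
    SystemMessage("You are a helpful assistant."),
    UserMessage("Summarize the publish/subscribe pattern."),
    AIMessage("Publishers emit messages; subscribers receive the ones they subscribe to."),
]

# Assumption: to_dict() yields OpenAI-compatible {"role", "content"} dicts.
openai_messages = [m.to_dict() for m in dialogue]
print(openai_messages)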

