Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/main' into feat/cdp-cyclotron-part-2
Browse files Browse the repository at this point in the history
  • Loading branch information
bretthoerner committed Sep 4, 2024
2 parents 1311f64 + 73fce74 commit 971e27d
Show file tree
Hide file tree
Showing 48 changed files with 2,450 additions and 77 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/rust-docker-build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -95,8 +95,8 @@ jobs:
- name: Container image digest
id: digest
run: |
echo ${{ steps.docker_build.outputs.digest }}
echo "${{matrix.image}}_digest=${{ steps.docker_build.outputs.digest }}" >> $GITHUB_OUTPUT
echo ${{ steps.docker_build.outputs.digest }}
echo "${{matrix.image}}_digest=${{ steps.docker_build.outputs.digest }}" >> $GITHUB_OUTPUT
deploy:
name: Deploy capture-replay
Expand Down
Empty file added ee/hogai/__init__.py
Empty file.
55 changes: 55 additions & 0 deletions ee/hogai/generate_trends_agent.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
from typing import Literal, Optional

from langchain_core.output_parsers.openai_tools import PydanticToolsParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI
from pydantic import BaseModel, Field

from ee.hogai.system_prompt import trends_system_prompt
from ee.hogai.team_prompt import TeamPrompt
from ee.hogai.trends_function import TrendsFunction
from posthog.models.team.team import Team
from posthog.schema import ExperimentalAITrendsQuery


class output_insight_schema(BaseModel):
    """Structured tool-call payload returned by the LLM: optional chain-of-thought
    steps plus the generated trends query.

    NOTE: the lowercase, non-PascalCase name is deliberate — it must match the
    ``tool_choice="output_insight_schema"`` string passed to ``bind_tools`` in
    ``GenerateTrendsAgent.bootstrap``, so it cannot be renamed safely.
    """

    # Optional free-text reasoning emitted by the model before the answer.
    # Normalized from Optional[list[str]] to the `X | None` union syntax the
    # rest of this file already uses (see `user_prompt: str | None` below);
    # semantics are identical for pydantic.
    reasoning_steps: list[str] | None = None
    # The generated trends query (project-declared schema type).
    answer: ExperimentalAITrendsQuery


class ChatMessage(BaseModel):
    """A single turn of the conversation sent to the trends agent."""

    # Only user/assistant turns are accepted; the system prompt is added
    # separately in GenerateTrendsAgent.bootstrap.
    role: Literal["user", "assistant"]
    # Capped at 2500 chars, presumably to bound prompt size — pydantic rejects
    # longer content at validation time.
    content: str = Field(..., max_length=2500)


class Conversation(BaseModel):
    """A bounded chat history plus the session it belongs to."""

    # At most 20 turns are accepted (pydantic max_length on the list),
    # bounding the prompt assembled from this history.
    messages: list[ChatMessage] = Field(..., max_length=20)
    # Opaque session identifier; not interpreted anywhere in this file.
    session_id: str


class GenerateTrendsAgent:
    """Builds a LangChain chain that answers a trends question for one team."""

    _team: Team

    def __init__(self, team: Team):
        self._team = team

    def bootstrap(self, messages: list[ChatMessage], user_prompt: str | None = None):
        """Assemble and return the prompt → LLM → parser chain.

        The chain is only constructed here, not invoked; callers run it with
        the mustache variables (e.g. ``question``) it expects.
        """
        # Force the model to respond via the trends tool call so output is
        # always parseable into output_insight_schema.
        model = ChatOpenAI(model="gpt-4o-2024-08-06", stream_usage=True)
        llm = model.bind_tools(
            [TrendsFunction().generate_function()], tool_choice="output_insight_schema"
        )

        if user_prompt is None:
            user_prompt = (
                "Answer to my question:\n<question>{{question}}</question>\n"
                + TeamPrompt(self._team).generate_prompt()
            )

        # Replay the rest of the conversation as alternating chat turns.
        # NOTE(review): messages[0] is skipped — presumably its content is
        # carried by user_prompt/{{question}} instead; confirm with callers.
        history: list[tuple[str, str]] = []
        for turn in messages[1:]:
            history.append((turn.role, turn.content))

        prompts = ChatPromptTemplate.from_messages(
            [
                ("system", trends_system_prompt),
                ("user", user_prompt),
                *history,
            ],
            template_format="mustache",
        )

        return prompts | llm | PydanticToolsParser(tools=[output_insight_schema])  # type: ignore
Loading

0 comments on commit 971e27d

Please sign in to comment.