From 28df3c99d786c673c028d1912e865725bf96ed8c Mon Sep 17 00:00:00 2001
From: mcamou
Date: Mon, 11 Nov 2024 19:09:00 +0100
Subject: [PATCH] Remove sentiment analysis since it's no longer called from anywhere

---
 docs/oracle-node/discord-sentiment.md       |  84 ----
 docs/oracle-node/telegram-sentiment.md      |  75 ----
 docs/oracle-node/twitter-sentiment.md       | 120 ------
 go.mod                                      |   1 -
 go.sum                                      |   2 -
 node/oracle_node.go                         |   1 +
 pkg/llmbridge/client.go                     | 177 ---------
 pkg/llmbridge/config.go                     |  37 --
 pkg/llmbridge/payload.go                    |  48 ---
 pkg/llmbridge/sentiment.go                  | 404 --------------------
 pkg/network/kdht.go                         |   1 +
 pkg/pubsub/node_event_tracker.go            |   8 +-
 pkg/scrapers/discord/getchannelmessages.go  |  26 --
 pkg/scrapers/telegram/getchannelmessages.go |  19 -
 pkg/workers/handlers/discord.go             |  21 +-
 pkg/workers/handlers/telegram.go            |  19 -
 pkg/workers/worker_manager.go               |   1 -
 17 files changed, 9 insertions(+), 1035 deletions(-)
 delete mode 100644 docs/oracle-node/discord-sentiment.md
 delete mode 100644 docs/oracle-node/telegram-sentiment.md
 delete mode 100644 docs/oracle-node/twitter-sentiment.md
 delete mode 100644 pkg/llmbridge/client.go
 delete mode 100644 pkg/llmbridge/config.go
 delete mode 100644 pkg/llmbridge/payload.go
 delete mode 100644 pkg/llmbridge/sentiment.go

diff --git a/docs/oracle-node/discord-sentiment.md b/docs/oracle-node/discord-sentiment.md
deleted file mode 100644
index d73e5a08..00000000
--- a/docs/oracle-node/discord-sentiment.md
+++ /dev/null
@@ -1,84 +0,0 @@
----
-id: discord-sentiment
-title: Discord Sentiment
----
-
-## Masa Node Discord Sentiment Analysis Feature
-
-The Masa Node introduces a robust feature for analyzing the sentiment of Discord messages. This functionality utilizes advanced language models to determine the sentiment conveyed in a collection of Discord messages, offering critical insights into community mood and trends.
-
-## Overview
-
-The Discord sentiment analysis feature extends the Masa Node's capabilities to include interaction with social media data, specifically from Discord channels. It employs cutting-edge language models to assess the sentiment of messages, classifying them as positive, negative, or neutral.
-
-## How It Works
-
-The sentiment analysis process begins by collecting Discord messages based on specific channel IDs, followed by sentiment evaluation using the chosen language models. The system is compatible with a variety of models, such as Claude and GPT variants, ensuring versatile and robust sentiment analysis.
-
-> **Important**: To retrieve message content, you must toggle "Message Content Intent" in the Discord Developer Portal under the `Bot` section.
-> -> ![Message Content Intent](/img/discord-message-content-intent.png) - - -### Models - -```go -const ( - ClaudeOpus ModelType = "claude-3-opus" - ClaudeOpus20240229 ModelType = "claude-3-opus-20240229" - ClaudeSonnet20240229 ModelType = "claude-3-sonnet-20240229" - ClaudeHaiku20240307 ModelType = "claude-3-haiku-20240307" - GPT4 ModelType = "gpt-4" - GPT4o ModelType = "gpt-4o" - GPT4TurboPreview ModelType = "gpt-4-turbo-preview" - GPT35Turbo ModelType = "gpt-3.5-turbo" - LLama2 ModelType = "ollama/llama2" - LLama3 ModelType = "ollama/llama3" - Mistral ModelType = "ollama/mistral" - Gemma ModelType = "ollama/gemma" - Mixtral ModelType = "ollama/mixtral" - OpenChat ModelType = "ollama/openchat" - NeuralChat ModelType = "ollama/neural-chat" - CloudflareQwen15Chat ModelType = "@cf/qwen/qwen1.5-0.5b-chat" - CloudflareLlama27bChatFp16 ModelType = "@cf/meta/llama-2-7b-chat-fp16" - CloudflareLlama38bInstruct ModelType = "@cf/meta/llama-3-8b-instruct" - CloudflareMistral7bInstruct ModelType = "@cf/mistral/mistral-7b-instruct" - CloudflareMistral7bInstructV01 ModelType = "@cf/mistral/mistral-7b-instruct-v0.1" - HuggingFaceGoogleGemma7bIt ModelType = "@hf/google/gemma-7b-it" - HuggingFaceNousresearchHermes2ProMistral7b ModelType = "@hf/nousresearch/hermes-2-pro-mistral-7b" - HuggingFaceTheblokeLlama213bChatAwq ModelType = "@hf/thebloke/llama-2-13b-chat-awq" - HuggingFaceTheblokeNeuralChat7bV31Awq ModelType = "@hf/thebloke/neural-chat-7b-v3-1-awq" - CloudflareOpenchat35_0106 ModelType = "@cf/openchat/openchat-3.5-0106" - CloudflareMicrosoftPhi2 ModelType = "@cf/microsoft/phi-2" -) -``` - -### Fetching Discord Messages - -#### Masa API - -> POST to the endpoint /sentiment/discord - -```json -{ - "channelID": "1049433598505267250", - "prompt": "question", - "model": "claude-3-opus-20240229" -} -``` - -#### Masa CLI or code integration - -Discord messages are retrieved using a custom scraper or the Discord API, as demonstrated in the [llmbridge](file:///path/to/masa/masa-oracle/pkg/llmbridge/sentiment_discord.go#L123) package. This process is designed to be straightforward and does not necessarily require Discord bot tokens. - -```go -func AnalyzeSentimentDiscord(messages []string, model string, prompt string) (string, string, error) { ... } -``` - -### Analyzing Sentiment - -After the messages are collected, they are processed through the selected language model for sentiment analysis. The system currently accommodates models with "claude-" and "gpt-" prefixes, among others, to suit various analysis requirements. - -### Conclusion - -Masa Node's Discord sentiment analysis feature is a potent instrument for gauging public sentiment within Discord communities. By leveraging state-of-the-art language models, it provides in-depth understanding of the sentiments expressed in messages, which is invaluable for community management and sentiment tracking. diff --git a/docs/oracle-node/telegram-sentiment.md b/docs/oracle-node/telegram-sentiment.md deleted file mode 100644 index 5c399367..00000000 --- a/docs/oracle-node/telegram-sentiment.md +++ /dev/null @@ -1,75 +0,0 @@ ---- -id: telegram-sentiment -title: Telegram Sentiment ---- - -## Masa Node Telegram Sentiment Analysis Feature - -The Masa Node introduces a powerful feature for analyzing the sentiment of telegram messages. This functionality leverages advanced language models to interpret the sentiment behind a collection of Telegram messages, providing valuable insights into public perception and trends. 
- -## Overview - -The Telegram sentiment analysis feature is part of the broader capabilities of the Masa Node, designed to interact with Telegram messages data in a meaningful way. It uses state-of-the-art language models to evaluate the sentiment of Telegram messages, categorizing them into positive, negative, or neutral sentiments. - -## How It Works - -The sentiment analysis process involves fetching Telegram messages based on specific queries, and then analyzing these messages using selected language models. The system supports various models, including Claude and GPT variants, allowing for flexible and powerful sentiment analysis. - -### Models - -```go -const ( - ClaudeOpus ModelType = "claude-3-opus" - ClaudeOpus20240229 ModelType = "claude-3-opus-20240229" - ClaudeSonnet20240229 ModelType = "claude-3-sonnet-20240229" - ClaudeHaiku20240307 ModelType = "claude-3-haiku-20240307" - GPT4 ModelType = "gpt-4" - GPT4o ModelType = "gpt-4o" - GPT4TurboPreview ModelType = "gpt-4-turbo-preview" - GPT35Turbo ModelType = "gpt-3.5-turbo" - LLama2 ModelType = "ollama/llama2" - LLama3 ModelType = "ollama/llama3" - Mistral ModelType = "ollama/mistral" - Gemma ModelType = "ollama/gemma" - Mixtral ModelType = "ollama/mixtral" - OpenChat ModelType = "ollama/openchat" - NeuralChat ModelType = "ollama/neural-chat" - CloudflareQwen15Chat ModelType = "@cf/qwen/qwen1.5-0.5b-chat" - CloudflareLlama27bChatFp16 ModelType = "@cf/meta/llama-2-7b-chat-fp16" - CloudflareLlama38bInstruct ModelType = "@cf/meta/llama-3-8b-instruct" - CloudflareMistral7bInstruct ModelType = "@cf/mistral/mistral-7b-instruct" - CloudflareMistral7bInstructV01 ModelType = "@cf/mistral/mistral-7b-instruct-v0.1" - HuggingFaceGoogleGemma7bIt ModelType = "@hf/google/gemma-7b-it" - HuggingFaceNousresearchHermes2ProMistral7b ModelType = "@hf/nousresearch/hermes-2-pro-mistral-7b" - HuggingFaceTheblokeLlama213bChatAwq ModelType = "@hf/thebloke/llama-2-13b-chat-awq" - HuggingFaceTheblokeNeuralChat7bV31Awq ModelType = "@hf/thebloke/neural-chat-7b-v3-1-awq" - CloudflareOpenchat35_0106 ModelType = "@cf/openchat/openchat-3.5-0106" - CloudflareMicrosoftPhi2 ModelType = "@cf/microsoft/phi-2" -) -``` - -### Fetching Telegram Sentiment - -#### Masa API - -> POST to the endpoint /sentiment/telegram - -```json -{ - "username": "coinlistofficialchannel", - "model": "all", // or replace with a single model type - "prompt": "new tokens" -} -``` - -#### Masa cli or code integration - -Messages are fetched using the Telegram Scraper library, as seen in the [llmbridge](/masa-oracle/pkg/llmbridge/sentiment.go#) package. This process does not require Telegram API keys, making it accessible and straightforward. - -```go -func AnalyzeSentimentTelegram(messages []*tg.Message, model string, prompt string) (string, string, error) { -``` - -### Analyzing Sentiment - -Once Telegram Messages are fetched, they are sent to the chosen language model for sentiment analysis. The system currently supports models prefixed with "claude-" and "gpt-", catering to a range of analysis needs. \ No newline at end of file diff --git a/docs/oracle-node/twitter-sentiment.md b/docs/oracle-node/twitter-sentiment.md deleted file mode 100644 index a8b3fcb4..00000000 --- a/docs/oracle-node/twitter-sentiment.md +++ /dev/null @@ -1,120 +0,0 @@ ---- -id: twitter-sentiment -title: Twitter Sentiment ---- - -## Masa Node Twitter Sentiment Analysis Feature - -The Masa Node introduces a powerful feature for analyzing the sentiment of tweets. 
This functionality leverages advanced language models to interpret the sentiment behind a collection of tweets, providing valuable insights into public perception and trends. - -## Overview - -The Twitter sentiment analysis feature is part of the broader capabilities of the Masa Node, designed to interact with social media data in a meaningful way. It uses state-of-the-art language models to evaluate the sentiment of tweets, categorizing them into positive, negative, or neutral sentiments. - -## How It Works - -The sentiment analysis process involves fetching tweets based on specific queries, and then analyzing these tweets using selected language models. The system supports various models, including Claude and GPT variants, allowing for flexible and powerful sentiment analysis. - -### Models - -```go -const ( - ClaudeOpus ModelType = "claude-3-opus" - ClaudeOpus20240229 ModelType = "claude-3-opus-20240229" - ClaudeSonnet20240229 ModelType = "claude-3-sonnet-20240229" - ClaudeHaiku20240307 ModelType = "claude-3-haiku-20240307" - GPT4 ModelType = "gpt-4" - GPT4o ModelType = "gpt-4o" - GPT4TurboPreview ModelType = "gpt-4-turbo-preview" - GPT35Turbo ModelType = "gpt-3.5-turbo" - LLama2 ModelType = "ollama/llama2" - LLama3 ModelType = "ollama/llama3" - Mistral ModelType = "ollama/mistral" - Gemma ModelType = "ollama/gemma" - Mixtral ModelType = "ollama/mixtral" - OpenChat ModelType = "ollama/openchat" - NeuralChat ModelType = "ollama/neural-chat" - CloudflareQwen15Chat ModelType = "@cf/qwen/qwen1.5-0.5b-chat" - CloudflareLlama27bChatFp16 ModelType = "@cf/meta/llama-2-7b-chat-fp16" - CloudflareLlama38bInstruct ModelType = "@cf/meta/llama-3-8b-instruct" - CloudflareMistral7bInstruct ModelType = "@cf/mistral/mistral-7b-instruct" - CloudflareMistral7bInstructV01 ModelType = "@cf/mistral/mistral-7b-instruct-v0.1" - HuggingFaceGoogleGemma7bIt ModelType = "@hf/google/gemma-7b-it" - HuggingFaceNousresearchHermes2ProMistral7b ModelType = "@hf/nousresearch/hermes-2-pro-mistral-7b" - HuggingFaceTheblokeLlama213bChatAwq ModelType = "@hf/thebloke/llama-2-13b-chat-awq" - HuggingFaceTheblokeNeuralChat7bV31Awq ModelType = "@hf/thebloke/neural-chat-7b-v3-1-awq" - CloudflareOpenchat35_0106 ModelType = "@cf/openchat/openchat-3.5-0106" - CloudflareMicrosoftPhi2 ModelType = "@cf/microsoft/phi-2" -) -``` - -### Fetching Tweets - -#### Masa API - -> POST to the endpoint /sentiment/twitter - -```json -{ - "query": "$MASA Token Launch", - "count": 5, - "model": "all" // or replace with a single model type -} -``` - -#### Masa cli or code integration - -Tweets are fetched using the Twitter Scraper library, as seen in the [llmbridge](file:///Users/john/Projects/masa/masa-oracle/pkg/llmbridge/sentiment_twitter.go#1%2C9-1%2C9) package. This process does not require Twitter API keys, making it accessible and straightforward. - -```go -func AnalyzeSentimentTweets(tweets []*twitterscraper.Tweet, model string) (string, string, error) { ... } -``` - -### Analyzing Sentiment - -Once tweets are fetched, they are sent to the chosen language model for sentiment analysis. The system currently supports models prefixed with "claude-" and "gpt-", catering to a range of analysis needs. - -### Integration with Masa Node CLI - -The sentiment analysis feature is integrated into the Masa Node CLI, allowing users to interact with it directly from the command line. Users can specify the query, the number of tweets to analyze, and the model to use for analysis. 
- -```go -var countMessage string -var userMessage string - -inputCountField := tview.NewInputField(). - SetLabel("# of Tweets to analyze "). - SetFieldWidth(10) -``` - -### Fetching Web Data - -> POST to the endpoint /sentiment/web - -```json -{ - "url": "https://masa.finance", - "depth": 10, - "model": "all" // or replace with a single model type -} -``` - -```go -func AnalyzeSentimentWeb(data string, depth int, model string) (string, error) { ... } -``` - -### Example Usage - -o analyze the sentiment of tweets, users can follow these steps: - -1. Launch the Masa Node CLI. -2. Navigate to the sentiment analysis section. -3. Enter the query for fetching tweets. -4. Specify the number of tweets to analyze. -5. Choose the language model for analysis. - -The system will then display the sentiment analysis results, providing insights into the overall sentiment of the tweets related to the query. - -### Conclusion - -The Twitter sentiment analysis feature of the Masa Node offers a powerful tool for understanding public sentiment on various topics. By leveraging advanced language models, it provides deep insights into the emotional tone behind tweets, making it a valuable asset for data analysis and decision-making. diff --git a/go.mod b/go.mod index f815b6c7..d0e96075 100644 --- a/go.mod +++ b/go.mod @@ -29,7 +29,6 @@ require ( github.com/masa-finance/masa-twitter-scraper v0.0.1 github.com/multiformats/go-multiaddr v0.13.0 github.com/multiformats/go-multihash v0.2.3 - github.com/ollama/ollama v0.3.12 github.com/onsi/ginkgo/v2 v2.20.2 github.com/onsi/gomega v1.34.2 github.com/rivo/tview v0.0.0-20240505185119-ed116790de0f diff --git a/go.sum b/go.sum index 88e349ba..ae85ca6e 100644 --- a/go.sum +++ b/go.sum @@ -527,8 +527,6 @@ github.com/nxadm/tail v1.4.11 h1:8feyoE3OzPrcshW5/MJ4sGESc5cqmGkGCWlco4l0bqY= github.com/nxadm/tail v1.4.11/go.mod h1:OTaG3NK980DZzxbRq6lEuzgU+mug70nY11sMd4JXXHc= github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= -github.com/ollama/ollama v0.3.12 h1:u7KFCKNDOEDD3kLlziB9XGeK6Jb1sLmlkrJSe8d5fS0= -github.com/ollama/ollama v0.3.12/go.mod h1:YrWoNkFnPOYsnDvsf/Ztb1wxU9/IXrNsQHqcxbY2r94= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= diff --git a/node/oracle_node.go b/node/oracle_node.go index 762ff7c9..d4d153c7 100644 --- a/node/oracle_node.go +++ b/node/oracle_node.go @@ -93,6 +93,7 @@ func NewOracleNode(ctx context.Context, opts ...Option) (*OracleNode, error) { var addrStr []string libp2pOptions := []libp2p.Option{ libp2p.ResourceManager(resourceManager), + // WTF Why? 
libp2p.Ping(false), // disable built-in ping libp2p.EnableNATService(), libp2p.NATPortMap(), diff --git a/pkg/llmbridge/client.go b/pkg/llmbridge/client.go deleted file mode 100644 index dd9b35a4..00000000 --- a/pkg/llmbridge/client.go +++ /dev/null @@ -1,177 +0,0 @@ -package llmbridge - -import ( - "bytes" - "context" - "encoding/json" - "errors" - "fmt" - "io" - "log" - "net/http" - "strings" - - "github.com/masa-finance/masa-oracle/pkg/config" - "github.com/sashabaranov/go-openai" -) - -type ClaudeClient struct { - config *ClaudeAPIConfig -} - -type GPTClient struct { - config *GPTAPIConfig -} - -// NewClaudeClient creates a new ClaudeClient instance with default configuration. -func NewClaudeClient() *ClaudeClient { - cnf := NewClaudeAPIConfig() - return &ClaudeClient{config: cnf} -} - -// NewGPTClient creates a new GPTClient instance with default configuration. -func NewGPTClient() *GPTClient { - cnf := NewGPTConfig() - return &GPTClient{config: cnf} -} - -// SendRequest sends an HTTP request to the Claude API with the given payload. -// It sets the required headers like Content-Type, x-api-key etc. -// Returns the HTTP response and any error. -func (c *ClaudeClient) SendRequest(payloadBytes []byte) (*http.Response, error) { - req, err := http.NewRequest("POST", c.config.URL, bytes.NewBuffer(payloadBytes)) - if err != nil { - return nil, err - } - req.Header.Set("Content-Type", "application/json") - req.Header.Set("x-api-key", c.config.APIKey) - req.Header.Set("anthropic-version", c.config.Version) - - client := &http.Client{} - return client.Do(req) -} - -func (c *GPTClient) SendRequest(payload string, model string, prompt string) (string, error) { - var openAiModel string - switch model { - case "gpt-4": - openAiModel = openai.GPT4 - case "gpt-4-turbo-preview": - openAiModel = openai.GPT40613 - case "gpt-3.5-turbo": - openAiModel = openai.GPT3Dot5Turbo - default: - break - } - - cfg := config.GetInstance() - key := cfg.GPTApiKey - if key == "" { - return "", errors.New("OPENAI_API_KEY is not set") - } - client := openai.NewClient(key) - resp, err := client.CreateChatCompletion( - context.Background(), - openai.ChatCompletionRequest{ - Model: openAiModel, - Messages: []openai.ChatCompletionMessage{ - { - Role: openai.ChatMessageRoleSystem, - Content: prompt, - }, - { - Role: openai.ChatMessageRoleUser, - Content: payload, - }, - }, - }, - ) - if err != nil { - log.Print(err) - return "", err - } - return resp.Choices[0].Message.Content, nil -} - -type Response struct { - ID string `json:"id"` - Type string `json:"type"` - Role string `json:"role"` - Content []ResponseContent `json:"content"` - Model string `json:"model"` - StopReason string `json:"stop_reason"` - StopSequence *string `json:"stop_sequence"` // Use *string for nullable fields - Usage Usage `json:"usage"` -} - -type ResponseContent struct { - Type string `json:"type"` - Text string `json:"text,omitempty"` - Error *ResponseError `json:"error,omitempty"` -} - -type ResponseError struct { - Type string `json:"type"` - Message string `json:"message"` -} - -type Usage struct { - InputTokens int `json:"input_tokens"` - OutputTokens int `json:"output_tokens"` -} - -// SanitizeResponse removes non-ASCII characters and unnecessary whitespace from a string. -// It also strips away double quotes for cleaner presentation. -// Parameters: -// - str: The input string to be sanitized. -// Returns: A sanitized string with only ASCII characters, reduced whitespace, and no double quotes. 
-func SanitizeResponse(str string) string { - var result []rune - for _, r := range str { - if r >= 0 && r <= 127 { - result = append(result, r) - } - } - sanitizedString := string(result) - sanitizedString = strings.ReplaceAll(sanitizedString, "\n\n", " ") - sanitizedString = strings.ReplaceAll(sanitizedString, "\n", "") - sanitizedString = strings.ReplaceAll(sanitizedString, "\"", "") - return sanitizedString -} - -// ParseResponse takes an http.Response, reads its body, and attempts to unmarshal it into a Response struct. -// It then sanitizes the text content of each ResponseContent within the Response and returns a summary string. -// Parameters: -// - resp: A pointer to an http.Response object that contains the server's response to an HTTP request. -// Returns: -// - A string that represents a sanitized summary of the response content. -// - An error if reading the response body or unmarshalling fails. -func ParseResponse(resp *http.Response) (string, error) { - bodyBytes, err := io.ReadAll(resp.Body) - if err != nil { - return "", err - } - defer resp.Body.Close() - - var response Response - if err := json.Unmarshal(bodyBytes, &response); err != nil { - return "", err - } - var summary = "" - if response.Content != nil { - for _, t := range response.Content { - summary = SanitizeResponse(t.Text) - } - } else { - var responseError map[string]interface{} - if err := json.Unmarshal(bodyBytes, &responseError); err == nil { - if errVal, ok := responseError["error"].(map[string]interface{}); ok { - if message, ok := errVal["message"].(string); ok { - summary = fmt.Sprintf("error from llm: Service %v", message) - } - } - } - return summary, nil - } - return summary, nil -} diff --git a/pkg/llmbridge/config.go b/pkg/llmbridge/config.go deleted file mode 100644 index 0b6e2b33..00000000 --- a/pkg/llmbridge/config.go +++ /dev/null @@ -1,37 +0,0 @@ -package llmbridge - -import "github.com/masa-finance/masa-oracle/pkg/config" - -type ClaudeAPIConfig struct { - URL string - APIKey string - Version string -} - -type GPTAPIConfig struct { - APIKey string -} - -// NewClaudeAPIConfig creates a new ClaudeAPIConfig instance with values loaded -// from the application config. -func NewClaudeAPIConfig() *ClaudeAPIConfig { - appConfig := config.GetInstance() - - // need to add these to the config package - return &ClaudeAPIConfig{ - URL: appConfig.ClaudeApiURL, - APIKey: appConfig.ClaudeApiKey, - Version: appConfig.ClaudeApiVersion, - } -} - -// NewGPTConfig creates a new GPTConfig instance with values loaded -// from the application config. -func NewGPTConfig() *GPTAPIConfig { - appConfig := config.GetInstance() - - // need to add these to the config package - return &GPTAPIConfig{ - APIKey: appConfig.GPTApiKey, - } -} diff --git a/pkg/llmbridge/payload.go b/pkg/llmbridge/payload.go deleted file mode 100644 index 5d227469..00000000 --- a/pkg/llmbridge/payload.go +++ /dev/null @@ -1,48 +0,0 @@ -package llmbridge - -import ( - "encoding/json" -) - -type Payload struct { - Model string `json:"model"` - MaxTokens int `json:"max_tokens"` - Temperature float64 `json:"temperature"` - System string `json:"system"` - Messages []Message `json:"messages"` -} - -type Message struct { - Role string `json:"role"` - Content []Content `json:"content"` -} - -type Content struct { - Type string `json:"type"` - Text string `json:"text"` -} - -// CreatePayload generates a JSON payload for the OpenAI API from the given -// tweetsContent string. 
This payload configures the model, max tokens, -// temperature and prompt to analyze the sentiment of the tweets without -// bias and summarize the overall sentiment. -func CreatePayload(tweetsContent string, model string, prompt string) ([]byte, error) { - payload := Payload{ - Model: model, - MaxTokens: 4000, - Temperature: 0, - System: prompt, - Messages: []Message{ - { - Role: "user", - Content: []Content{ - { - Type: "text", - Text: tweetsContent, - }, - }, - }, - }, - } - return json.Marshal(payload) -} diff --git a/pkg/llmbridge/sentiment.go b/pkg/llmbridge/sentiment.go deleted file mode 100644 index 799e50cc..00000000 --- a/pkg/llmbridge/sentiment.go +++ /dev/null @@ -1,404 +0,0 @@ -package llmbridge - -import ( - "bytes" - "encoding/json" - "errors" - "fmt" - "io" - "net/http" - "strings" - - "github.com/gotd/td/tg" - twitterscraper "github.com/masa-finance/masa-twitter-scraper" - "github.com/ollama/ollama/api" - "github.com/sirupsen/logrus" - - "github.com/masa-finance/masa-oracle/pkg/config" -) - -// AnalyzeSentimentTweets analyzes the sentiment of the provided tweets by sending them to the Claude API. -// It concatenates the tweets, creates a payload, sends a request to Claude, parses the response, -// and returns the concatenated tweets content, a sentiment summary, and any error. -func AnalyzeSentimentTweets(tweets []*twitterscraper.TweetResult, model string, prompt string) (string, string, error) { - // check if we are using claude or gpt, can add others easily - if strings.Contains(model, "claude-") { - client := NewClaudeClient() // Adjusted to call without arguments - - var validTweets []*twitterscraper.TweetResult - for _, tweet := range tweets { - if tweet.Error != nil { - logrus.WithError(tweet.Error).Warn("[-] Error in tweet") - continue - } - validTweets = append(validTweets, tweet) - } - - tweetsContent := ConcatenateTweets(validTweets) - payloadBytes, err := CreatePayload(tweetsContent, model, prompt) - if err != nil { - logrus.Errorf("[-] Error creating payload: %v", err) - return "", "", err - } - resp, err := client.SendRequest(payloadBytes) - if err != nil { - logrus.Errorf("[-] Error sending request to Claude API: %v", err) - return "", "", err - } - defer resp.Body.Close() - sentimentSummary, err := ParseResponse(resp) - if err != nil { - logrus.Errorf("[-] Error parsing response from Claude: %v", err) - return "", "", err - } - return tweetsContent, sentimentSummary, nil - - } else if strings.Contains(model, "gpt-") { - client := NewGPTClient() - tweetsContent := ConcatenateTweets(tweets) - sentimentSummary, err := client.SendRequest(tweetsContent, model, prompt) - if err != nil { - logrus.Errorf("[-] Error sending request to GPT: %v", err) - return "", "", err - } - return tweetsContent, sentimentSummary, nil - } else { - stream := false - tweetsContent := ConcatenateTweets(tweets) - - genReq := api.ChatRequest{ - Model: model, - Messages: []api.Message{ - {Role: "user", Content: tweetsContent}, - {Role: "assistant", Content: prompt}, - }, - Stream: &stream, - Options: map[string]interface{}{ - "temperature": 0.0, - "seed": 42, - "num_ctx": 4096, - }, - } - - requestJSON, err := json.Marshal(genReq) - if err != nil { - return "", "", err - } - uri := config.GetInstance().LLMChatUrl - if uri == "" { - return "", "", errors.New("ollama api url not set") - } - resp, err := http.Post(uri, "application/json", bytes.NewReader(requestJSON)) - if err != nil { - return "", "", err - } - defer resp.Body.Close() - body, err := io.ReadAll(resp.Body) - if err != nil { - 
return "", "", err - } - - var payload api.ChatResponse - err = json.Unmarshal(body, &payload) - if err != nil { - return "", "", err - } - - sentimentSummary := payload.Message.Content - return tweetsContent, SanitizeResponse(sentimentSummary), nil - } - -} - -// ConcatenateTweets concatenates the text of the provided tweets into a single string, -// with each tweet separated by a newline character. -func ConcatenateTweets(tweets []*twitterscraper.TweetResult) string { - var tweetsTexts []string - for _, t := range tweets { - tweetsTexts = append(tweetsTexts, t.Tweet.Text) - } - return strings.Join(tweetsTexts, "\n") -} - -// AnalyzeSentimentWeb analyzes the sentiment of the provided web page text data by sending them to the Claude API. -// It concatenates the text, creates a payload, sends a request to Claude, parses the response, -// and returns the concatenated content, a sentiment summary, and any error. -func AnalyzeSentimentWeb(data string, model string, prompt string) (string, string, error) { - // check if we are using claude or gpt, can add others easily - if strings.Contains(model, "claude-") { - client := NewClaudeClient() // Adjusted to call without arguments - payloadBytes, err := CreatePayload(data, model, prompt) - if err != nil { - logrus.Errorf("[-] Error creating payload: %v", err) - return "", "", err - } - resp, err := client.SendRequest(payloadBytes) - if err != nil { - logrus.Errorf("[-] Error sending request to Claude API: %v", err) - return "", "", err - } - defer resp.Body.Close() - sentimentSummary, err := ParseResponse(resp) - if err != nil { - logrus.Errorf("[-] Error parsing response from Claude: %v", err) - return "", "", err - } - return data, sentimentSummary, nil - - } else if strings.Contains(model, "gpt-") { - client := NewGPTClient() - sentimentSummary, err := client.SendRequest(data, model, prompt) - if err != nil { - logrus.Errorf("[-] Error sending request to GPT: %v", err) - return "", "", err - } - return data, sentimentSummary, nil - } else if strings.HasPrefix(model, "@") { - genReq := api.ChatRequest{ - Model: model, - Messages: []api.Message{ - {Role: "user", Content: data}, - {Role: "assistant", Content: prompt}, - }, - } - - requestJSON, err := json.Marshal(genReq) - if err != nil { - return "", "", err - } - cfUrl := config.GetInstance().LLMCfUrl - if cfUrl == "" { - return "", "", errors.New("cloudflare workers url not set") - } - uri := fmt.Sprintf("%s%s", cfUrl, model) - resp, err := http.Post(uri, "application/json", bytes.NewReader(requestJSON)) - if err != nil { - return "", "", err - } - defer resp.Body.Close() - body, err := io.ReadAll(resp.Body) - if err != nil { - return "", "", err - } - - var payload api.ChatResponse - err = json.Unmarshal(body, &payload) - if err != nil { - return "", "", err - } - - sentimentSummary := payload.Message.Content - return data, SanitizeResponse(sentimentSummary), nil - } else if strings.HasPrefix(model, "ollama/") { - stream := false - - genReq := api.ChatRequest{ - Model: strings.TrimPrefix(model, "ollama/"), - Messages: []api.Message{ - {Role: "assistant", Content: prompt}, - {Role: "user", Content: data}, - }, - Stream: &stream, - Options: map[string]interface{}{ - "temperature": 0.0, - "seed": 42, - "num_ctx": 4096, - }, - } - - requestJSON, err := json.Marshal(genReq) - if err != nil { - return "", "", err - } - uri := config.GetInstance().LLMChatUrl - if uri == "" { - return "", "", errors.New("ollama api url not set") - } - resp, err := http.Post(uri, "application/json", 
bytes.NewReader(requestJSON)) - if err != nil { - return "", "", err - } - defer resp.Body.Close() - body, err := io.ReadAll(resp.Body) - if err != nil { - return "", "", err - } - - var payload api.ChatResponse - err = json.Unmarshal(body, &payload) - if err != nil { - return "", "", err - } - - sentimentSummary := payload.Message.Content - return data, SanitizeResponse(sentimentSummary), nil - } else { - return "", "", errors.New("model not supported") - } -} - -// AnalyzeSentimentDiscord analyzes the sentiment of the provided Discord messages by sending them to the sentiment analysis API. -// It concatenates the messages, creates a payload, sends a request to the sentiment analysis service, parses the response, -// and returns the concatenated messages content, a sentiment summary, and any error. -func AnalyzeSentimentDiscord(messages []string, model string, prompt string) (string, string, error) { - // Concatenate messages with a newline character - messagesContent := strings.Join(messages, "\n") - - // The rest of the code follows the same pattern as AnalyzeSentimentTweets - // Replace with the actual logic you have for sending requests to your sentiment analysis service - // For example, if you're using the Claude API: - if strings.Contains(model, "claude-") { - client := NewClaudeClient() // Adjusted to call without arguments - payloadBytes, err := CreatePayload(messagesContent, model, prompt) - if err != nil { - logrus.Errorf("[-] Error creating payload: %v", err) - return "", "", err - } - resp, err := client.SendRequest(payloadBytes) - if err != nil { - logrus.Errorf("[-] Error sending request to Claude API: %v", err) - return "", "", err - } - defer resp.Body.Close() - sentimentSummary, err := ParseResponse(resp) - if err != nil { - logrus.Errorf("[-] Error parsing response from Claude: %v", err) - return "", "", err - } - return messagesContent, sentimentSummary, nil - - } else { - stream := false - - genReq := api.ChatRequest{ - Model: model, - Messages: []api.Message{ - {Role: "user", Content: messagesContent}, - {Role: "assistant", Content: prompt}, - }, - Stream: &stream, - Options: map[string]interface{}{ - "temperature": 0.0, - "seed": 42, - "num_ctx": 4096, - }, - } - - requestJSON, err := json.Marshal(genReq) - if err != nil { - logrus.Errorf("[-] Error marshaling request JSON: %v", err) - return "", "", err - } - uri := config.GetInstance().LLMChatUrl - if uri == "" { - errMsg := "ollama api url not set" - logrus.Errorf("[-] %v", errMsg) - return "", "", errors.New(errMsg) - } - resp, err := http.Post(uri, "application/json", bytes.NewReader(requestJSON)) - if err != nil { - logrus.Errorf("[-] Error sending request to API: %v", err) - return "", "", err - } - defer resp.Body.Close() - body, err := io.ReadAll(resp.Body) - if err != nil { - logrus.Errorf("[-] Error reading response body: %v", err) - return "", "", err - } - - var payload api.ChatResponse - err = json.Unmarshal(body, &payload) - if err != nil { - logrus.Errorf("[-] Error unmarshaling response JSON: %v", err) - return "", "", err - } - - sentimentSummary := payload.Message.Content - return messagesContent, SanitizeResponse(sentimentSummary), nil - } -} - -// AnalyzeSentimentTelegram analyzes the sentiment of the provided Telegram messages by sending them to the sentiment analysis API. 
-func AnalyzeSentimentTelegram(messages []*tg.Message, model string, prompt string) (string, string, error) {
-	// Concatenate messages with a newline character
-	var messageTexts []string
-	for _, msg := range messages {
-		if msg != nil {
-			messageTexts = append(messageTexts, msg.Message)
-		}
-	}
-	messagesContent := strings.Join(messageTexts, "\n")
-
-	// The rest of the code follows the same pattern as AnalyzeSentimentDiscord
-	if strings.Contains(model, "claude-") {
-		client := NewClaudeClient() // Adjusted to call without arguments
-		payloadBytes, err := CreatePayload(messagesContent, model, prompt)
-		if err != nil {
-			logrus.Errorf("Error creating payload: %v", err)
-			return "", "", err
-		}
-		resp, err := client.SendRequest(payloadBytes)
-		if err != nil {
-			logrus.Errorf("Error sending request to Claude API: %v", err)
-			return "", "", err
-		}
-		defer resp.Body.Close()
-		sentimentSummary, err := ParseResponse(resp)
-		if err != nil {
-			logrus.Errorf("Error parsing response from Claude: %v", err)
-			return "", "", err
-		}
-		return messagesContent, sentimentSummary, nil
-
-	} else {
-		stream := false
-
-		genReq := api.ChatRequest{
-			Model: model,
-			Messages: []api.Message{
-				{Role: "user", Content: messagesContent},
-				{Role: "assistant", Content: prompt},
-			},
-			Stream: &stream,
-			Options: map[string]interface{}{
-				"temperature": 0.0,
-				"seed": 42,
-				"num_ctx": 4096,
-			},
-		}
-
-		requestJSON, err := json.Marshal(genReq)
-		if err != nil {
-			logrus.Errorf("[-] Error marshaling request JSON: %v", err)
-			return "", "", err
-		}
-		uri := config.GetInstance().LLMChatUrl
-		if uri == "" {
-			err := errors.New("[-] ollama api url not set")
-			logrus.Errorf("%v", err)
-			return "", "", err
-		}
-		resp, err := http.Post(uri, "application/json", bytes.NewReader(requestJSON))
-		if err != nil {
-			logrus.Errorf("[-] Error sending request to API: %v", err)
-			return "", "", err
-		}
-		defer resp.Body.Close()
-		body, err := io.ReadAll(resp.Body)
-		if err != nil {
-			logrus.Errorf("[-] Error reading response body: %v", err)
-			return "", "", err
-		}
-
-		var payload api.ChatResponse
-		err = json.Unmarshal(body, &payload)
-		if err != nil {
-			logrus.Errorf("[-] Error unmarshaling response JSON: %v", err)
-			return "", "", err
-		}
-
-		sentimentSummary := payload.Message.Content
-		return messagesContent, SanitizeResponse(sentimentSummary), nil
-	}
-}
diff --git a/pkg/network/kdht.go b/pkg/network/kdht.go
index 49cab0e8..26006b7b 100644
--- a/pkg/network/kdht.go
+++ b/pkg/network/kdht.go
@@ -37,6 +37,7 @@ func EnableDHT(ctx context.Context, host host.Host, bootstrapNodes []multiaddr.M
 	options = append(options, dht.RoutingTableRefreshPeriod(time.Minute*5)) // Set refresh interval
 	options = append(options, dht.Mode(dht.ModeAutoServer))
 	options = append(options, dht.ProtocolPrefix(prefix))
+	// TODO: Document why the namespaced "db" validator is registered here.
 	options = append(options, dht.NamespacedValidator("db", dbValidator{}))
 
 	kademliaDHT, err := dht.New(ctx, host, options...)
diff --git a/pkg/pubsub/node_event_tracker.go b/pkg/pubsub/node_event_tracker.go
index 6abe70e3..022850ed 100644
--- a/pkg/pubsub/node_event_tracker.go
+++ b/pkg/pubsub/node_event_tracker.go
@@ -17,8 +17,10 @@ import (
 )
 
 type NodeEventTracker struct {
-	NodeDataChan chan *NodeData
-	nodeData     *SafeMap
+	NodeDataChan chan *NodeData
+	// TODO: Evaluate whether this map is needed, or whether it could live in the libp2p PeerStore metadata.
+	nodeData *SafeMap
+	// TODO: nodeDataFile appears to be unused; confirm and remove it if so.
 	nodeDataFile  string
 	ConnectBuffer map[string]ConnectBufferEntry
 	nodeVersion   string
@@ -138,6 +140,7 @@ func (net *NodeEventTracker) Connected(n network.Network, c network.Conn) {
 
 	nodeData, exists := net.nodeData.Get(peerID)
 	if !exists {
+		// TODO: Consider adding an entry here; we do not have the full NodeData yet, but we could at least record the peer.
 		return
 	} else {
 		if nodeData.IsActive {
@@ -172,6 +175,7 @@ func (net *NodeEventTracker) Disconnected(n network.Network, c network.Conn) {
 	nodeData, exists := net.nodeData.Get(peerID)
 	if !exists {
 		// this should never happen
+		// TODO: This can happen because we never add the entry in Connected (see the note above).
 		logrus.Debugf("Node data does not exist for disconnected node: %s", peerID)
 		return
 	}
diff --git a/pkg/scrapers/discord/getchannelmessages.go b/pkg/scrapers/discord/getchannelmessages.go
index 6f0034d6..ffadee82 100644
--- a/pkg/scrapers/discord/getchannelmessages.go
+++ b/pkg/scrapers/discord/getchannelmessages.go
@@ -8,8 +8,6 @@ import (
 	"net/http"
 	"os"
 	"strconv"
-
-	"github.com/masa-finance/masa-oracle/pkg/llmbridge"
 )
 
 // ChannelMessage represents a Discord channel message structure
@@ -76,27 +74,3 @@ func GetChannelMessages(channelID string, limit string, before string) ([]Channe
 
 	return messages, nil
 }
-
-// ScrapeDiscordMessagesForSentiment scrapes messages from a Discord channel and analyzes their sentiment.
-func ScrapeDiscordMessagesForSentiment(channelID string, model string, prompt string) (string, string, error) {
-	// Fetch messages from the Discord channel
-	messages, err := GetChannelMessages(channelID, "100", "")
-	if err != nil {
-		return "", "", fmt.Errorf("error fetching messages from Discord channel: %v", err)
-	}
-
-	// Extract the content of the messages
-	var messageContents []string
-	for _, message := range messages {
-		messageContents = append(messageContents, message.Content)
-	}
-
-	// Analyze the sentiment of the fetched messages
-	// Note: Ensure that llmbridge.AnalyzeSentimentDiscord is implemented and can handle the analysis
-	analysisPrompt, sentiment, err := llmbridge.AnalyzeSentimentDiscord(messageContents, model, prompt)
-	if err != nil {
-		return "", "", fmt.Errorf("error analyzing sentiment of Discord messages: %v", err)
-	}
-	return analysisPrompt, sentiment, nil
-
-}
diff --git a/pkg/scrapers/telegram/getchannelmessages.go b/pkg/scrapers/telegram/getchannelmessages.go
index 6e17eb99..0c493ee1 100644
--- a/pkg/scrapers/telegram/getchannelmessages.go
+++ b/pkg/scrapers/telegram/getchannelmessages.go
@@ -6,8 +6,6 @@ import (
 	"log"
 
 	"github.com/gotd/td/tg"
-
-	"github.com/masa-finance/masa-oracle/pkg/llmbridge"
 )
 
 // FetchChannelMessages Fetch messages from a group
@@ -67,20 +65,3 @@ func FetchChannelMessages(ctx context.Context, username string) ([]*tg.Message,
 
 	return messagesSlice, err // Return the slice of messages and any error
 }
-
-// ScrapeTelegramMessagesForSentiment scrapes messages from a Telegram channel and analyzes their sentiment.
-func ScrapeTelegramMessagesForSentiment(ctx context.Context, username string, model string, prompt string) (string, string, error) { - // Fetch messages from the Telegram channel - messages, err := FetchChannelMessages(ctx, username) - if err != nil { - return "", "", fmt.Errorf("error fetching messages from Telegram channel: %v", err) - } - - // Analyze the sentiment of the fetched messages - // Note: Ensure that llmbridge.AnalyzeSentimentTelegram is implemented and can handle the analysis - analysisPrompt, sentiment, err := llmbridge.AnalyzeSentimentTelegram(messages, model, prompt) - if err != nil { - return "", "", fmt.Errorf("error analyzing sentiment of Telegram messages: %v", err) - } - return analysisPrompt, sentiment, nil -} diff --git a/pkg/workers/handlers/discord.go b/pkg/workers/handlers/discord.go index 8683d677..6efbb387 100644 --- a/pkg/workers/handlers/discord.go +++ b/pkg/workers/handlers/discord.go @@ -6,12 +6,11 @@ import ( "github.com/sirupsen/logrus" "github.com/masa-finance/masa-oracle/pkg/scrapers/discord" - "github.com/masa-finance/masa-oracle/pkg/workers/types" + data_types "github.com/masa-finance/masa-oracle/pkg/workers/types" ) type DiscordProfileHandler struct{} type DiscordChannelHandler struct{} -type DiscordSentimentHandler struct{} type DiscordGuildHandler struct{} type DiscoreUserGuildsHandler struct{} @@ -49,24 +48,6 @@ func (h *DiscordChannelHandler) HandleWork(data []byte) data_types.WorkResponse return data_types.WorkResponse{Data: resp, RecordCount: len(resp)} } -// HandleWork implements the WorkHandler interface for DiscordSentimentHandler. -func (h *DiscordSentimentHandler) HandleWork(data []byte) data_types.WorkResponse { - logrus.Infof("[+] DiscordSentimentHandler %s", data) - dataMap, err := JsonBytesToMap(data) - if err != nil { - return data_types.WorkResponse{Error: fmt.Sprintf("unable to parse discord json data: %v", err)} - } - channelID := dataMap["channelID"].(string) - model := dataMap["model"].(string) - prompt := dataMap["prompt"].(string) - _, resp, err := discord.ScrapeDiscordMessagesForSentiment(channelID, model, prompt) - if err != nil { - return data_types.WorkResponse{Error: fmt.Sprintf("unable to get discord channel messages: %v", err)} - } - logrus.Infof("[+] DiscordSentimentHandler Work response for %s: %d records returned", data_types.DiscordSentiment, 1) - return data_types.WorkResponse{Data: resp, RecordCount: 1} -} - // HandleWork implements the WorkHandler interface for DiscordGuildHandler. func (h *DiscordGuildHandler) HandleWork(data []byte) data_types.WorkResponse { logrus.Infof("[+] DiscordGuildHandler %s", data) diff --git a/pkg/workers/handlers/telegram.go b/pkg/workers/handlers/telegram.go index e52f9f36..6b3cdd5c 100644 --- a/pkg/workers/handlers/telegram.go +++ b/pkg/workers/handlers/telegram.go @@ -10,27 +10,8 @@ import ( "github.com/masa-finance/masa-oracle/pkg/workers/types" ) -type TelegramSentimentHandler struct{} type TelegramChannelHandler struct{} -// HandleWork implements the WorkHandler interface for TelegramSentimentHandler. 
-func (h *TelegramSentimentHandler) HandleWork(data []byte) data_types.WorkResponse { - logrus.Infof("[+] TelegramSentimentHandler %s", data) - dataMap, err := JsonBytesToMap(data) - if err != nil { - return data_types.WorkResponse{Error: fmt.Sprintf("unable to parse telegram json data: %v", err)} - } - userName := dataMap["username"].(string) - model := dataMap["model"].(string) - prompt := dataMap["prompt"].(string) - _, resp, err := telegram.ScrapeTelegramMessagesForSentiment(context.Background(), userName, model, prompt) - if err != nil { - return data_types.WorkResponse{Error: fmt.Sprintf("unable to get telegram sentiment: %v", err)} - } - logrus.Infof("[+] TelegramSentimentHandler Work response for %s: %d records returned", data_types.TelegramSentiment, 1) - return data_types.WorkResponse{Data: resp, RecordCount: 1} -} - // HandleWork implements the WorkHandler interface for TelegramChannelHandler. func (h *TelegramChannelHandler) HandleWork(data []byte) data_types.WorkResponse { logrus.Infof("[+] TelegramChannelHandler %s", data) diff --git a/pkg/workers/worker_manager.go b/pkg/workers/worker_manager.go index abe8ee4f..1f5b1175 100644 --- a/pkg/workers/worker_manager.go +++ b/pkg/workers/worker_manager.go @@ -49,7 +49,6 @@ func NewWorkHandlerManager(opts ...WorkerOptionFunc) *WorkHandlerManager { if options.isDiscordScraperWorker { whm.addWorkHandler(data_types.Discord, &handlers.DiscordProfileHandler{}) whm.addWorkHandler(data_types.DiscordChannelMessages, &handlers.DiscordChannelHandler{}) - whm.addWorkHandler(data_types.DiscordSentiment, &handlers.DiscordSentimentHandler{}) whm.addWorkHandler(data_types.DiscordGuildChannels, &handlers.DiscordGuildHandler{}) whm.addWorkHandler(data_types.DiscordUserGuilds, &handlers.DiscoreUserGuildsHandler{}) }