test(scrapers): Add new Twitter scraper test suite #573

Open
wants to merge 10 commits into main
1 change: 0 additions & 1 deletion go.mod
@@ -216,7 +216,6 @@ require (
 	github.com/spaolacci/murmur3 v1.1.0 // indirect
 	github.com/spf13/afero v1.11.0 // indirect
 	github.com/spf13/cast v1.6.0 // indirect
-	github.com/stretchr/objx v0.5.2 // indirect
 	github.com/subosito/gotenv v1.6.0 // indirect
 	github.com/supranational/blst v0.3.11 // indirect
 	github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 // indirect
1 change: 0 additions & 1 deletion go.sum
@@ -713,7 +713,6 @@ github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+
 github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
 github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
 github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
-github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
 github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
 github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
 github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
2 changes: 1 addition & 1 deletion pkg/scrapers/twitter/followers.go
@@ -12,7 +12,7 @@ import (
 // ScrapeFollowersForProfile scrapes the profile and tweets of a specific Twitter user.
 // It takes the username as a parameter and returns the scraped profile information and an error if any.
 func ScrapeFollowersForProfile(username string, count int) ([]twitterscraper.Legacy, error) {
-	scraper := auth()
+	scraper := Auth()

 	if scraper == nil {
 		return nil, fmt.Errorf("there was an error authenticating with your Twitter credentials")
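
Note: because the scraper constructor is now exported as Auth, helpers such as ScrapeFollowersForProfile can be driven from outside the package (for example from the new test suite). A minimal, illustrative usage sketch, assuming the package under pkg/scrapers/twitter is named twitter and that Twitter credentials or a saved cookie session are configured; the username and count below are placeholders:

package main

import (
	"fmt"
	"log"

	"github.com/masa-finance/masa-oracle/pkg/scrapers/twitter"
)

func main() {
	// "someuser" and the count of 20 are placeholders; real credentials must be
	// available the way Auth() expects (application config plus an optional
	// twitter_cookies.json in the Masa directory).
	followers, err := twitter.ScrapeFollowersForProfile("someuser", 20)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("fetched %d follower records\n", len(followers))
}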
94 changes: 5 additions & 89 deletions pkg/scrapers/twitter/tweets.go
@@ -13,7 +13,6 @@ import (
 	"github.com/sirupsen/logrus"

 	"github.com/masa-finance/masa-oracle/pkg/config"
-	"github.com/masa-finance/masa-oracle/pkg/llmbridge"
 )

 type TweetResult struct {
@@ -24,7 +23,7 @@ type TweetResult struct {
 // auth initializes and returns a new Twitter scraper instance. It attempts to load cookies from a file to reuse an existing session.
 // If no valid session is found, it performs a login with credentials specified in the application's configuration.
 // On successful login, it saves the session cookies for future use. If the login fails, it returns nil.
-func auth() *twitterscraper.Scraper {
+func Auth() *twitterscraper.Scraper {
 	scraper := twitterscraper.New()
 	appConfig := config.GetInstance()
 	cookieFilePath := filepath.Join(appConfig.MasaDir, "twitter_cookies.json")
@@ -41,7 +40,7 @@ func auth() *twitterscraper.Scraper {
 	password := appConfig.TwitterPassword
 	twoFACode := appConfig.Twitter2FaCode

-	time.Sleep(500 * time.Millisecond)
+	time.Sleep(100 * time.Millisecond)

 	var err error
 	if twoFACode != "" {
@@ -55,7 +54,7 @@ func auth() *twitterscraper.Scraper {
 		return nil
 	}

-	time.Sleep(500 * time.Millisecond)
+	time.Sleep(100 * time.Millisecond)

 	if err = SaveCookies(scraper, cookieFilePath); err != nil {
 		logrus.WithError(err).Error("[-] Failed to save cookies")
@@ -69,59 +68,6 @@ func auth() *twitterscraper.Scraper {
 	return scraper
 }

-// ScrapeTweetsForSentiment is a function that scrapes tweets based on a given query, analyzes their sentiment using a specified model, and returns the sentiment analysis results.
-// Parameters:
-// - query: The search query string to find matching tweets.
-// - count: The maximum number of tweets to retrieve and analyze.
-// - model: The model to use for sentiment analysis.
-//
-// Returns:
-// - A string representing the sentiment analysis prompt.
-// - A string representing the sentiment analysis result.
-// - An error if the scraping or sentiment analysis process encounters any issues.
-func ScrapeTweetsForSentiment(query string, count int, model string) (string, string, error) {
-	scraper := auth()
-	var tweets []*TweetResult
-
-	if scraper == nil {
-		return "", "", fmt.Errorf("there was an error authenticating with your Twitter credentials")
-	}
-
-	// Set search mode
-	scraper.SetSearchMode(twitterscraper.SearchLatest)
-
-	// Perform the search with the specified query and count
-	for tweetResult := range scraper.SearchTweets(context.Background(), query, count) {
-		var tweet TweetResult
-		if tweetResult.Error != nil {
-			tweet = TweetResult{
-				Tweet: nil,
-				Error: tweetResult.Error,
-			}
-		} else {
-			tweet = TweetResult{
-				Tweet: &tweetResult.Tweet,
-				Error: nil,
-			}
-		}
-		tweets = append(tweets, &tweet)
-	}
-	sentimentPrompt := "Please perform a sentiment analysis on the following tweets, using an unbiased approach. Sentiment analysis involves identifying and categorizing opinions expressed in text, particularly to determine whether the writer's attitude towards a particular topic, product, etc., is positive, negative, or neutral. After analyzing, please provide a summary of the overall sentiment expressed in these tweets, including the proportion of positive, negative, and neutral sentiments if applicable."
-
-	twitterScraperTweets := make([]*twitterscraper.TweetResult, len(tweets))
-	for i, tweet := range tweets {
-		twitterScraperTweets[i] = &twitterscraper.TweetResult{
-			Tweet: *tweet.Tweet,
-			Error: tweet.Error,
-		}
-	}
-	prompt, sentiment, err := llmbridge.AnalyzeSentimentTweets(twitterScraperTweets, model, sentimentPrompt)
-	if err != nil {
-		return "", "", err
-	}
-	return prompt, sentiment, tweets[0].Error
-}
-
 // ScrapeTweetsByQuery performs a search on Twitter for tweets matching the specified query.
 // It fetches up to the specified count of tweets and returns a slice of Tweet pointers.
 // Parameters:
@@ -132,7 +78,7 @@ func ScrapeTweetsForSentiment(query string, count int, model string) (string, st
 // - A slice of pointers to twitterscraper.Tweet objects that match the search query.
 // - An error if the scraping process encounters any issues.
 func ScrapeTweetsByQuery(query string, count int) ([]*TweetResult, error) {
-	scraper := auth()
+	scraper := Auth()
 	var tweets []*TweetResult
 	var lastError error

@@ -163,40 +109,10 @@ func ScrapeTweetsByQuery(query string, count int) ([]*TweetResult, error) {
 	return tweets, nil
 }

-// ScrapeTweetsByTrends scrapes the current trending topics on Twitter.
-// It returns a slice of strings representing the trending topics.
-// If an error occurs during the scraping process, it returns an error.
-func ScrapeTweetsByTrends() ([]*TweetResult, error) {
-	scraper := auth()
-	var trendResults []*TweetResult
-
-	if scraper == nil {
-		return nil, fmt.Errorf("there was an error authenticating with your Twitter credentials")
-	}
-
-	// Set search mode
-	scraper.SetSearchMode(twitterscraper.SearchLatest)
-
-	trends, err := scraper.GetTrends()
-	if err != nil {
-		return nil, err
-	}
-
-	for _, trend := range trends {
-		trendResult := &TweetResult{
-			Tweet: &twitterscraper.Tweet{Text: trend},
-			Error: nil,
-		}
-		trendResults = append(trendResults, trendResult)
-	}
-
-	return trendResults, trendResults[0].Error
-}
-
 // ScrapeTweetsProfile scrapes the profile and tweets of a specific Twitter user.
 // It takes the username as a parameter and returns the scraped profile information and an error if any.
 func ScrapeTweetsProfile(username string) (twitterscraper.Profile, error) {
-	scraper := auth()
+	scraper := Auth()

 	if scraper == nil {
 		return twitterscraper.Profile{}, fmt.Errorf("there was an error authenticating with your Twitter credentials")
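
With Auth exported, an external test package can exercise the query path end to end. A minimal sketch of such a test, assuming a plain testing-style suite and that the package is named twitter; the query string and count are placeholders, and the test skips rather than fails when no Twitter session can be established:

package twitter_test

import (
	"testing"

	"github.com/masa-finance/masa-oracle/pkg/scrapers/twitter"
)

// TestScrapeTweetsByQuery is an illustrative external test: it reaches the
// now-exported Auth() through ScrapeTweetsByQuery and skips when no session
// is available, so unauthenticated CI runs are not marked as failures.
func TestScrapeTweetsByQuery(t *testing.T) {
	tweets, err := twitter.ScrapeTweetsByQuery("$MASA", 5)
	if err != nil {
		t.Skipf("skipping: could not authenticate or search: %v", err)
	}
	if len(tweets) == 0 {
		t.Log("no tweets returned for the query")
	}
	for _, tw := range tweets {
		if tw.Error != nil {
			t.Errorf("tweet result carried an error: %v", tw.Error)
		}
	}
}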
25 changes: 0 additions & 25 deletions pkg/tests/api_test.go

This file was deleted.

56 changes: 0 additions & 56 deletions pkg/tests/auth_test.go

This file was deleted.

18 changes: 0 additions & 18 deletions pkg/tests/chain_test.go

This file was deleted.
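
The three deleted files held the old package-level tests; per the PR title they are replaced by a dedicated Twitter scraper test suite, which is not visible in the file listing shown here. A hedged sketch of what a direct test of the exported Auth could look like under those assumptions, exercising only the contract stated in its doc comment (non-nil scraper on success, nil when login fails):

package twitter_test

import (
	"testing"

	"github.com/masa-finance/masa-oracle/pkg/scrapers/twitter"
)

// TestAuthReturnsScraper is purely illustrative; the real suite added by this
// PR may be structured differently. It skips when no credentials or saved
// cookie session are available rather than failing.
func TestAuthReturnsScraper(t *testing.T) {
	scraper := twitter.Auth()
	if scraper == nil {
		t.Skip("skipping: no valid Twitter session or credentials available")
	}
	t.Log("obtained an authenticated scraper; session cookies should now be saved for reuse")
}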
