// offline-twitter/pkg/scraper/user_feed.go
package scraper

import (
	"errors"
	"fmt"
)
2021-06-16 19:31:27 -07:00
/**
* Get a list of tweets that appear on the given user's page, along with a list of associated
* users for any retweets.
*
* args:
* - user_id: the ID of the user whomst feed to fetch
* - min_tweets: get at least this many tweets, if there are any
*
* returns: a slice of Tweets, Retweets, and Users
*/
func GetUserFeedFor(user_id UserID, min_tweets int) (trove TweetTrove, err error) {
2023-02-17 13:07:12 -05:00
tweet_response, err := the_api.GetFeedFor(user_id, "")
2021-06-16 19:31:27 -07:00
if err != nil {
2022-03-06 19:27:30 -08:00
err = fmt.Errorf("Error calling API to fetch user feed: UserID %d\n %w", user_id, err)
2021-06-16 19:31:27 -07:00
return
}
2021-11-06 13:37:46 -07:00
if len(tweet_response.GlobalObjects.Tweets) < min_tweets && tweet_response.GetCursor() != "" {
2023-02-17 13:07:12 -05:00
err = the_api.GetMoreTweetsFromFeed(user_id, &tweet_response, min_tweets)
2022-03-06 19:27:30 -08:00
if err != nil && !errors.Is(err, END_OF_FEED) {
2021-06-16 19:31:27 -07:00
return
}
}
return tweet_response.ToTweetTrove()
2021-06-16 19:31:27 -07:00
}
func GetUserFeedGraphqlFor(user_id UserID, min_tweets int) (trove TweetTrove, err error) {
return the_api.GetPaginatedQuery(PaginatedUserFeed{user_id}, min_tweets)
}