// Command-line tool for scraping Twitter users and tweets and saving them
// into a local data profile (database + downloaded content).
package main
import (
	"flag"
	"fmt"
	"os"

	log "github.com/sirupsen/logrus"

	"offline_twitter/persistence"
	"offline_twitter/scraper"
)
/**
* Global variable referencing the open data profile
*/
var profile persistence.Profile
2022-01-04 12:57:43 -05:00
var version_string string
2021-08-02 14:46:06 -07:00
/**
* Main method
*/
func main() {
profile_dir := flag.String("profile", ".", "")
flag.StringVar(profile_dir, "p", ".", "")
2022-01-04 12:57:43 -05:00
show_version_flag := flag.Bool("version", false, "")
flag.BoolVar(show_version_flag, "v", false, "")
how_many := flag.Int("n", 50, "")
flag.IntVar(how_many, "number", 50, "")
var default_log_level string
if version_string == "" {
default_log_level = "debug"
} else {
default_log_level = "info"
}
log_level := flag.String("log-level", default_log_level, "")
help := flag.Bool("help", false, "")
flag.BoolVar(help, "h", false, "")
flag.Usage = func() {
die("", true, 1)
}
flag.Parse()
args := flag.Args()
2022-01-04 12:57:43 -05:00
if *show_version_flag {
if version_string == "" {
fmt.Println("Development version")
} else {
fmt.Println("v" + version_string)
}
os.Exit(0)
}
if *help {
die("", true, 0)
}
logging_level, err := log.ParseLevel(*log_level)
if err != nil {
die(err.Error(), false, 1)
}
log.SetLevel(logging_level)
if len(args) < 2 {
2022-02-28 16:06:58 -08:00
if len(args) == 1 && args[0] == "list_followed" {
// "list_followed" doesn't need a target, so create a fake second arg
args = append(args, "")
} else {
die("", true, 1)
}
}
operation := args[0]
target := args[1]
2021-08-02 14:46:06 -07:00
if operation == "create_profile" {
create_profile(target)
2021-08-02 14:46:06 -07:00
return
}
profile, err = persistence.LoadProfile(*profile_dir)
2021-08-02 14:46:06 -07:00
if err != nil {
2022-03-06 17:07:05 -08:00
die(fmt.Sprintf("Could not load profile: %s", err.Error()), true, 2)
2021-08-02 14:46:06 -07:00
}
2022-03-06 17:07:05 -08:00
switch operation {
2021-08-02 14:46:06 -07:00
case "create_profile":
create_profile(target)
case "fetch_user":
fetch_user(scraper.UserHandle(target))
2021-08-22 18:29:58 -07:00
case "download_user_content":
download_user_content(scraper.UserHandle(target))
2021-08-02 14:46:06 -07:00
case "fetch_tweet_only":
fetch_tweet_only(target)
2021-08-22 17:55:21 -07:00
case "fetch_tweet":
fetch_tweet_conversation(target)
2021-08-19 12:54:08 -07:00
case "get_user_tweets":
fetch_user_feed(target, *how_many)
case "get_user_tweets_all":
fetch_user_feed(target, 999999999)
case "download_tweet_content":
download_tweet_content(target)
2021-11-06 14:50:39 -07:00
case "search":
search(target)
case "follow":
follow_user(target, true)
case "unfollow":
follow_user(target, false)
2022-02-28 16:06:58 -08:00
case "list_followed":
list_followed()
2021-08-02 14:46:06 -07:00
default:
2022-03-06 17:07:05 -08:00
die(fmt.Sprintf("Invalid operation: %s", operation), true, 3)
2021-08-02 14:46:06 -07:00
}
}
// create_profile initializes a new data directory.
//
// args:
//   - target_dir: the location of the new data dir.
//
// Panics if the profile cannot be created.
func create_profile(target_dir string) {
	if _, err := persistence.NewProfile(target_dir); err != nil {
		panic(err)
	}
}
/**
* Scrape a user and save it in the database.
*
* args:
* - handle: e.g., "michaelmalice"
*/
func fetch_user(handle scraper.UserHandle) {
user, err := scraper.GetUser(handle)
if err != nil {
die(err.Error(), false, -1)
}
log.Debug(user)
2021-08-02 14:46:06 -07:00
2022-02-26 22:18:41 -08:00
err = profile.SaveUser(&user)
2021-08-02 14:46:06 -07:00
if err != nil {
2022-03-06 17:07:05 -08:00
die(fmt.Sprintf("Error saving user: %s", err.Error()), false, 4)
2021-08-02 14:46:06 -07:00
}
2022-03-06 17:07:05 -08:00
download_user_content(handle)
happy_exit("Saved the user")
2021-08-02 14:46:06 -07:00
}
/**
* Scrape a single tweet and save it in the database.
*
* args:
* - tweet_url: e.g., "https://twitter.com/michaelmalice/status/1395882872729477131"
*/
func fetch_tweet_only(tweet_identifier string) {
tweet_id, err := extract_id_from(tweet_identifier)
2021-08-02 14:46:06 -07:00
if err != nil {
die(err.Error(), false, -1)
}
tweet, err := scraper.GetTweet(tweet_id)
if err != nil {
2022-03-06 17:07:05 -08:00
die(fmt.Sprintf("Error fetching tweet: %s", err.Error()), false, -1)
2021-08-02 14:46:06 -07:00
}
log.Debug(tweet)
2021-08-02 14:46:06 -07:00
err = profile.SaveTweet(tweet)
if err != nil {
2022-03-06 17:07:05 -08:00
die(fmt.Sprintf("Error saving tweet: %s", err.Error()), false, 4)
2021-08-02 14:46:06 -07:00
}
happy_exit("Saved the tweet")
2021-08-02 14:46:06 -07:00
}
2021-08-22 17:55:21 -07:00
/**
* Scrape a tweet and all associated info, and save it in the database.
*
* args:
* - tweet_url: e.g., "https://twitter.com/michaelmalice/status/1395882872729477131"
*/
func fetch_tweet_conversation(tweet_identifier string) {
tweet_id, err := extract_id_from(tweet_identifier)
2021-08-22 17:55:21 -07:00
if err != nil {
die(err.Error(), false, -1)
}
trove, err := scraper.GetTweetFull(tweet_id)
2021-08-22 17:55:21 -07:00
if err != nil {
die(err.Error(), false, -1)
}
profile.SaveTweetTrove(trove)
2021-08-22 17:55:21 -07:00
happy_exit(fmt.Sprintf("Saved %d tweets and %d users", len(trove.Tweets), len(trove.Users)))
2021-08-22 17:55:21 -07:00
}
2021-08-19 12:54:08 -07:00
/**
* Scrape a user feed and get a big blob of tweets and retweets. Get 50 tweets.
*
* args:
* - handle: the user handle to get
*/
func fetch_user_feed(handle string, how_many int) {
2021-08-19 12:54:08 -07:00
user, err := profile.GetUserByHandle(scraper.UserHandle(handle))
if err != nil {
die(fmt.Sprintf("Error getting user: %s\n %s", handle, err.Error()), false, -1)
2021-08-19 12:54:08 -07:00
}
trove, err := scraper.GetUserFeedGraphqlFor(user.ID, how_many)
2021-08-19 12:54:08 -07:00
if err != nil {
die(fmt.Sprintf("Error scraping feed: %s\n %s", handle, err.Error()), false, -2)
2021-08-19 12:54:08 -07:00
}
profile.SaveTweetTrove(trove)
2021-08-19 12:54:08 -07:00
happy_exit(fmt.Sprintf("Saved %d tweets, %d retweets and %d users", len(trove.Tweets), len(trove.Retweets), len(trove.Users)))
2021-08-19 12:54:08 -07:00
}
func download_tweet_content(tweet_identifier string) {
tweet_id, err := extract_id_from(tweet_identifier)
if err != nil {
die(err.Error(), false, -1)
}
tweet, err := profile.GetTweetById(tweet_id)
if err != nil {
2021-11-06 14:50:39 -07:00
panic(fmt.Sprintf("Couldn't get tweet (ID %d) from database: %s", tweet_id, err.Error()))
}
err = profile.DownloadTweetContentFor(&tweet)
if err != nil {
panic("Error getting content: " + err.Error())
}
}
// download_user_content downloads the content for a user that is already
// in the database.
//
// args:
//   - handle: the handle of the user to download content for.
func download_user_content(handle scraper.UserHandle) {
	user, err := profile.GetUserByHandle(handle)
	if err != nil {
		panic("Couldn't get the user from database: " + err.Error())
	}

	if err = profile.DownloadUserContentFor(&user); err != nil {
		panic("Error getting content: " + err.Error())
	}
}
2021-11-06 14:50:39 -07:00
func search(query string) {
trove, err := scraper.Search(query, 1000)
2021-11-06 14:50:39 -07:00
if err != nil {
2022-03-06 17:07:05 -08:00
die(fmt.Sprintf("Error scraping search results: %s", err.Error()), false, -100)
2021-11-06 14:50:39 -07:00
}
profile.SaveTweetTrove(trove)
2021-11-06 14:50:39 -07:00
happy_exit(fmt.Sprintf("Saved %d tweets and %d users", len(trove.Tweets), len(trove.Users)))
2021-11-06 14:50:39 -07:00
}
// follow_user marks (or unmarks) a user in the database as followed.
//
// args:
//   - handle: the user to (un)follow; must already be in the database
//   - is_followed: true to follow, false to unfollow
func follow_user(handle string, is_followed bool) {
	user, err := profile.GetUserByHandle(scraper.UserHandle(handle))
	if err != nil {
		panic("Couldn't get the user from database: " + err.Error())
	}

	profile.SetUserFollowed(&user, is_followed)

	verb := "Unfollowed"
	if is_followed {
		verb = "Followed"
	}
	happy_exit(verb + " user: " + handle)
}
2022-02-28 16:06:58 -08:00
func list_followed() {
for _, handle := range profile.GetAllFollowedUsers() {
fmt.Println(handle)
}
}