diff --git a/internal/webserver/handler_search.go b/internal/webserver/handler_search.go
index 941534c..3731d33 100644
--- a/internal/webserver/handler_search.go
+++ b/internal/webserver/handler_search.go
@@ -111,12 +111,8 @@ func (app *Application) Search(w http.ResponseWriter, r *http.Request) {
 	}
 
 	feed, err := app.Profile.NextPage(c, app.ActiveUser.ID)
-	if err != nil {
-		if errors.Is(err, persistence.ErrEndOfFeed) {
-			// TODO
-		} else {
-			panic(err)
-		}
+	if err != nil && !errors.Is(err, persistence.ErrEndOfFeed) {
+		panic(err)
 	}
 
 	data := NewSearchPageData()
diff --git a/internal/webserver/handler_timeline.go b/internal/webserver/handler_timeline.go
index d20e234..9b9d302 100644
--- a/internal/webserver/handler_timeline.go
+++ b/internal/webserver/handler_timeline.go
@@ -18,12 +18,8 @@ func (app *Application) Timeline(w http.ResponseWriter, r *http.Request) {
 	}
 
 	feed, err := app.Profile.NextPage(c, app.ActiveUser.ID)
-	if err != nil {
-		if errors.Is(err, persistence.ErrEndOfFeed) {
-			// TODO
-		} else {
-			panic(err)
-		}
+	if err != nil && !errors.Is(err, persistence.ErrEndOfFeed) {
+		panic(err)
 	}
 
 	if r.Header.Get("HX-Request") == "true" && c.CursorPosition == persistence.CURSOR_MIDDLE {
diff --git a/internal/webserver/handler_user_feed.go b/internal/webserver/handler_user_feed.go
index 4f18197..3a671ca 100644
--- a/internal/webserver/handler_user_feed.go
+++ b/internal/webserver/handler_user_feed.go
@@ -83,12 +83,8 @@ func (app *Application) UserFeed(w http.ResponseWriter, r *http.Request) {
 	}
 
 	feed, err := app.Profile.NextPage(c, app.ActiveUser.ID)
-	if err != nil {
-		if errors.Is(err, persistence.ErrEndOfFeed) {
-			// TODO
-		} else {
-			panic(err)
-		}
+	if err != nil && !errors.Is(err, persistence.ErrEndOfFeed) {
+		panic(err)
 	}
 
 	feed.Users[user.ID] = user
@@ -112,6 +108,24 @@ func (app *Application) UserFeed(w http.ResponseWriter, r *http.Request) {
 	}
 }
 
+type FollowsData struct {
+	Title        string
+	HeaderUserID scraper.UserID
+	UserIDs      []scraper.UserID
+}
+
+func NewFollowsData(users []scraper.User) (FollowsData, scraper.TweetTrove) {
+	trove := scraper.NewTweetTrove()
+	data := FollowsData{
+		UserIDs: []scraper.UserID{},
+	}
+	for _, u := range users {
+		trove.Users[u.ID] = u
+		data.UserIDs = append(data.UserIDs, u.ID)
+	}
+	return data, trove
+}
+
 func (app *Application) UserFollowees(w http.ResponseWriter, r *http.Request, user scraper.User) {
 	if r.URL.Query().Has("scrape") {
 		if app.IsScrapingDisabled {
@@ -131,11 +145,11 @@ func (app *Application) UserFollowees(w http.ResponseWriter, r *http.Request, us
 		go app.Profile.SaveTweetTrove(trove, true)
 	}
 
-	data, trove := NewListData(app.Profile.GetFollowees(user.ID))
+	data, trove := NewFollowsData(app.Profile.GetFollowees(user.ID))
 	trove.Users[user.ID] = user // Not loaded otherwise; needed to profile image in the login button on the sidebar
 	data.Title = fmt.Sprintf("Followed by @%s", user.Handle)
 	data.HeaderUserID = user.ID
-	app.buffered_render_page(w, "tpl/list.tpl", PageGlobalData{TweetTrove: trove}, data)
+	app.buffered_render_page(w, "tpl/follows.tpl", PageGlobalData{TweetTrove: trove}, data)
 }
 
 func (app *Application) UserFollowers(w http.ResponseWriter, r *http.Request, user scraper.User) {
@@ -157,9 +171,9 @@ func (app *Application) UserFollowers(w http.ResponseWriter, r *http.Request, us
 		go app.Profile.SaveTweetTrove(trove, true)
 	}
 
-	data, trove := NewListData(app.Profile.GetFollowers(user.ID))
+	data, trove := NewFollowsData(app.Profile.GetFollowers(user.ID))
 	trove.Users[user.ID] = user
 	data.Title = fmt.Sprintf("@%s's followers", user.Handle)
 	data.HeaderUserID = user.ID
-	app.buffered_render_page(w, "tpl/list.tpl", PageGlobalData{TweetTrove: trove}, data)
+	app.buffered_render_page(w, "tpl/follows.tpl", PageGlobalData{TweetTrove: trove}, data)
 }
diff --git a/internal/webserver/tpl/follows.tpl b/internal/webserver/tpl/follows.tpl
new file mode 100644
index 0000000..7e22f0f
--- /dev/null
+++ b/internal/webserver/tpl/follows.tpl
@@ -0,0 +1,11 @@
+{{define "title"}}{{.Title}}{{end}}
+
+{{define "main"}}
+	{{template "user-header" (user .HeaderUserID)}}
+