Create DB queries for media attachments; refactor them out of the tweet query into separate functions
This commit is contained in:
parent
81392996bb
commit
222e681836
59
persistence/media_queries.go
Normal file
59
persistence/media_queries.go
Normal file
@ -0,0 +1,59 @@
|
||||
package persistence
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
|
||||
"offline_twitter/scraper"
|
||||
)
|
||||
|
||||
/**
|
||||
* Get the list of images for a tweet
|
||||
*/
|
||||
func (p Profile) GetImagesForTweet(t scraper.Tweet) (imgs []scraper.Image, err error) {
|
||||
stmt, err := p.DB.Prepare("select filename, is_downloaded from images where tweet_id=?")
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
defer stmt.Close()
|
||||
rows, err := stmt.Query(t.ID)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
var img scraper.Image
|
||||
|
||||
for rows.Next() {
|
||||
err = rows.Scan(&img.Filename, &img.IsDownloaded)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
img.TweetID = t.ID
|
||||
imgs = append(imgs, img)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Get the list of videos for a tweet
|
||||
*/
|
||||
func (p Profile) GetVideosForTweet(t scraper.Tweet) (vids []scraper.Video, err error) {
|
||||
stmt, err := p.DB.Prepare("select filename, is_downloaded from videos where tweet_id=?")
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
defer stmt.Close()
|
||||
rows, err := stmt.Query(t.ID)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
var vid scraper.Video
|
||||
for rows.Next() {
|
||||
err = rows.Scan(&vid.Filename, &vid.IsDownloaded)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
vid.TweetID = t.ID
|
||||
vids = append(vids, vid)
|
||||
}
|
||||
return
|
||||
}
|
@ -79,56 +79,6 @@ func (p Profile) IsTweetInDatabase(id scraper.TweetID) bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (p Profile) attach_images(t *scraper.Tweet) error {
|
||||
println("Attaching images")
|
||||
stmt, err := p.DB.Prepare("select filename, is_downloaded from images where tweet_id = ?")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer stmt.Close()
|
||||
rows, err := stmt.Query(t.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var filename string
|
||||
var is_downloaded bool
|
||||
for rows.Next() {
|
||||
err = rows.Scan(&filename, &is_downloaded)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
new_img := scraper.Image{TweetID: t.ID, Filename: filename, IsDownloaded: is_downloaded}
|
||||
t.Images = append(t.Images, new_img)
|
||||
fmt.Printf("%v\n", t.Images)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p Profile) attach_videos(t *scraper.Tweet) error {
|
||||
println("Attaching videos")
|
||||
stmt, err := p.DB.Prepare("select filename, is_downloaded from videos where tweet_id = ?")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer stmt.Close()
|
||||
rows, err := stmt.Query(t.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var filename string
|
||||
var is_downloaded bool
|
||||
for rows.Next() {
|
||||
err = rows.Scan(&filename, &is_downloaded)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
new_video := scraper.Video{TweetID: t.ID, Filename: filename, IsDownloaded: is_downloaded}
|
||||
t.Videos = append(t.Videos, new_video)
|
||||
fmt.Printf("%v\n", t.Videos)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p Profile) attach_urls(t *scraper.Tweet) error {
|
||||
println("Attaching urls")
|
||||
stmt, err := p.DB.Prepare("select text from urls where tweet_id = ?")
|
||||
@ -187,14 +137,18 @@ func (p Profile) GetTweetById(id scraper.TweetID) (scraper.Tweet, error) {
|
||||
t.ID = scraper.TweetID(fmt.Sprint(tweet_id))
|
||||
t.UserID = scraper.UserID(fmt.Sprint(user_id))
|
||||
|
||||
err = p.attach_images(&t)
|
||||
imgs, err := p.GetImagesForTweet(t)
|
||||
if err != nil {
|
||||
return t, err
|
||||
}
|
||||
err = p.attach_videos(&t)
|
||||
t.Images = imgs
|
||||
|
||||
vids, err := p.GetVideosForTweet(t)
|
||||
if err != nil {
|
||||
return t, err
|
||||
}
|
||||
t.Videos = vids
|
||||
|
||||
err = p.attach_urls(&t)
|
||||
return t, err
|
||||
}
|
||||
|
Loading…
x
Reference in New Issue
Block a user