/*
Copyright © 2023-2024 Thomas von Dein

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

package main

import (
	"bytes"
	"fmt"
	"log/slog"
	"path/filepath"
	"strconv"
	"strings"
	"time"

	"astuart.co/goq"
	"golang.org/x/sync/errgroup"
)

// ScrapeUser extracts the links to all ads from the user's ad listing
// pages (that is: it follows the pagination) and scrapes every ad found.
func ScrapeUser(fetch *Fetcher) error {
	adlinks := []string{}

	baseuri := fmt.Sprintf("%s%s?userId=%d", Baseuri, Listuri, fetch.Config.User)
	page := 1
	uri := baseuri
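
	// Baseuri and Listuri are package globals defined elsewhere; the
	// listing uri built above has the shape <Baseuri><Listuri>?userId=<id>,
	// and the loop below appends &pageNum=<n> to walk the pagination until
	// a page yields no more ad links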

	slog.Info("fetching ad pages", "user", fetch.Config.User)

	for {
		var index Index

		slog.Debug("fetching page", "uri", uri)

		body, err := fetch.Get(uri)
		if err != nil {
			return err
		}

		err = goq.NewDecoder(body).Decode(&index)
		body.Close() // close right away: a defer would pile up across iterations of this unbounded loop
		if err != nil {
			return fmt.Errorf("failed to goquery decode HTML index body: %w", err)
		}
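
		// Index is declared elsewhere in this package; goq fills it via
		// `goquery:"..."` struct tags. A minimal sketch of the idea, with a
		// purely hypothetical selector:
		//
		//	type Index struct {
		//		Links []string `goquery:"article.aditem a,[href]"`
		//	}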

		if len(index.Links) == 0 {
			break
		}

		slog.Debug("extracted ad links", "count", len(index.Links))

		for _, href := range index.Links {
			adlinks = append(adlinks, href)
			slog.Debug("ad link", "href", href)
		}

		page++
		uri = baseuri + "&pageNum=" + strconv.Itoa(page)
	}

	for index, adlink := range adlinks {
		err := ScrapeAd(fetch, Baseuri+adlink)
		if err != nil {
			return err
		}
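
		// honor the configured ad limit, if any (Limit == 0 means unlimited)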
		if fetch.Config.Limit > 0 && index == fetch.Config.Limit-1 {
			break
		}
	}

	return nil
}

// ScrapeAd scrapes one ad. uri is the full uri of the ad page.
func ScrapeAd(fetch *Fetcher, uri string) error {
	advertisement := &Ad{}

	// extract slug and id from uri
	uriparts := strings.Split(uri, "/")
	if len(uriparts) < SlugURIPartNum {
		return fmt.Errorf("invalid uri: %s", uri)
	}
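
	// for illustration (path layout assumed): an uri such as
	// https://host/s-anzeige/<slug>/<id> splits into
	// ["https:", "", "host", "s-anzeige", "<slug>", "<id>"],
	// which is why parts 4 and 5 are picked below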
	advertisement.Slug = uriparts[4]
	advertisement.ID = uriparts[5]

	// get the ad
	slog.Debug("fetching ad page", "uri", uri)

	body, err := fetch.Get(uri)
	if err != nil {
		return err
	}
	defer body.Close()

	// extract ad contents with goquery/goq
	err = goq.NewDecoder(body).Decode(&advertisement)
	if err != nil {
		return fmt.Errorf("failed to goquery decode HTML ad body: %w", err)
	}
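
	// flatten the category tree into a single readable string, e.g.
	// ["Family", "Toys"] becomes "Family => Toys" (example values only)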
	if len(advertisement.CategoryTree) > 0 {
		advertisement.Category = strings.Join(advertisement.CategoryTree, " => ")
	}

	if advertisement.Incomplete() {
		slog.Debug("got ad", "ad", advertisement)

		return fmt.Errorf("could not extract ad data from page, got empty struct")
	}

	advertisement.CalculateExpire()
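
	// CheckAdVisited (defined elsewhere) decides by slug whether this ad
	// has already been handled; if so, skip it silently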
	proceed := CheckAdVisited(fetch.Config, advertisement.Slug)
	if !proceed {
		return nil
	}

	// write listing
	addir, err := WriteAd(fetch.Config, advertisement)
	if err != nil {
		return err
	}

	slog.Debug("extracted ad listing", "ad", advertisement)

	fetch.Config.IncrAds()

	return ScrapeImages(fetch, advertisement, addir)
}
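
// ScrapeImages downloads all images of an ad concurrently, one errgroup
// goroutine per image, throttling each download and skipping images for
// which a similar one already exists on disk.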
func ScrapeImages(fetch *Fetcher, advertisement *Ad, addir string) error {
	// fetch images
	img := 1
	adpath := filepath.Join(fetch.Config.Outdir, addir)

	// scan existing images, if any
	cache, err := ReadImages(adpath, fetch.Config.ForceDownload)
	if err != nil {
		return err
	}

	egroup := new(errgroup.Group)

	for _, imguri := range advertisement.Images {
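		// re-declare the loop variable so each closure below captures its
		// own copy (required before the Go 1.22 loop variable semantics)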
		imguri := imguri
		file := filepath.Join(adpath, fmt.Sprintf("%d.jpg", img))

		egroup.Go(func() error {
			// wait a little
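			// (GetThrottleTime is defined elsewhere; sleeping here presumably
			// keeps the concurrent downloads from tripping server rate limits)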

			throttle := GetThrottleTime()
			time.Sleep(throttle)

			body, err := fetch.Getimage(imguri)
			if err != nil {
				return err
			}
			// assumption: Getimage, like fetch.Get above, returns an
			// io.ReadCloser, so release it when this goroutine finishes
			defer body.Close()

			buf := new(bytes.Buffer)

			_, err = buf.ReadFrom(body)
			if err != nil {
				return fmt.Errorf("failed to read from image buffer: %w", err)
			}
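
			// the image is buffered in memory so its hash can be checked
			// against the cache before anything is written to disk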
			reader := bytes.NewReader(buf.Bytes())

			image := NewImage(reader, file, imguri)
			err = image.CalcHash()
			if err != nil {
				return err
			}

			if !fetch.Config.ForceDownload {
				if image.SimilarExists(cache) {
					slog.Debug("similar image exists, not written", "uri", image.URI)

					return nil
				}
			}
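
			// CalcHash consumed the reader above, so rewind it before
			// writing the image to disk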
			_, err = reader.Seek(0, 0)
			if err != nil {
				return fmt.Errorf("failed to seek(0) on image reader: %w", err)
			}

			err = WriteImage(file, reader)
			if err != nil {
				return err
			}

			slog.Debug("wrote image", "image", image, "size", buf.Len(), "throttle", throttle)

			return nil
		})

		img++
	}
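
	// Wait blocks until all download goroutines have returned and yields
	// the first non-nil error among them, if any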
	if err := egroup.Wait(); err != nil {
		return fmt.Errorf("failed to finalize error waitgroup: %w", err)
	}

	fetch.Config.IncrImgs(len(advertisement.Images))

	return nil
}