mirror of
https://codeberg.org/scip/kleingebaeck.git
synced 2025-12-16 20:11:01 +01:00
Enhancement/http (#32)
* added HTTP debug logging using `-d` or `DEBUGHTTP=1` (headers only)
This commit is contained in:
66
http.go
Normal file
66
http.go
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
/*
|
||||||
|
Copyright © 2023-2024 Thomas von Dein
|
||||||
|
|
||||||
|
This program is free software: you can redistribute it and/or modify
|
||||||
|
it under the terms of the GNU General Public License as published by
|
||||||
|
the Free Software Foundation, either version 3 of the License, or
|
||||||
|
(at your option) any later version.
|
||||||
|
|
||||||
|
This program is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
GNU General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU General Public License
|
||||||
|
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
*/
|
||||||
|
|
||||||
|
// FIXME: we could also incorporate
|
||||||
|
// https://github.com/kdkumawat/golang/blob/main/http-retry/http/retry-client.go
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"log/slog"
|
||||||
|
"math/rand"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httputil"
|
||||||
|
"os"
|
||||||
|
)
|
||||||
|
|
||||||
|
// loggingTransport is an http.RoundTripper that wraps
// http.DefaultTransport, adding slog debug output for every
// request/response pair (and full header dumps when DEBUGHTTP is set).
type loggingTransport struct{}
|
||||||
|
|
||||||
|
// letters is the alphabet getid draws from: uppercase hex-style digits.
var letters = []rune("ABCDEF0123456789")
|
||||||
|
|
||||||
|
func getid() string {
|
||||||
|
b := make([]rune, 8)
|
||||||
|
for i := range b {
|
||||||
|
b[i] = letters[rand.Intn(len(letters))]
|
||||||
|
}
|
||||||
|
return string(b)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *loggingTransport) RoundTrip(req *http.Request) (*http.Response, error) {
|
||||||
|
resp, err := http.DefaultTransport.RoundTrip(req)
|
||||||
|
|
||||||
|
// just requred for debugging
|
||||||
|
id := getid()
|
||||||
|
slog.Debug("REQUEST", "id", id, "uri", req.URL, "host", req.Host)
|
||||||
|
slog.Debug("RESPONSE", "id", id, "status", resp.StatusCode, "contentlength", resp.ContentLength)
|
||||||
|
|
||||||
|
if len(os.Getenv("DEBUGHTTP")) > 0 {
|
||||||
|
fmt.Println("DEBUGHTTP Request ===>")
|
||||||
|
bytes, _ := httputil.DumpRequestOut(req, true)
|
||||||
|
fmt.Printf("%s\n", bytes)
|
||||||
|
|
||||||
|
fmt.Println("<=== DEBUGHTTP Response")
|
||||||
|
for header, value := range resp.Header {
|
||||||
|
fmt.Printf("%s: %s\n", header, value)
|
||||||
|
}
|
||||||
|
fmt.Printf("Status: %s %s\nContent-Length: %d\n\n\n", resp.Proto, resp.Status, resp.ContentLength)
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
return resp, err
|
||||||
|
}
|
||||||
8
main.go
8
main.go
@@ -22,6 +22,7 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
"log/slog"
|
"log/slog"
|
||||||
|
"net/http"
|
||||||
"os"
|
"os"
|
||||||
"runtime/debug"
|
"runtime/debug"
|
||||||
|
|
||||||
@@ -111,17 +112,20 @@ func Main(w io.Writer) int {
|
|||||||
return Die(err)
|
return Die(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// used for all HTTP requests
|
||||||
|
client := &http.Client{Transport: &loggingTransport{}}
|
||||||
|
|
||||||
if len(conf.Adlinks) >= 1 {
|
if len(conf.Adlinks) >= 1 {
|
||||||
// directly backup ad listing[s]
|
// directly backup ad listing[s]
|
||||||
for _, uri := range conf.Adlinks {
|
for _, uri := range conf.Adlinks {
|
||||||
err := Scrape(conf, uri)
|
err := ScrapeAd(conf, uri, client)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return Die(err)
|
return Die(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else if conf.User > 0 {
|
} else if conf.User > 0 {
|
||||||
// backup all ads of the given user (via config or cmdline)
|
// backup all ads of the given user (via config or cmdline)
|
||||||
err := Start(conf)
|
err := ScrapeUser(conf, client)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return Die(err)
|
return Die(err)
|
||||||
}
|
}
|
||||||
|
|||||||
25
scrape.go
25
scrape.go
@@ -56,8 +56,7 @@ func Get(uri string, client *http.Client) (io.ReadCloser, error) {
|
|||||||
|
|
||||||
// extract links from all ad listing pages (that is: use pagination)
|
// extract links from all ad listing pages (that is: use pagination)
|
||||||
// and scrape every page
|
// and scrape every page
|
||||||
func Start(conf *Config) error {
|
func ScrapeUser(conf *Config, client *http.Client) error {
|
||||||
client := &http.Client{}
|
|
||||||
adlinks := []string{}
|
adlinks := []string{}
|
||||||
|
|
||||||
baseuri := fmt.Sprintf("%s%s?userId=%d", Baseuri, Listuri, conf.User)
|
baseuri := fmt.Sprintf("%s%s?userId=%d", Baseuri, Listuri, conf.User)
|
||||||
@@ -96,7 +95,7 @@ func Start(conf *Config) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
for i, adlink := range adlinks {
|
for i, adlink := range adlinks {
|
||||||
err := Scrape(conf, Baseuri+adlink)
|
err := ScrapeAd(conf, Baseuri+adlink, client)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -110,8 +109,7 @@ func Start(conf *Config) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// scrape an ad. uri is the full uri of the ad, dir is the basedir
|
// scrape an ad. uri is the full uri of the ad, dir is the basedir
|
||||||
func Scrape(c *Config, uri string) error {
|
func ScrapeAd(c *Config, uri string, client *http.Client) error {
|
||||||
client := &http.Client{}
|
|
||||||
ad := &Ad{}
|
ad := &Ad{}
|
||||||
|
|
||||||
// extract slug and id from uri
|
// extract slug and id from uri
|
||||||
@@ -155,10 +153,10 @@ func Scrape(c *Config, uri string) error {
|
|||||||
|
|
||||||
c.IncrAds()
|
c.IncrAds()
|
||||||
|
|
||||||
return ScrapeImages(c, ad, addir)
|
return ScrapeImages(c, ad, addir, client)
|
||||||
}
|
}
|
||||||
|
|
||||||
func ScrapeImages(c *Config, ad *Ad, addir string) error {
|
func ScrapeImages(c *Config, ad *Ad, addir string, client *http.Client) error {
|
||||||
// fetch images
|
// fetch images
|
||||||
img := 1
|
img := 1
|
||||||
g := new(errgroup.Group)
|
g := new(errgroup.Group)
|
||||||
@@ -167,7 +165,7 @@ func ScrapeImages(c *Config, ad *Ad, addir string) error {
|
|||||||
imguri := imguri
|
imguri := imguri
|
||||||
file := filepath.Join(c.Outdir, addir, fmt.Sprintf("%d.jpg", img))
|
file := filepath.Join(c.Outdir, addir, fmt.Sprintf("%d.jpg", img))
|
||||||
g.Go(func() error {
|
g.Go(func() error {
|
||||||
err := Getimage(imguri, file)
|
err := Getimage(imguri, file, client)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -188,9 +186,16 @@ func ScrapeImages(c *Config, ad *Ad, addir string) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// fetch an image
|
// fetch an image
|
||||||
func Getimage(uri, fileName string) error {
|
func Getimage(uri, fileName string, client *http.Client) error {
|
||||||
slog.Debug("fetching ad image", "uri", uri)
|
slog.Debug("fetching ad image", "uri", uri)
|
||||||
response, err := http.Get(uri)
|
req, err := http.NewRequest("GET", uri, nil)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
req.Header.Set("User-Agent", Useragent)
|
||||||
|
|
||||||
|
response, err := client.Do(req)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|||||||
Reference in New Issue
Block a user