Move request building into the loop to avoid shallow copy reuse between goroutines

Alex D. 2023-06-08 13:34:14 +00:00
parent 6489774ca1
commit d8c691af3c
Signed by: caskd
GPG Key ID: F92BA85F61F4C173
2 changed files with 32 additions and 32 deletions
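
The title refers to the fact that http.Request keeps its URL as a pointer (*url.URL) and its headers as a map (http.Header), so handing one pre-built request to several goroutines, or copying the struct value, still shares those underlying structures. A minimal, self-contained sketch of the pitfall follows; names and values are illustrative and not taken from this repository.

package main

import (
	"fmt"
	"net/http"
	"net/url"
)

func main() {
	// One request built up front, as the code did before this change.
	orig := &http.Request{
		URL:    &url.URL{Scheme: "https", Host: "example.com", Path: "/posts.json"},
		Header: make(http.Header),
	}
	orig.Header.Set("user-agent", "demo/1.0")

	// A shallow copy of the struct still points at the same *url.URL
	// and the same Header map as the original.
	cp := *orig

	// Mutating the copy is visible through the original.
	uq := cp.URL.Query()
	uq.Set("page", "2")
	cp.URL.RawQuery = uq.Encode()
	cp.Header.Set("x-demo", "set-via-copy")

	fmt.Println(orig.URL.RawQuery)         // "page=2"        (shared URL)
	fmt.Println(orig.Header.Get("x-demo")) // "set-via-copy"  (shared map)
}

Done concurrently, for instance if each spawned worker adjusts a per-page query parameter on what it assumes is its own request, this becomes a data race. Building a fresh *http.Request inside every loop iteration gives each goroutine its own URL and Header, which is what the diffs below do.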


@@ -122,24 +122,24 @@ func Query(uname string, api_key string, tags []string, j_max Jobs, tout time.Du
 	res_chan := make(chan result)
 	var r_arr []result
-	q := &http.Request{
-		URL: &url.URL{
-			Scheme: "https",
-			Host:   "e621.net",
-			Path:   "/posts.json",
-		},
-		Header: make(http.Header),
-	}
-	q.Header.Set("user-agent", "goboru/1.0 (caskd@redxen.eu, https://git.redxen.eu/caskd/goboru)")
-	uq := q.URL.Query()
-	uq.Set("tags", strings.Join(tags, " "))
-	if uname != "" && api_key != "" {
-		uq.Set("login", uname)
-		uq.Set("api_key", api_key)
-	}
-	q.URL.RawQuery = uq.Encode()
 	for pid, rpid, ppid := Page(0), Page(0), Page(0); ; {
+		q := &http.Request{
+			URL: &url.URL{
+				Scheme: "https",
+				Host:   "e621.net",
+				Path:   "/posts.json",
+			},
+			Header: make(http.Header),
+		}
+		q.Header.Set("user-agent", "goboru/1.0 (caskd@redxen.eu, https://git.redxen.eu/caskd/goboru)")
+		uq := q.URL.Query()
+		uq.Set("tags", strings.Join(tags, " "))
+		if uname != "" && api_key != "" {
+			uq.Set("login", uname)
+			uq.Set("api_key", api_key)
+		}
+		q.URL.RawQuery = uq.Encode()
 		go run_job(q, tout, pid, res_chan)
 		pid++


@@ -82,22 +82,22 @@ func Query(tags Tags, j_max Jobs) (mr []Media, err error) {
 	res_chan := make(chan result)
 	var r_arr []result
-	q := &http.Request{
-		URL: &url.URL{
-			Scheme: "https",
-			Host:   "gelbooru.com",
-			Path:   "/index.php",
-		},
-	}
-	qu := q.URL.Query()
-	qu.Set("page", "dapi")
-	qu.Set("s", "post")
-	qu.Set("q", "index")
-	qu.Set("json", "1")
-	qu.Set("tags", strings.Join(tags, " "))
-	q.URL.RawQuery = qu.Encode()
 	for pid, rpid, ppid := uint(0), uint(0), uint(0); ; {
+		q := &http.Request{
+			URL: &url.URL{
+				Scheme: "https",
+				Host:   "gelbooru.com",
+				Path:   "/index.php",
+			},
+		}
+		qu := q.URL.Query()
+		qu.Set("page", "dapi")
+		qu.Set("s", "post")
+		qu.Set("q", "index")
+		qu.Set("json", "1")
+		qu.Set("tags", strings.Join(tags, " "))
+		q.URL.RawQuery = qu.Encode()
 		if pid <= 200 { // API only allows to fetch up to 200 pages per query
 			go run_job(q, pid, res_chan)
 			pid++
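
For comparison only, and not what this commit does: since Go 1.13 the standard library also offers (*http.Request).Clone, which deep-copies the URL and the header map, so a single base request could be cloned per iteration instead of rebuilt. A hedged, self-contained sketch under that assumption; hostname, parameter name, and loop bounds are illustrative.

package main

import (
	"context"
	"fmt"
	"net/http"
	"net/url"
)

func main() {
	base := &http.Request{
		Method: http.MethodGet,
		URL:    &url.URL{Scheme: "https", Host: "example.com", Path: "/index.php"},
		Header: make(http.Header),
	}
	base.Header.Set("user-agent", "demo/1.0")

	for page := 0; page < 3; page++ {
		// Clone copies the URL and Header, so each iteration (or goroutine)
		// can set its own query parameters without touching base.
		req := base.Clone(context.Background())
		uq := req.URL.Query()
		uq.Set("pid", fmt.Sprint(page))
		req.URL.RawQuery = uq.Encode()
		fmt.Println(req.URL.String(), "| base unchanged:", base.URL.String())
	}
}

Rebuilding the request inside the loop, as the commit does, avoids the extra context argument and keeps each worker's request fully independent; cloning would mainly pay off if the base request were expensive to assemble.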