stash/pkg/scraper/json.go
SmallCoccinelle e513b6ffa5
Cache and reuse the scraper HTTP client (#1855)
* Add Cookies directly to the request

Rather than maintaining a cookie jar on a one-shot HTTP client, maintain
the jar ourselves: make a new jar, then use it to select the right
cookies.

The cookies are set on the request rather than on the client. This retains
the current behavior, since the client is thrown away after each use anyway
(see the sketch below).

This patch also paves the way for lifting the HTTP client itself out over time.
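
For illustration, a minimal sketch of the request-level cookie handling,
using hypothetical names (applyCookies, configCookies); the actual code may
differ:

package scraper

import (
	"net/http"
	"net/http/cookiejar"
	"net/url"
)

// applyCookies is a hypothetical helper: it builds a short-lived jar, seeds
// it with the scraper-configured cookies, and copies the matching cookies
// onto the request itself instead of installing the jar on a one-shot client.
func applyCookies(req *http.Request, u *url.URL, configCookies []*http.Cookie) error {
	jar, err := cookiejar.New(nil)
	if err != nil {
		return err
	}
	jar.SetCookies(u, configCookies)
	for _, c := range jar.Cookies(u) {
		req.AddCookie(c)
	}
	return nil
}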

* Introduce a cached scraper HTTP client

The scraper cache is augmented with an *http.Client. These are safe for
concurrent use, so the pointer can safely be passed around. Push this
into scraper configurations where applicable, next to the txnManagers.

When we issue a loadURL request, do so on the cached *http.Client, which
reuses any existing idle connections.
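
As a rough sketch of the shape (field names here are illustrative, not the
actual struct):

package scraper

import "net/http"

// Cache-style sketch: one shared client, safe for concurrent use.
type scraperCache struct {
	client *http.Client // reused across scrapes; idle connections are pooled
	// ... scraper configs, txnManager, etc.
}

// loadURL-style helpers then issue requests on c.client instead of
// constructing a one-shot http.Client per call.
func (c *scraperCache) do(req *http.Request) (*http.Response, error) {
	return c.client.Do(req)
}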

* Set MaxIdleConnsPerHost. Closes #1850

We allow up to 8 idle connections to a single host. This should make
concurrent operations toward the same host reuse connections, even at
sizeable concurrency (see the sketch below).

The number isn't bumped excessively high. We should probably limit
concurrency toward a single site anyway, since many concurrent goroutines
issuing requests at the same time could easily overrun a site with queries.
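
Roughly (the exact transport wiring in the code may differ):

import "net/http"

// newTransport is an illustrative helper: clone the default transport and
// raise the per-host idle pool so concurrent scrapes of one site can reuse
// connections.
func newTransport() *http.Transport {
	t := http.DefaultTransport.(*http.Transport).Clone()
	t.MaxIdleConnsPerHost = 8
	return t
}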

* Reinstate driverOptions / useCDP check

Use De Morgan's laws to invert the logic and exit early. Fixes breaking
tests.
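
Illustratively (the condition and helpers below are stand-ins, not the real
code):

// Before: the CDP branch was nested under a positive check:
//
//	if driverOptions != nil && driverOptions.UseCDP {
//		// ... CDP path
//	}
//
// After: invert with De Morgan's laws and exit the non-CDP case early:
if driverOptions == nil || !driverOptions.UseCDP {
	return loadPlain(url) // hypothetical non-CDP path
}
return loadWithCDP(url) // hypothetical CDP path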

* Documentation fixup.

* Use the scraper http.Client when fetching images

Fold image fetchers onto the cached scraper http.Client as well. This gives
the scraper a single http.Client cache for all of its operations (sketched
below).

Thread the client upwards to the relevant attachment points: either the
cache, or a stash_box instance, which is extended to include a pointer
to the client.

Style roughly follows that of txnManagers.
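
The shape of the image-fetching side is roughly (names are hypothetical):

package scraper

import (
	"context"
	"io"
	"net/http"
)

// fetchImage is a hypothetical sketch: the caller passes the cached scraper
// client instead of the function building (or defaulting to) its own.
func fetchImage(ctx context.Context, client *http.Client, url string) ([]byte, error) {
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
	if err != nil {
		return nil, err
	}
	resp, err := client.Do(req) // reuses idle connections from the shared client
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	return io.ReadAll(resp.Body)
}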

* Use the same http.Client as the GraphQL client uses

Rather than using http.DefaultClient, use the same client the GraphQL
client uses in the stash_box subsystem. This localizes the client used in
the subsystem to the constructing New.. call.
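
Sketch of the idea, with hypothetical type and constructor names (the real
stash_box client differs):

package stashbox

import "net/http"

// Client is a sketch of a stash_box client holding the shared http.Client.
type Client struct {
	client *http.Client
	// graphql client field elided
}

// NewClient localizes client selection in the constructor: the same
// *http.Client would be handed to the GraphQL client and kept for plain
// HTTP calls, instead of falling back to http.DefaultClient.
func NewClient(endpoint string, client *http.Client) *Client {
	// the GraphQL client would be constructed against endpoint here; elided
	// because the exact GraphQL client API isn't shown in this commit.
	return &Client{client: client}
}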

* Hoist HTTP client construction

Create a function for initializing the HTTP client we use. While here,
hoist magic numbers into constants. Introduce a proper static redirect
error and use it in the client code as well.
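
A condensed sketch of what such a constructor can look like (constant names
and values are illustrative):

package scraper

import (
	"errors"
	"net/http"
	"time"
)

const (
	scrapeClientTimeout = 60 * time.Second // illustrative values
	maxIdleConnsPerHost = 8
	maxRedirects        = 20
)

// errMaxRedirects is a static sentinel error, returned by the redirect
// policy instead of allocating a fresh error on every redirect failure.
var errMaxRedirects = errors.New("maximum number of HTTP redirects reached")

// newHTTPClient hoists client construction into one place.
func newHTTPClient() *http.Client {
	return &http.Client{
		Transport: &http.Transport{MaxIdleConnsPerHost: maxIdleConnsPerHost},
		Timeout:   scrapeClientTimeout,
		CheckRedirect: func(req *http.Request, via []*http.Request) error {
			if len(via) >= maxRedirects {
				return errMaxRedirects
			}
			return nil
		},
	}
}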

* Reinstate printCookies

This is a debugging function, and it might still come in handy at some
point.

* Nitpick comment.

* Minor tidy

Co-authored-by: WithoutPants <53250216+WithoutPants@users.noreply.github.com>
2021-10-20 16:12:24 +11:00


package scraper

import (
	"context"
	"errors"
	"io"
	"net/http"
	"net/url"
	"strings"

	"github.com/stashapp/stash/pkg/logger"
	"github.com/stashapp/stash/pkg/models"
	"github.com/tidwall/gjson"
)

// jsonScraper scrapes JSON documents using gjson paths from a mapped
// scraper configuration.
type jsonScraper struct {
	scraper      scraperTypeConfig
	config       config
	globalConfig GlobalConfig
	client       *http.Client
	txnManager   models.TransactionManager
}
func newJsonScraper(scraper scraperTypeConfig, client *http.Client, txnManager models.TransactionManager, config config, globalConfig GlobalConfig) *jsonScraper {
	return &jsonScraper{
		scraper:      scraper,
		config:       config,
		client:       client,
		globalConfig: globalConfig,
		txnManager:   txnManager,
	}
}

func (s *jsonScraper) getJsonScraper() *mappedScraper {
	return s.config.JsonScrapers[s.scraper.Scraper]
}
func (s *jsonScraper) scrapeURL(ctx context.Context, url string) (string, *mappedScraper, error) {
	scraper := s.getJsonScraper()
	if scraper == nil {
		return "", nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config")
	}

	doc, err := s.loadURL(ctx, url)
	if err != nil {
		return "", nil, err
	}

	return doc, scraper, nil
}

// loadURL fetches the given URL on the cached scraper client and returns the
// body, after verifying that it is valid JSON.
func (s *jsonScraper) loadURL(ctx context.Context, url string) (string, error) {
	r, err := loadURL(ctx, url, s.client, s.config, s.globalConfig)
	if err != nil {
		return "", err
	}

	logger.Infof("loadURL (%s)\n", url)

	doc, err := io.ReadAll(r)
	if err != nil {
		return "", err
	}

	docStr := string(doc)
	if !gjson.Valid(docStr) {
		return "", errors.New("not valid json")
	}

	if err == nil && s.config.DebugOptions != nil && s.config.DebugOptions.PrintHTML {
		logger.Infof("loadURL (%s) response: \n%s", url, docStr)
	}

	return docStr, err
}
func (s *jsonScraper) scrapePerformerByURL(url string) (*models.ScrapedPerformer, error) {
	u := replaceURL(url, s.scraper) // allow a URL Replace for performer by URL queries
	doc, scraper, err := s.scrapeURL(context.TODO(), u)
	if err != nil {
		return nil, err
	}

	q := s.getJsonQuery(doc)
	return scraper.scrapePerformer(q)
}

func (s *jsonScraper) scrapeSceneByURL(url string) (*models.ScrapedScene, error) {
	u := replaceURL(url, s.scraper) // allow a URL Replace for scene by URL queries
	doc, scraper, err := s.scrapeURL(context.TODO(), u)
	if err != nil {
		return nil, err
	}

	q := s.getJsonQuery(doc)
	return scraper.scrapeScene(q)
}

func (s *jsonScraper) scrapeGalleryByURL(url string) (*models.ScrapedGallery, error) {
	u := replaceURL(url, s.scraper) // allow a URL Replace for gallery by URL queries
	doc, scraper, err := s.scrapeURL(context.TODO(), u)
	if err != nil {
		return nil, err
	}

	q := s.getJsonQuery(doc)
	return scraper.scrapeGallery(q)
}

func (s *jsonScraper) scrapeMovieByURL(url string) (*models.ScrapedMovie, error) {
	u := replaceURL(url, s.scraper) // allow a URL Replace for movie by URL queries
	doc, scraper, err := s.scrapeURL(context.TODO(), u)
	if err != nil {
		return nil, err
	}

	q := s.getJsonQuery(doc)
	return scraper.scrapeMovie(q)
}
func (s *jsonScraper) scrapePerformersByName(name string) ([]*models.ScrapedPerformer, error) {
	scraper := s.getJsonScraper()
	if scraper == nil {
		return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config")
	}

	const placeholder = "{}"

	// replace the placeholder string with the URL-escaped name
	escapedName := url.QueryEscape(name)

	url := s.scraper.QueryURL
	url = strings.ReplaceAll(url, placeholder, escapedName)

	doc, err := s.loadURL(context.TODO(), url)
	if err != nil {
		return nil, err
	}

	q := s.getJsonQuery(doc)
	return scraper.scrapePerformers(q)
}

func (s *jsonScraper) scrapePerformerByFragment(scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
	return nil, errors.New("scrapePerformerByFragment not supported for json scraper")
}

func (s *jsonScraper) scrapeScenesByName(name string) ([]*models.ScrapedScene, error) {
	scraper := s.getJsonScraper()
	if scraper == nil {
		return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config")
	}

	const placeholder = "{}"

	// replace the placeholder string with the URL-escaped name
	escapedName := url.QueryEscape(name)

	url := s.scraper.QueryURL
	url = strings.ReplaceAll(url, placeholder, escapedName)

	doc, err := s.loadURL(context.TODO(), url)
	if err != nil {
		return nil, err
	}

	q := s.getJsonQuery(doc)
	return scraper.scrapeScenes(q)
}
func (s *jsonScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedScene, error) {
	// construct the URL
	queryURL := queryURLParametersFromScene(scene)
	if s.scraper.QueryURLReplacements != nil {
		queryURL.applyReplacements(s.scraper.QueryURLReplacements)
	}
	url := queryURL.constructURL(s.scraper.QueryURL)

	scraper := s.getJsonScraper()
	if scraper == nil {
		return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config")
	}

	doc, err := s.loadURL(context.TODO(), url)
	if err != nil {
		return nil, err
	}

	q := s.getJsonQuery(doc)
	return scraper.scrapeScene(q)
}

func (s *jsonScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) {
	// construct the URL
	queryURL := queryURLParametersFromScrapedScene(scene)
	if s.scraper.QueryURLReplacements != nil {
		queryURL.applyReplacements(s.scraper.QueryURLReplacements)
	}
	url := queryURL.constructURL(s.scraper.QueryURL)

	scraper := s.getJsonScraper()
	if scraper == nil {
		return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config")
	}

	doc, err := s.loadURL(context.TODO(), url)
	if err != nil {
		return nil, err
	}

	q := s.getJsonQuery(doc)
	return scraper.scrapeScene(q)
}
func (s *jsonScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error) {
	// construct the URL
	queryURL := queryURLParametersFromGallery(gallery)
	if s.scraper.QueryURLReplacements != nil {
		queryURL.applyReplacements(s.scraper.QueryURLReplacements)
	}
	url := queryURL.constructURL(s.scraper.QueryURL)

	scraper := s.getJsonScraper()
	if scraper == nil {
		return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config")
	}

	doc, err := s.loadURL(context.TODO(), url)
	if err != nil {
		return nil, err
	}

	q := s.getJsonQuery(doc)
	return scraper.scrapeGallery(q)
}

func (s *jsonScraper) scrapeGalleryByFragment(gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error) {
	return nil, errors.New("scrapeGalleryByFragment not supported for json scraper")
}

func (s *jsonScraper) getJsonQuery(doc string) *jsonQuery {
	return &jsonQuery{
		doc:     doc,
		scraper: s,
	}
}

// jsonQuery runs gjson selectors against a fetched JSON document.
type jsonQuery struct {
	doc     string
	scraper *jsonScraper
}
// runQuery evaluates a gjson selector against the document and returns the
// matches as strings; arrays are flattened into one string per element.
func (q *jsonQuery) runQuery(selector string) []string {
	value := gjson.Get(q.doc, selector)
	if !value.Exists() {
		logger.Warnf("Could not find json path '%s' in json object", selector)
		return nil
	}

	var ret []string
	if value.IsArray() {
		value.ForEach(func(k, v gjson.Result) bool {
			ret = append(ret, v.String())
			return true
		})
	} else {
		ret = append(ret, value.String())
	}

	return ret
}

// subScrape fetches a nested URL and returns a query over its JSON body, for
// use by sub-scraper configurations.
func (q *jsonQuery) subScrape(value string) mappedQuery {
	doc, err := q.scraper.loadURL(context.TODO(), value)
	if err != nil {
		logger.Warnf("Error getting URL '%s' for sub-scraper: %s", value, err.Error())
		return nil
	}

	return q.scraper.getJsonQuery(doc)
}