Mirror of https://github.com/stashapp/stash.git (synced 2025-12-06 16:34:02 +01:00)
Scrape scene by name (#1712)
* Support scrape scene by name in configs
* Initial scene querying
* Add to manual
parent 565064b441
commit 1a3a2f1f83
18 changed files with 786 additions and 192 deletions
@@ -71,6 +71,14 @@ func (r *queryResolver) ScrapePerformerURL(ctx context.Context, url string) (*mo
 	return manager.GetInstance().ScraperCache.ScrapePerformerURL(url)
 }
 
+func (r *queryResolver) ScrapeSceneQuery(ctx context.Context, scraperID string, query string) ([]*models.ScrapedScene, error) {
+	if query == "" {
+		return nil, nil
+	}
+
+	return manager.GetInstance().ScraperCache.ScrapeSceneQuery(scraperID, query)
+}
+
 func (r *queryResolver) ScrapeScene(ctx context.Context, scraperID string, scene models.SceneUpdateInput) (*models.ScrapedScene, error) {
 	id, err := strconv.Atoi(scene.ID)
 	if err != nil {
@@ -165,8 +173,10 @@ func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source models.Scr
 		singleScene, err = manager.GetInstance().ScraperCache.ScrapeScene(*source.ScraperID, sceneID)
 	} else if input.SceneInput != nil {
 		singleScene, err = manager.GetInstance().ScraperCache.ScrapeSceneFragment(*source.ScraperID, *input.SceneInput)
+	} else if input.Query != nil {
+		return manager.GetInstance().ScraperCache.ScrapeSceneQuery(*source.ScraperID, *input.Query)
 	} else {
-		return nil, errors.New("not implemented")
+		err = errors.New("scene_id, scene_input or query must be set")
 	}
 
 	if err != nil {
@@ -24,6 +24,7 @@ type scraper interface {
 	scrapePerformerByFragment(scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error)
 	scrapePerformerByURL(url string) (*models.ScrapedPerformer, error)
 
+	scrapeScenesByName(name string) ([]*models.ScrapedScene, error)
 	scrapeSceneByScene(scene *models.Scene) (*models.ScrapedScene, error)
 	scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error)
 	scrapeSceneByURL(url string) (*models.ScrapedScene, error)
@@ -35,6 +35,12 @@ type config struct {
 	// Configuration for querying gallery by a Gallery fragment
 	GalleryByFragment *scraperTypeConfig `yaml:"galleryByFragment"`
 
+	// Configuration for querying scenes by name
+	SceneByName *scraperTypeConfig `yaml:"sceneByName"`
+
+	// Configuration for querying scenes by query fragment
+	SceneByQueryFragment *scraperTypeConfig `yaml:"sceneByQueryFragment"`
+
 	// Configuration for querying a scene by a URL
 	SceneByURL []*scrapeByURLConfig `yaml:"sceneByURL"`
@@ -256,6 +262,9 @@ func (c config) toScraper() *models.Scraper {
 	if c.SceneByFragment != nil {
 		scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeFragment)
 	}
+	if c.SceneByName != nil && c.SceneByQueryFragment != nil {
+		scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeName)
+	}
 	if len(c.SceneByURL) > 0 {
 		scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeURL)
 		for _, v := range c.SceneByURL {
@@ -353,7 +362,7 @@ func (c config) ScrapePerformerURL(url string, txnManager models.TransactionMana
 }
 
 func (c config) supportsScenes() bool {
-	return c.SceneByFragment != nil || len(c.SceneByURL) > 0
+	return (c.SceneByName != nil && c.SceneByQueryFragment != nil) || c.SceneByFragment != nil || len(c.SceneByURL) > 0
 }
 
 func (c config) supportsGalleries() bool {
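Name-based querying is only advertised when both `sceneByName` and `sceneByQueryFragment` are configured, as the updated predicate above shows. A minimal standalone sketch of that predicate, with the config struct trimmed to pointer presence (`scrapeByURLConfig` is simplified away and the example configs are made up):

```go
package main

import "fmt"

// simplified stand-ins for the real config types
type scraperTypeConfig struct{}

type config struct {
	SceneByName          *scraperTypeConfig
	SceneByQueryFragment *scraperTypeConfig
	SceneByFragment      *scraperTypeConfig
	SceneByURL           []*scraperTypeConfig
}

// same predicate as in the hunk above
func (c config) supportsScenes() bool {
	return (c.SceneByName != nil && c.SceneByQueryFragment != nil) ||
		c.SceneByFragment != nil || len(c.SceneByURL) > 0
}

func main() {
	nameOnly := config{SceneByName: &scraperTypeConfig{}}
	nameAndQuery := config{
		SceneByName:          &scraperTypeConfig{},
		SceneByQueryFragment: &scraperTypeConfig{},
	}

	fmt.Println(nameOnly.supportsScenes())     // false: sceneByQueryFragment missing
	fmt.Println(nameAndQuery.supportsScenes()) // true
}
```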
@@ -393,6 +402,15 @@ func (c config) matchesMovieURL(url string) bool {
 	return false
 }
 
+func (c config) ScrapeSceneQuery(name string, txnManager models.TransactionManager, globalConfig GlobalConfig) ([]*models.ScrapedScene, error) {
+	if c.SceneByName != nil {
+		s := getScraper(*c.SceneByName, txnManager, c, globalConfig)
+		return s.scrapeScenesByName(name)
+	}
+
+	return nil, nil
+}
+
 func (c config) ScrapeSceneByScene(scene *models.Scene, txnManager models.TransactionManager, globalConfig GlobalConfig) (*models.ScrapedScene, error) {
 	if c.SceneByFragment != nil {
 		s := getScraper(*c.SceneByFragment, txnManager, c, globalConfig)
@@ -403,9 +421,8 @@ func (c config) ScrapeSceneByScene(scene *models.Scene, txnManager models.Transa
 }
 
 func (c config) ScrapeSceneByFragment(scene models.ScrapedSceneInput, txnManager models.TransactionManager, globalConfig GlobalConfig) (*models.ScrapedScene, error) {
-	if c.SceneByFragment != nil {
-		// TODO - this should be sceneByQueryFragment
-		s := getScraper(*c.SceneByFragment, txnManager, c, globalConfig)
+	if c.SceneByQueryFragment != nil {
+		s := getScraper(*c.SceneByQueryFragment, txnManager, c, globalConfig)
 		return s.scrapeSceneByFragment(scene)
 	}
 
@@ -431,7 +448,7 @@ func (c config) ScrapeSceneURL(url string, txnManager models.TransactionManager,
 }
 
 func (c config) ScrapeGalleryByGallery(gallery *models.Gallery, txnManager models.TransactionManager, globalConfig GlobalConfig) (*models.ScrapedGallery, error) {
-	if c.SceneByFragment != nil {
+	if c.GalleryByFragment != nil {
 		s := getScraper(*c.GalleryByFragment, txnManager, c, globalConfig)
 		return s.scrapeGalleryByGallery(gallery)
 	}
@@ -143,6 +143,31 @@ func (s *jsonScraper) scrapePerformerByFragment(scrapedPerformer models.ScrapedP
 	return nil, errors.New("scrapePerformerByFragment not supported for json scraper")
 }
 
+func (s *jsonScraper) scrapeScenesByName(name string) ([]*models.ScrapedScene, error) {
+	scraper := s.getJsonScraper()
+
+	if scraper == nil {
+		return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config")
+	}
+
+	const placeholder = "{}"
+
+	// replace the placeholder string with the URL-escaped name
+	escapedName := url.QueryEscape(name)
+
+	url := s.scraper.QueryURL
+	url = strings.Replace(url, placeholder, escapedName, -1)
+
+	doc, err := s.loadURL(url)
+
+	if err != nil {
+		return nil, err
+	}
+
+	q := s.getJsonQuery(doc)
+	return scraper.scrapeScenes(q)
+}
+
 func (s *jsonScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedScene, error) {
 	// construct the URL
 	queryURL := queryURLParametersFromScene(scene)
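The name query simply substitutes the `{}` placeholder in the scraper's `queryURL` with the URL-escaped search string before loading the page. A standalone sketch of just that substitution, with a made-up `queryURL` and query:

```go
package main

import (
	"fmt"
	"net/url"
	"strings"
)

func main() {
	// hypothetical queryURL from a scraper config; "{}" is the placeholder
	queryURL := "https://example.org/search?q={}"
	name := "some scene title"

	// same steps as scrapeScenesByName: URL-escape, then replace "{}"
	escaped := url.QueryEscape(name)
	fmt.Println(strings.Replace(queryURL, "{}", escaped, -1))
	// Output: https://example.org/search?q=some+scene+title
}
```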
@@ -168,7 +193,27 @@ func (s *jsonScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedSc
 }
 
 func (s *jsonScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) {
-	return nil, errors.New("scrapeSceneByFragment not supported for json scraper")
+	// construct the URL
+	queryURL := queryURLParametersFromScrapedScene(scene)
+	if s.scraper.QueryURLReplacements != nil {
+		queryURL.applyReplacements(s.scraper.QueryURLReplacements)
+	}
+	url := queryURL.constructURL(s.scraper.QueryURL)
+
+	scraper := s.getJsonScraper()
+
+	if scraper == nil {
+		return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config")
+	}
+
+	doc, err := s.loadURL(url)
+
+	if err != nil {
+		return nil, err
+	}
+
+	q := s.getJsonQuery(doc)
+	return scraper.scrapeScene(q)
 }
 
 func (s *jsonScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error) {
@@ -789,6 +789,100 @@ func (s mappedScraper) scrapePerformers(q mappedQuery) ([]*models.ScrapedPerform
 	return ret, nil
 }
 
+func (s mappedScraper) processScene(q mappedQuery, r mappedResult) *models.ScrapedScene {
+	var ret models.ScrapedScene
+
+	sceneScraperConfig := s.Scene
+
+	scenePerformersMap := sceneScraperConfig.Performers
+	sceneTagsMap := sceneScraperConfig.Tags
+	sceneStudioMap := sceneScraperConfig.Studio
+	sceneMoviesMap := sceneScraperConfig.Movies
+
+	scenePerformerTagsMap := scenePerformersMap.Tags
+
+	r.apply(&ret)
+
+	// process performer tags once
+	var performerTagResults mappedResults
+	if scenePerformerTagsMap != nil {
+		performerTagResults = scenePerformerTagsMap.process(q, s.Common)
+	}
+
+	// now apply the performers and tags
+	if scenePerformersMap.mappedConfig != nil {
+		logger.Debug(`Processing scene performers:`)
+		performerResults := scenePerformersMap.process(q, s.Common)
+
+		for _, p := range performerResults {
+			performer := &models.ScrapedPerformer{}
+			p.apply(performer)
+
+			for _, p := range performerTagResults {
+				tag := &models.ScrapedTag{}
+				p.apply(tag)
+				ret.Tags = append(ret.Tags, tag)
+			}
+
+			ret.Performers = append(ret.Performers, performer)
+		}
+	}
+
+	if sceneTagsMap != nil {
+		logger.Debug(`Processing scene tags:`)
+		tagResults := sceneTagsMap.process(q, s.Common)
+
+		for _, p := range tagResults {
+			tag := &models.ScrapedTag{}
+			p.apply(tag)
+			ret.Tags = append(ret.Tags, tag)
+		}
+	}
+
+	if sceneStudioMap != nil {
+		logger.Debug(`Processing scene studio:`)
+		studioResults := sceneStudioMap.process(q, s.Common)
+
+		if len(studioResults) > 0 {
+			studio := &models.ScrapedStudio{}
+			studioResults[0].apply(studio)
+			ret.Studio = studio
+		}
+	}
+
+	if sceneMoviesMap != nil {
+		logger.Debug(`Processing scene movies:`)
+		movieResults := sceneMoviesMap.process(q, s.Common)
+
+		for _, p := range movieResults {
+			movie := &models.ScrapedMovie{}
+			p.apply(movie)
+			ret.Movies = append(ret.Movies, movie)
+		}
+	}
+
+	return &ret
+}
+
+func (s mappedScraper) scrapeScenes(q mappedQuery) ([]*models.ScrapedScene, error) {
+	var ret []*models.ScrapedScene
+
+	sceneScraperConfig := s.Scene
+	sceneMap := sceneScraperConfig.mappedConfig
+	if sceneMap == nil {
+		return nil, nil
+	}
+
+	logger.Debug(`Processing scenes:`)
+	results := sceneMap.process(q, s.Common)
+	for _, r := range results {
+		logger.Debug(`Processing scene:`)
+		ret = append(ret, s.processScene(q, r))
+	}
+
+	return ret, nil
+}
+
 func (s mappedScraper) scrapeScene(q mappedQuery) (*models.ScrapedScene, error) {
 	var ret models.ScrapedScene
@@ -798,76 +892,11 @@ func (s mappedScraper) scrapeScene(q mappedQuery) (*models.ScrapedScene, error)
 		return nil, nil
 	}
 
-	scenePerformersMap := sceneScraperConfig.Performers
-	sceneTagsMap := sceneScraperConfig.Tags
-	sceneStudioMap := sceneScraperConfig.Studio
-	sceneMoviesMap := sceneScraperConfig.Movies
-
-	scenePerformerTagsMap := scenePerformersMap.Tags
-
 	logger.Debug(`Processing scene:`)
 	results := sceneMap.process(q, s.Common)
 	if len(results) > 0 {
-		results[0].apply(&ret)
+		ss := s.processScene(q, results[0])
+		ret = *ss
-
-		// process performer tags once
-		var performerTagResults mappedResults
-		if scenePerformerTagsMap != nil {
-			performerTagResults = scenePerformerTagsMap.process(q, s.Common)
-		}
-
-		// now apply the performers and tags
-		if scenePerformersMap.mappedConfig != nil {
-			logger.Debug(`Processing scene performers:`)
-			performerResults := scenePerformersMap.process(q, s.Common)
-
-			for _, p := range performerResults {
-				performer := &models.ScrapedPerformer{}
-				p.apply(performer)
-
-				for _, p := range performerTagResults {
-					tag := &models.ScrapedTag{}
-					p.apply(tag)
-					ret.Tags = append(ret.Tags, tag)
-				}
-
-				ret.Performers = append(ret.Performers, performer)
-			}
-		}
-
-		if sceneTagsMap != nil {
-			logger.Debug(`Processing scene tags:`)
-			tagResults := sceneTagsMap.process(q, s.Common)
-
-			for _, p := range tagResults {
-				tag := &models.ScrapedTag{}
-				p.apply(tag)
-				ret.Tags = append(ret.Tags, tag)
-			}
-		}
-
-		if sceneStudioMap != nil {
-			logger.Debug(`Processing scene studio:`)
-			studioResults := sceneStudioMap.process(q, s.Common)
-
-			if len(studioResults) > 0 {
-				studio := &models.ScrapedStudio{}
-				studioResults[0].apply(studio)
-				ret.Studio = studio
-			}
-		}
-
-		if sceneMoviesMap != nil {
-			logger.Debug(`Processing scene movies:`)
-			movieResults := sceneMoviesMap.process(q, s.Common)
-
-			for _, p := range movieResults {
-				movie := &models.ScrapedMovie{}
-				p.apply(movie)
-				ret.Movies = append(ret.Movies, movie)
-			}
-
-		}
 	}
 
 	return &ret, nil
@@ -21,6 +21,23 @@ func queryURLParametersFromScene(scene *models.Scene) queryURLParameters {
 	return ret
 }
 
+func queryURLParametersFromScrapedScene(scene models.ScrapedSceneInput) queryURLParameters {
+	ret := make(queryURLParameters)
+
+	setField := func(field string, value *string) {
+		if value != nil {
+			ret[field] = *value
+		}
+	}
+
+	setField("title", scene.Title)
+	setField("url", scene.URL)
+	setField("date", scene.Date)
+	setField("details", scene.Details)
+	setField("remote_site_id", scene.RemoteSiteID)
+	return ret
+}
+
 func queryURLParameterFromURL(url string) queryURLParameters {
 	ret := make(queryURLParameters)
 	ret["url"] = url
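The helper above only copies fields that are actually set on the scraped-scene input, since every field is an optional pointer. A small self-contained sketch of the same pattern with illustrative values:

```go
package main

import "fmt"

func main() {
	// illustrative values; in the real helper these come from models.ScrapedSceneInput
	title := "Example Title"
	var date *string // unset optional fields stay nil

	params := make(map[string]string)
	setField := func(field string, value *string) {
		if value != nil {
			params[field] = *value
		}
	}

	setField("title", &title)
	setField("date", date)

	fmt.Println(params) // map[title:Example Title] – nil fields are simply omitted
}
```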
@@ -3,7 +3,6 @@ package scraper
 import (
 	"context"
 	"errors"
-	"fmt"
 	"os"
 	"path/filepath"
 	"regexp"
@@ -366,30 +365,43 @@ func (c Cache) postScrapeGallery(ret *models.ScrapedGallery) error {
 func (c Cache) ScrapeScene(scraperID string, sceneID int) (*models.ScrapedScene, error) {
 	// find scraper with the provided id
 	s := c.findScraper(scraperID)
-	if s == nil {
-		return nil, fmt.Errorf("scraper with ID %s not found", scraperID)
-	}
-
-	// get scene from id
-	scene, err := getScene(sceneID, c.txnManager)
-	if err != nil {
-		return nil, err
-	}
-
-	ret, err := s.ScrapeSceneByScene(scene, c.txnManager, c.globalConfig)
-
-	if err != nil {
-		return nil, err
-	}
-
-	if ret != nil {
-		err = c.postScrapeScene(ret)
+	if s != nil {
+		// get scene from id
+		scene, err := getScene(sceneID, c.txnManager)
 		if err != nil {
 			return nil, err
 		}
+
+		ret, err := s.ScrapeSceneByScene(scene, c.txnManager, c.globalConfig)
+
+		if err != nil {
+			return nil, err
+		}
+
+		if ret != nil {
+			err = c.postScrapeScene(ret)
+			if err != nil {
+				return nil, err
+			}
+		}
+
+		return ret, nil
 	}
 
-	return ret, nil
+	return nil, errors.New("Scraper with ID " + scraperID + " not found")
+}
+
+// ScrapeSceneQuery uses the scraper with the provided ID to query for
+// scenes using the provided query string. It returns a list of
+// scraped scene data.
+func (c Cache) ScrapeSceneQuery(scraperID string, query string) ([]*models.ScrapedScene, error) {
+	// find scraper with the provided id
+	s := c.findScraper(scraperID)
+	if s != nil {
+		return s.ScrapeSceneQuery(query, c.txnManager, c.globalConfig)
+	}
+
+	return nil, errors.New("Scraper with ID " + scraperID + " not found")
 }
 
 // ScrapeSceneFragment uses the scraper with the provided ID to scrape a scene.
@@ -148,6 +148,24 @@ func (s *scriptScraper) scrapeSceneByScene(scene *models.Scene) (*models.Scraped
 	return &ret, err
 }
 
+func (s *scriptScraper) scrapeScenesByName(name string) ([]*models.ScrapedScene, error) {
+	inString := `{"name": "` + name + `"}`
+
+	var scenes []models.ScrapedScene
+
+	err := s.runScraperScript(inString, &scenes)
+
+	// convert to pointers
+	var ret []*models.ScrapedScene
+	if err == nil {
+		for i := 0; i < len(scenes); i++ {
+			ret = append(ret, &scenes[i])
+		}
+	}
+
+	return ret, err
+}
+
 func (s *scriptScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) {
 	inString, err := json.Marshal(scene)
@@ -151,6 +151,62 @@ type scrapedStudioStash struct {
 	URL *string `graphql:"url" json:"url"`
 }
 
+type stashFindSceneNamesResultType struct {
+	Count  int                  `graphql:"count"`
+	Scenes []*scrapedSceneStash `graphql:"scenes"`
+}
+
+func (s *stashScraper) scrapedStashSceneToScrapedScene(scene *scrapedSceneStash) (*models.ScrapedScene, error) {
+	ret := models.ScrapedScene{}
+	err := copier.Copy(&ret, scene)
+	if err != nil {
+		return nil, err
+	}
+
+	// get the performer image directly
+	ret.Image, err = getStashSceneImage(s.config.StashServer.URL, scene.ID, s.globalConfig)
+	if err != nil {
+		return nil, err
+	}
+
+	return &ret, nil
+}
+
+func (s *stashScraper) scrapeScenesByName(name string) ([]*models.ScrapedScene, error) {
+	client := s.getStashClient()
+
+	var q struct {
+		FindScenes stashFindSceneNamesResultType `graphql:"findScenes(filter: $f)"`
+	}
+
+	page := 1
+	perPage := 10
+
+	vars := map[string]interface{}{
+		"f": models.FindFilterType{
+			Q:       &name,
+			Page:    &page,
+			PerPage: &perPage,
+		},
+	}
+
+	err := client.Query(context.Background(), &q, vars)
+	if err != nil {
+		return nil, err
+	}
+
+	var ret []*models.ScrapedScene
+	for _, scene := range q.FindScenes.Scenes {
+		converted, err := s.scrapedStashSceneToScrapedScene(scene)
+		if err != nil {
+			return nil, err
+		}
+		ret = append(ret, converted)
+	}
+
+	return ret, nil
+}
+
 type scrapedSceneStash struct {
 	ID    string  `graphql:"id" json:"id"`
 	Title *string `graphql:"title" json:"title"`
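The stash-server path reuses the regular `findScenes` query, passing the search string as the free-text `q` filter with a fixed page size of 10. A standalone sketch of the variables map that ends up being sent; the struct is a simplified stand-in for `models.FindFilterType` and the JSON field names are assumed here for illustration:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// simplified stand-in for models.FindFilterType; json tags are assumptions
type FindFilterType struct {
	Q       *string `json:"q,omitempty"`
	Page    *int    `json:"page,omitempty"`
	PerPage *int    `json:"per_page,omitempty"`
}

func main() {
	name := "scene title keywords"
	page, perPage := 1, 10

	vars := map[string]interface{}{
		"f": FindFilterType{Q: &name, Page: &page, PerPage: &perPage},
	}

	b, _ := json.Marshal(vars)
	fmt.Println(string(b))
	// {"f":{"q":"scene title keywords","page":1,"per_page":10}}
}
```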
@@ -189,19 +245,18 @@ func (s *stashScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedS
 	}
 
 	// need to copy back to a scraped scene
-	ret := models.ScrapedScene{}
-	if err := copier.Copy(&ret, q.FindScene); err != nil {
+	ret, err := s.scrapedStashSceneToScrapedScene(q.FindScene)
+	if err != nil {
 		return nil, err
 	}
 
 	// get the performer image directly
-	var err error
 	ret.Image, err = getStashSceneImage(s.config.StashServer.URL, q.FindScene.ID, s.globalConfig)
 	if err != nil {
 		return nil, err
 	}
 
-	return &ret, nil
+	return ret, nil
 }
 
 func (s *stashScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) {
@@ -109,7 +109,7 @@ func loadURL(url string, scraperConfig config, globalConfig GlobalConfig) (io.Re
 func urlFromCDP(url string, driverOptions scraperDriverOptions, globalConfig GlobalConfig) (io.Reader, error) {
 
 	if !driverOptions.UseCDP {
-		return nil, fmt.Errorf("URL shouldn't be fetched through CDP")
+		return nil, fmt.Errorf("url shouldn't be fetched through CDP")
 	}
 
 	sleepDuration := scrapeDefaultSleep
@@ -124,6 +124,31 @@ func (s *xpathScraper) scrapePerformerByFragment(scrapedPerformer models.Scraped
 	return nil, errors.New("scrapePerformerByFragment not supported for xpath scraper")
 }
 
+func (s *xpathScraper) scrapeScenesByName(name string) ([]*models.ScrapedScene, error) {
+	scraper := s.getXpathScraper()
+
+	if scraper == nil {
+		return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config")
+	}
+
+	const placeholder = "{}"
+
+	// replace the placeholder string with the URL-escaped name
+	escapedName := url.QueryEscape(name)
+
+	url := s.scraper.QueryURL
+	url = strings.Replace(url, placeholder, escapedName, -1)
+
+	doc, err := s.loadURL(url)
+
+	if err != nil {
+		return nil, err
+	}
+
+	q := s.getXPathQuery(doc)
+	return scraper.scrapeScenes(q)
+}
+
 func (s *xpathScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedScene, error) {
 	// construct the URL
 	queryURL := queryURLParametersFromScene(scene)
@@ -149,7 +174,27 @@ func (s *xpathScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedS
 }
 
 func (s *xpathScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) {
-	return nil, errors.New("scrapeSceneByFragment not supported for xpath scraper")
+	// construct the URL
+	queryURL := queryURLParametersFromScrapedScene(scene)
+	if s.scraper.QueryURLReplacements != nil {
+		queryURL.applyReplacements(s.scraper.QueryURLReplacements)
+	}
+	url := queryURL.constructURL(s.scraper.QueryURL)
+
+	scraper := s.getXpathScraper()
+
+	if scraper == nil {
+		return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config")
+	}
+
+	doc, err := s.loadURL(url)
+
+	if err != nil {
+		return nil, err
+	}
+
+	q := s.getXPathQuery(doc)
+	return scraper.scrapeScene(q)
 }
 
 func (s *xpathScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error) {
@@ -1,7 +1,8 @@
 ### ✨ New Features
+* Added support for querying scene scrapers using keywords. ([#1712](https://github.com/stashapp/stash/pull/1712))
 * Added support for Studio aliases. ([#1660](https://github.com/stashapp/stash/pull/1660))
 * Added support for Tag hierarchies. ([#1519](https://github.com/stashapp/stash/pull/1519))
-* Added native support for Apple Silicon / M1 Macs. ([#1646] https://github.com/stashapp/stash/pull/1646)
+* Added native support for Apple Silicon / M1 Macs. ([#1646](https://github.com/stashapp/stash/pull/1646))
 * Added Movies to Scene bulk edit dialog. ([#1676](https://github.com/stashapp/stash/pull/1676))
 * Added Movies tab to Studio and Performer pages. ([#1675](https://github.com/stashapp/stash/pull/1675))
 * Support filtering Movies by Performers. ([#1675](https://github.com/stashapp/stash/pull/1675))
@@ -7,6 +7,7 @@ import {
   Form,
   Col,
   Row,
+  ButtonGroup,
 } from "react-bootstrap";
 import Mousetrap from "mousetrap";
 import * as GQL from "src/core/generated-graphql";
@@ -18,7 +19,7 @@ import {
   useSceneUpdate,
   mutateReloadScrapers,
   useConfiguration,
-  queryStashBoxScene,
+  queryScrapeSceneQueryFragment,
 } from "src/core/StashService";
 import {
   PerformerSelect,
@@ -37,6 +38,7 @@ import { Prompt } from "react-router";
 import { SceneMovieTable } from "./SceneMovieTable";
 import { RatingStars } from "./RatingStars";
 import { SceneScrapeDialog } from "./SceneScrapeDialog";
+import { SceneQueryModal } from "./SceneQueryModal";
 
 interface IProps {
   scene: GQL.SceneDataFragment;
@@ -60,8 +62,14 @@ export const SceneEditPanel: React.FC<IProps> = ({
   );
 
   const Scrapers = useListSceneScrapers();
+  const [fragmentScrapers, setFragmentScrapers] = useState<GQL.Scraper[]>([]);
   const [queryableScrapers, setQueryableScrapers] = useState<GQL.Scraper[]>([]);
 
+  const [scraper, setScraper] = useState<GQL.ScraperSourceInput | undefined>();
+  const [
+    isScraperQueryModalOpen,
+    setIsScraperQueryModalOpen,
+  ] = useState<boolean>(false);
   const [scrapedScene, setScrapedScene] = useState<GQL.ScrapedScene | null>();
 
   const [coverImagePreview, setCoverImagePreview] = useState<
@@ -181,12 +189,16 @@ export const SceneEditPanel: React.FC<IProps> = ({
   });
 
   useEffect(() => {
-    const newQueryableScrapers = (
-      Scrapers?.data?.listSceneScrapers ?? []
-    ).filter((s) =>
+    const toFilter = Scrapers?.data?.listSceneScrapers ?? [];
+
+    const newFragmentScrapers = toFilter.filter((s) =>
       s.scene?.supported_scrapes.includes(GQL.ScrapeType.Fragment)
     );
+    const newQueryableScrapers = toFilter.filter((s) =>
+      s.scene?.supported_scrapes.includes(GQL.ScrapeType.Name)
+    );
 
+    setFragmentScrapers(newFragmentScrapers);
     setQueryableScrapers(newQueryableScrapers);
   }, [Scrapers, stashConfig]);
@@ -273,32 +285,10 @@ export const SceneEditPanel: React.FC<IProps> = ({
     ImageUtils.onImageChange(event, onImageLoad);
   }
 
-  async function onScrapeStashBoxClicked(stashBoxIndex: number) {
+  async function onScrapeClicked(s: GQL.ScraperSourceInput) {
     setIsLoading(true);
     try {
-      const result = await queryStashBoxScene(stashBoxIndex, scene.id);
-      if (!result.data || !result.data.scrapeSingleScene) {
-        return;
-      }
-
-      if (result.data.scrapeSingleScene.length > 0) {
-        setScrapedScene(result.data.scrapeSingleScene[0]);
-      } else {
-        Toast.success({
-          content: "No scenes found",
-        });
-      }
-    } catch (e) {
-      Toast.error(e);
-    } finally {
-      setIsLoading(false);
-    }
-  }
-
-  async function onScrapeClicked(scraper: GQL.Scraper) {
-    setIsLoading(true);
-    try {
-      const result = await queryScrapeScene(scraper.id, scene.id);
+      const result = await queryScrapeScene(s, scene.id);
       if (!result.data || !result.data.scrapeSingleScene?.length) {
         Toast.success({
           content: "No scenes found",
@@ -314,6 +304,41 @@ export const SceneEditPanel: React.FC<IProps> = ({
       }
     }
 
+  async function scrapeFromQuery(
+    s: GQL.ScraperSourceInput,
+    fragment: GQL.ScrapedSceneDataFragment
+  ) {
+    setIsLoading(true);
+    try {
+      const input: GQL.ScrapedSceneInput = {
+        date: fragment.date,
+        details: fragment.details,
+        remote_site_id: fragment.remote_site_id,
+        title: fragment.title,
+        url: fragment.url,
+      };
+
+      const result = await queryScrapeSceneQueryFragment(s, input);
+      if (!result.data || !result.data.scrapeSingleScene?.length) {
+        Toast.success({
+          content: "No scenes found",
+        });
+        return;
+      }
+      // assume one returned scene
+      setScrapedScene(result.data.scrapeSingleScene[0]);
+    } catch (e) {
+      Toast.error(e);
+    } finally {
+      setIsLoading(false);
+    }
+  }
+
+  function onScrapeQueryClicked(s: GQL.ScraperSourceInput) {
+    setScraper(s);
+    setIsScraperQueryModalOpen(true);
+  }
+
   async function onReloadScrapers() {
     setIsLoading(true);
     try {
@@ -354,10 +379,79 @@ export const SceneEditPanel: React.FC<IProps> = ({
     );
   }
 
+  function renderScrapeQueryMenu() {
+    const stashBoxes = stashConfig.data?.configuration.general.stashBoxes ?? [];
+
+    if (stashBoxes.length === 0 && queryableScrapers.length === 0) return;
+
+    return (
+      <Dropdown title={intl.formatMessage({ id: "actions.scrape_query" })}>
+        <Dropdown.Toggle variant="secondary">
+          <Icon icon="search" />
+        </Dropdown.Toggle>
+
+        <Dropdown.Menu>
+          {stashBoxes.map((s, index) => (
+            <Dropdown.Item
+              key={s.endpoint}
+              onClick={() => onScrapeQueryClicked({ stash_box_index: index })}
+            >
+              {s.name ?? "Stash-Box"}
+            </Dropdown.Item>
+          ))}
+          {queryableScrapers.map((s) => (
+            <Dropdown.Item
+              key={s.name}
+              onClick={() => onScrapeQueryClicked({ scraper_id: s.id })}
+            >
+              {s.name}
+            </Dropdown.Item>
+          ))}
+          <Dropdown.Item onClick={() => onReloadScrapers()}>
+            <span className="fa-icon">
+              <Icon icon="sync-alt" />
+            </span>
+            <span>
+              <FormattedMessage id="actions.reload_scrapers" />
+            </span>
+          </Dropdown.Item>
+        </Dropdown.Menu>
+      </Dropdown>
+    );
+  }
+
+  function onSceneSelected(s: GQL.ScrapedSceneDataFragment) {
+    if (!scraper) return;
+
+    if (scraper?.stash_box_index !== undefined) {
+      // must be stash-box - assume full scene
+      setScrapedScene(s);
+    } else {
+      // must be scraper
+      scrapeFromQuery(scraper, s);
+    }
+  }
+
+  const renderScrapeQueryModal = () => {
+    if (!isScraperQueryModalOpen || !scraper) return;
+
+    return (
+      <SceneQueryModal
+        scraper={scraper}
+        onHide={() => setScraper(undefined)}
+        onSelectScene={(s) => {
+          setIsScraperQueryModalOpen(false);
+          setScraper(undefined);
+          onSceneSelected(s);
+        }}
+        name={formik.values.title || ""}
+      />
+    );
+  };
+
   function renderScraperMenu() {
     const stashBoxes = stashConfig.data?.configuration.general.stashBoxes ?? [];
 
-    // TODO - change name based on stashbox configuration
     return (
       <DropdownButton
         className="d-inline-block"
@@ -367,13 +461,16 @@ export const SceneEditPanel: React.FC<IProps> = ({
         {stashBoxes.map((s, index) => (
           <Dropdown.Item
             key={s.endpoint}
-            onClick={() => onScrapeStashBoxClicked(index)}
+            onClick={() => onScrapeClicked({ stash_box_index: index })}
           >
             {s.name ?? "Stash-Box"}
           </Dropdown.Item>
         ))}
-        {queryableScrapers.map((s) => (
-          <Dropdown.Item key={s.name} onClick={() => onScrapeClicked(s)}>
+        {fragmentScrapers.map((s) => (
+          <Dropdown.Item
+            key={s.name}
+            onClick={() => onScrapeClicked({ scraper_id: s.id })}
+          >
             {s.name}
           </Dropdown.Item>
         ))}
@@ -389,44 +486,6 @@ export const SceneEditPanel: React.FC<IProps> = ({
     );
   }
 
-  function maybeRenderStashboxQueryButton() {
-    // const stashBoxes = stashConfig.data?.configuration.general.stashBoxes ?? [];
-    // if (stashBoxes.length === 0) {
-    //   return;
-    // }
-    // TODO - hide this button for now, with the view to add it when we get
-    // the query dialog going
-    // if (stashBoxes.length === 1) {
-    //   return (
-    //     <Button
-    //       className="mr-1"
-    //       onClick={() => onStashBoxQueryClicked(0)}
-    //       title="Query"
-    //     >
-    //       <Icon className="fa-fw" icon="search" />
-    //     </Button>
-    //   );
-    // }
-    // // TODO - change name based on stashbox configuration
-    // return (
-    //   <Dropdown className="d-inline-block mr-1">
-    //     <Dropdown.Toggle id="stashbox-query-dropdown">
-    //       <Icon className="fa-fw" icon="search" />
-    //     </Dropdown.Toggle>
-    //     <Dropdown.Menu>
-    //       {stashBoxes.map((s, index) => (
-    //         <Dropdown.Item
-    //           key={s.endpoint}
-    //           onClick={() => onStashBoxQueryClicked(index)}
-    //         >
-    //           stash-box
-    //         </Dropdown.Item>
-    //       ))}
-    //     </Dropdown.Menu>
-    //   </Dropdown>
-    // );
-  }
-
   function urlScrapable(scrapedUrl: string): boolean {
     return (Scrapers?.data?.listSceneScrapers ?? []).some((s) =>
       (s?.scene?.urls ?? []).some((u) => scrapedUrl.includes(u))
@@ -556,10 +615,11 @@ export const SceneEditPanel: React.FC<IProps> = ({
         message={intl.formatMessage({ id: "dialogs.unsaved_changes" })}
       />
 
+      {renderScrapeQueryModal()}
      {maybeRenderScrapeDialog()}
       <Form noValidate onSubmit={formik.handleSubmit}>
         <div className="form-container row px-3 pt-3">
-          <div className="col-6 edit-buttons mb-3 pl-0">
+          <div className="edit-buttons mb-3 pl-0">
             <Button
               className="edit-button"
               variant="primary"
@@ -576,10 +636,12 @@ export const SceneEditPanel: React.FC<IProps> = ({
               <FormattedMessage id="actions.delete" />
             </Button>
           </div>
-          <Col xs={6} className="text-right">
-            {maybeRenderStashboxQueryButton()}
+          <div className="ml-auto pr-3 text-right d-flex">
+            <ButtonGroup className="scraper-group">
              {renderScraperMenu()}
-          </Col>
+              {renderScrapeQueryMenu()}
+            </ButtonGroup>
+          </div>
         </div>
         <div className="form-container row px-3">
           <div className="col-12 col-lg-6 col-xl-12">
ui/v2.5/src/components/Scenes/SceneDetails/SceneQueryModal.tsx (new file, 229 lines):

import React, { useCallback, useEffect, useRef, useState } from "react";
import { Badge, Button, Col, Form, InputGroup, Row } from "react-bootstrap";
import { FormattedMessage, useIntl } from "react-intl";

import * as GQL from "src/core/generated-graphql";
import {
  Modal,
  LoadingIndicator,
  TruncatedText,
  Icon,
} from "src/components/Shared";
import { queryScrapeSceneQuery } from "src/core/StashService";
import { useToast } from "src/hooks";

interface ISceneSearchResultDetailsProps {
  scene: GQL.ScrapedSceneDataFragment;
}

const SceneSearchResultDetails: React.FC<ISceneSearchResultDetailsProps> = ({
  scene,
}) => {
  function renderPerformers() {
    if (scene.performers) {
      return (
        <Row>
          <Col>
            {scene.performers?.map((performer) => (
              <Badge className="tag-item" variant="secondary">
                {performer.name}
              </Badge>
            ))}
          </Col>
        </Row>
      );
    }
  }

  function renderTags() {
    if (scene.tags) {
      return (
        <Row>
          <Col>
            {scene.tags?.map((tag) => (
              <Badge className="tag-item" variant="secondary">
                {tag.name}
              </Badge>
            ))}
          </Col>
        </Row>
      );
    }
  }

  function renderImage() {
    if (scene.image) {
      return (
        <div className="scene-image-container">
          <img
            src={scene.image}
            alt=""
            className="align-self-center scene-image"
          />
        </div>
      );
    }
  }

  return (
    <div className="scene-details">
      <Row>
        {renderImage()}
        <div className="col flex-column">
          <h4>{scene.title}</h4>
          <h5>
            {scene.studio?.name}
            {scene.studio?.name && scene.date && ` • `}
            {scene.date}
          </h5>
        </div>
      </Row>
      <Row>
        <Col>
          <TruncatedText text={scene.details ?? ""} lineCount={3} />
        </Col>
      </Row>
      {renderPerformers()}
      {renderTags()}
    </div>
  );
};

export interface ISceneSearchResult {
  scene: GQL.ScrapedSceneDataFragment;
}

export const SceneSearchResult: React.FC<ISceneSearchResult> = ({ scene }) => {
  return (
    <div className="mt-3 search-item">
      <div className="row">
        <SceneSearchResultDetails scene={scene} />
      </div>
    </div>
  );
};

interface IProps {
  scraper: GQL.ScraperSourceInput;
  onHide: () => void;
  onSelectScene: (scene: GQL.ScrapedSceneDataFragment) => void;
  name?: string;
}
export const SceneQueryModal: React.FC<IProps> = ({
  scraper,
  name,
  onHide,
  onSelectScene,
}) => {
  const CLASSNAME = "SceneScrapeModal";
  const CLASSNAME_LIST = `${CLASSNAME}-list`;
  const CLASSNAME_LIST_CONTAINER = `${CLASSNAME_LIST}-container`;

  const intl = useIntl();
  const Toast = useToast();

  const inputRef = useRef<HTMLInputElement>(null);
  const [loading, setLoading] = useState<boolean>(false);
  const [scenes, setScenes] = useState<GQL.ScrapedScene[] | undefined>();
  const [error, setError] = useState<Error | undefined>();

  const doQuery = useCallback(
    async (input: string) => {
      if (!input) return;

      setLoading(true);
      try {
        const r = await queryScrapeSceneQuery(scraper, input);
        setScenes(r.data.scrapeSingleScene);
      } catch (err) {
        setError(err);
      } finally {
        setLoading(false);
      }
    },
    [scraper]
  );

  useEffect(() => inputRef.current?.focus(), []);
  useEffect(() => {
    if (error) {
      Toast.error(error);
      setError(undefined);
    }
  }, [error, Toast]);

  function renderResults() {
    if (!scenes) {
      return;
    }

    return (
      <div className={CLASSNAME_LIST_CONTAINER}>
        <div className="mt-1">
          <FormattedMessage
            id="dialogs.scenes_found"
            values={{ count: scenes.length }}
          />
        </div>
        <ul className={CLASSNAME_LIST}>
          {scenes.map((s, i) => (
            // eslint-disable-next-line jsx-a11y/click-events-have-key-events, jsx-a11y/no-noninteractive-element-interactions, react/no-array-index-key
            <li key={i} onClick={() => onSelectScene(s)}>
              <SceneSearchResult scene={s} />
            </li>
          ))}
        </ul>
      </div>
    );
  }

  return (
    <Modal
      show
      onHide={onHide}
      modalProps={{ size: "lg", dialogClassName: "scrape-query-dialog" }}
      header={intl.formatMessage(
        { id: "dialogs.scrape_entity_query" },
        { entity_type: intl.formatMessage({ id: "scene" }) }
      )}
      accept={{
        text: intl.formatMessage({ id: "actions.cancel" }),
        onClick: onHide,
        variant: "secondary",
      }}
    >
      <div className={CLASSNAME}>
        <InputGroup>
          <Form.Control
            defaultValue={name ?? ""}
            placeholder={`${intl.formatMessage({ id: "name" })}...`}
            className="text-input"
            ref={inputRef}
            onKeyPress={(e: React.KeyboardEvent<HTMLInputElement>) =>
              e.key === "Enter" && doQuery(inputRef.current?.value ?? "")
            }
          />
          <InputGroup.Append>
            <Button
              onClick={() => {
                doQuery(inputRef.current?.value ?? "");
              }}
              variant="primary"
              title={intl.formatMessage({ id: "actions.search" })}
            >
              <Icon icon="search" />
            </Button>
          </InputGroup.Append>
        </InputGroup>

        {loading ? (
          <div className="m-4 text-center">
            <LoadingIndicator inline />
          </div>
        ) : (
          renderResults()
        )}
      </div>
    </Modal>
  );
};
@@ -583,3 +583,31 @@ input[type="range"].blue-slider {
     background-color: $secondary;
   }
 }
+
+.scrape-query-dialog {
+  max-height: calc(100vh - 10rem);
+}
+
+.scraper-group {
+  & > .dropdown:not(:last-child) .btn {
+    border-bottom-right-radius: 0;
+    border-top-right-radius: 0;
+  }
+
+  & > .dropdown:not(:first-child) .btn {
+    border-bottom-left-radius: 0;
+    border-top-left-radius: 0;
+  }
+}
+
+.SceneScrapeModal-list {
+  list-style: none;
+  max-height: 50vh;
+  overflow-x: hidden;
+  overflow-y: auto;
+  padding-inline-start: 0;
+
+  li {
+    cursor: pointer;
+  }
+}
@@ -836,6 +836,21 @@ export const queryScrapePerformerURL = (url: string) =>
     fetchPolicy: "network-only",
   });
 
+export const queryScrapeSceneQuery = (
+  source: GQL.ScraperSourceInput,
+  q: string
+) =>
+  client.query<GQL.ScrapeSingleSceneQuery>({
+    query: GQL.ScrapeSingleSceneDocument,
+    variables: {
+      source,
+      input: {
+        query: q,
+      },
+    },
+    fetchPolicy: "network-only",
+  });
+
 export const queryScrapeSceneURL = (url: string) =>
   client.query<GQL.ScrapeSceneUrlQuery>({
     query: GQL.ScrapeSceneUrlDocument,
@@ -863,13 +878,14 @@ export const queryScrapeMovieURL = (url: string) =>
     fetchPolicy: "network-only",
   });
 
-export const queryScrapeScene = (scraperId: string, sceneId: string) =>
+export const queryScrapeScene = (
+  source: GQL.ScraperSourceInput,
+  sceneId: string
+) =>
   client.query<GQL.ScrapeSingleSceneQuery>({
     query: GQL.ScrapeSingleSceneDocument,
     variables: {
-      source: {
-        scraper_id: scraperId,
-      },
+      source,
       input: {
         scene_id: sceneId,
       },
@@ -888,22 +904,22 @@ export const queryStashBoxScene = (stashBoxIndex: number, sceneID: string) =>
         scene_id: sceneID,
       },
     },
+    fetchPolicy: "network-only",
   });
 
-export const queryStashBoxPerformer = (
-  stashBoxIndex: number,
-  performerID: string
+export const queryScrapeSceneQueryFragment = (
+  source: GQL.ScraperSourceInput,
+  input: GQL.ScrapedSceneInput
 ) =>
-  client.query<GQL.ScrapeSinglePerformerQuery>({
-    query: GQL.ScrapeSinglePerformerDocument,
+  client.query<GQL.ScrapeSingleSceneQuery>({
+    query: GQL.ScrapeSingleSceneDocument,
     variables: {
-      source: {
-        stash_box_index: stashBoxIndex,
-      },
+      source,
       input: {
-        performer_id: performerID,
+        scene_input: input,
      },
    },
+    fetchPolicy: "network-only",
  });
 
 export const queryScrapeGallery = (scraperId: string, galleryId: string) =>
@@ -40,6 +40,10 @@ performerByFragment:
   <single scraper config>
 performerByURL:
   <multiple scraper URL configs>
+sceneByName:
+  <single scraper config>
+sceneByQueryFragment:
+  <single scraper config>
 sceneByFragment:
   <single scraper config>
 sceneByURL:
@@ -63,6 +67,7 @@ The scraping types and their required fields are outlined in the following table
 |-----------|------------------------|
 | Scraper in `Scrape...` dropdown button in Performer Edit page | Valid `performerByName` and `performerByFragment` configurations. |
 | Scrape performer from URL | Valid `performerByURL` configuration with matching URL. |
+| Scraper in query dropdown button in Scene Edit page | Valid `sceneByName` and `sceneByQueryFragment` configurations. |
 | Scraper in `Scrape...` dropdown button in Scene Edit page | Valid `sceneByFragment` configuration. |
 | Scrape scene from URL | Valid `sceneByURL` configuration with matching URL. |
 | Scrape movie from URL | Valid `movieByURL` configuration with matching URL. |
@@ -97,7 +102,8 @@ The script is sent input and expects output based on the scraping type, as detai
 | `performerByName` | `{"name": "<performer query string>"}` | Array of JSON-encoded performer fragments (including at least `name`) |
 | `performerByFragment` | JSON-encoded performer fragment | JSON-encoded performer fragment |
 | `performerByURL` | `{"url": "<url>"}` | JSON-encoded performer fragment |
-| `sceneByFragment` | JSON-encoded scene fragment | JSON-encoded scene fragment |
+| `sceneByName` | `{"name": "<scene query string>"}` | Array of JSON-encoded scene fragments |
+| `sceneByQueryFragment`, `sceneByFragment` | JSON-encoded scene fragment | JSON-encoded scene fragment |
 | `sceneByURL` | `{"url": "<url>"}` | JSON-encoded scene fragment |
 | `movieByURL` | `{"url": "<url>"}` | JSON-encoded movie fragment |
 | `galleryByFragment` | JSON-encoded gallery fragment | JSON-encoded gallery fragment |
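For a script scraper, `sceneByName` therefore means: read `{"name": "<query>"}` on standard input and print an array of scene fragments on standard output. A minimal sketch of such a scraper, written in Go purely for illustration (any executable honouring this contract should work; the returned data is made up):

```go
package main

import (
	"encoding/json"
	"os"
)

type nameInput struct {
	Name string `json:"name"`
}

// a scene fragment trimmed to two fields for the sketch
type sceneFragment struct {
	Title string `json:"title"`
	URL   string `json:"url,omitempty"`
}

func main() {
	var in nameInput
	if err := json.NewDecoder(os.Stdin).Decode(&in); err != nil {
		os.Exit(1)
	}

	// a real scraper would query a site here; this just echoes one fake result
	out := []sceneFragment{{Title: "Result for: " + in.Name}}
	_ = json.NewEncoder(os.Stdout).Encode(out)
}
```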
@@ -217,9 +223,9 @@ xPathScrapers:
   # ... performer scraper details ...
 ```
 
-### scrapeXPath and scrapeJson use with `sceneByFragment`
+### scrapeXPath and scrapeJson use with `sceneByFragment` and `sceneByQueryFragment`
 
-For `sceneByFragment`, the `queryURL` field must also be present. This field is used to build a query URL for scenes. For `sceneByFragment`, the `queryURL` field supports the following placeholder fields:
+For `sceneByFragment` and `sceneByQueryFragment`, the `queryURL` field must also be present. This field is used to build a query URL for scenes. For `sceneByFragment`, the `queryURL` field supports the following placeholder fields:
 * `{checksum}` - the MD5 checksum of the scene
 * `{oshash}` - the oshash of the scene
 * `{filename}` - the base filename of the scene
@@ -61,6 +61,7 @@
     "save_filter": "Save filter",
     "scan": "Scan",
     "scrape_with": "Scrape with…",
+    "scrape_query": "Scrape query",
     "search": "Search",
     "select_all": "Select All",
     "select_none": "Select None",
@@ -456,6 +457,8 @@
       "transcodes": "Transcodes (MP4 conversions of unsupported video formats)",
       "video_previews": "Previews (video previews which play when hovering over a scene)"
     },
+    "scenes_found": "{count} scenes found",
+    "scrape_entity_query": "{entity_type} Scrape Query",
     "scrape_entity_title": "{entity_type} Scrape Results",
     "scrape_results_existing": "Existing",
     "scrape_results_scraped": "Scraped",