diff --git a/pkg/api/resolver_query_scraper.go b/pkg/api/resolver_query_scraper.go index 55b17c09f..181363e24 100644 --- a/pkg/api/resolver_query_scraper.go +++ b/pkg/api/resolver_query_scraper.go @@ -71,6 +71,14 @@ func (r *queryResolver) ScrapePerformerURL(ctx context.Context, url string) (*mo return manager.GetInstance().ScraperCache.ScrapePerformerURL(url) } +func (r *queryResolver) ScrapeSceneQuery(ctx context.Context, scraperID string, query string) ([]*models.ScrapedScene, error) { + if query == "" { + return nil, nil + } + + return manager.GetInstance().ScraperCache.ScrapeSceneQuery(scraperID, query) +} + func (r *queryResolver) ScrapeScene(ctx context.Context, scraperID string, scene models.SceneUpdateInput) (*models.ScrapedScene, error) { id, err := strconv.Atoi(scene.ID) if err != nil { @@ -165,8 +173,10 @@ func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source models.Scr singleScene, err = manager.GetInstance().ScraperCache.ScrapeScene(*source.ScraperID, sceneID) } else if input.SceneInput != nil { singleScene, err = manager.GetInstance().ScraperCache.ScrapeSceneFragment(*source.ScraperID, *input.SceneInput) + } else if input.Query != nil { + return manager.GetInstance().ScraperCache.ScrapeSceneQuery(*source.ScraperID, *input.Query) } else { - return nil, errors.New("not implemented") + err = errors.New("scene_id, scene_input or query must be set") } if err != nil { diff --git a/pkg/scraper/action.go b/pkg/scraper/action.go index ac5e163b6..493163936 100644 --- a/pkg/scraper/action.go +++ b/pkg/scraper/action.go @@ -24,6 +24,7 @@ type scraper interface { scrapePerformerByFragment(scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) scrapePerformerByURL(url string) (*models.ScrapedPerformer, error) + scrapeScenesByName(name string) ([]*models.ScrapedScene, error) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedScene, error) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) scrapeSceneByURL(url string) (*models.ScrapedScene, error) diff --git a/pkg/scraper/config.go b/pkg/scraper/config.go index d71d2f954..78d3fe4fe 100644 --- a/pkg/scraper/config.go +++ b/pkg/scraper/config.go @@ -35,6 +35,12 @@ type config struct { // Configuration for querying gallery by a Gallery fragment GalleryByFragment *scraperTypeConfig `yaml:"galleryByFragment"` + // Configuration for querying scenes by name + SceneByName *scraperTypeConfig `yaml:"sceneByName"` + + // Configuration for querying scenes by query fragment + SceneByQueryFragment *scraperTypeConfig `yaml:"sceneByQueryFragment"` + // Configuration for querying a scene by a URL SceneByURL []*scrapeByURLConfig `yaml:"sceneByURL"` @@ -256,6 +262,9 @@ func (c config) toScraper() *models.Scraper { if c.SceneByFragment != nil { scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeFragment) } + if c.SceneByName != nil && c.SceneByQueryFragment != nil { + scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeName) + } if len(c.SceneByURL) > 0 { scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeURL) for _, v := range c.SceneByURL { @@ -353,7 +362,7 @@ func (c config) ScrapePerformerURL(url string, txnManager models.TransactionMana } func (c config) supportsScenes() bool { - return c.SceneByFragment != nil || len(c.SceneByURL) > 0 + return (c.SceneByName != nil && c.SceneByQueryFragment != nil) || c.SceneByFragment != nil || len(c.SceneByURL) > 0 } func (c config) supportsGalleries() bool { @@ -393,6 +402,15 
@@ func (c config) matchesMovieURL(url string) bool { return false } +func (c config) ScrapeSceneQuery(name string, txnManager models.TransactionManager, globalConfig GlobalConfig) ([]*models.ScrapedScene, error) { + if c.SceneByName != nil { + s := getScraper(*c.SceneByName, txnManager, c, globalConfig) + return s.scrapeScenesByName(name) + } + + return nil, nil +} + func (c config) ScrapeSceneByScene(scene *models.Scene, txnManager models.TransactionManager, globalConfig GlobalConfig) (*models.ScrapedScene, error) { if c.SceneByFragment != nil { s := getScraper(*c.SceneByFragment, txnManager, c, globalConfig) @@ -403,9 +421,8 @@ func (c config) ScrapeSceneByScene(scene *models.Scene, txnManager models.Transa } func (c config) ScrapeSceneByFragment(scene models.ScrapedSceneInput, txnManager models.TransactionManager, globalConfig GlobalConfig) (*models.ScrapedScene, error) { - if c.SceneByFragment != nil { - // TODO - this should be sceneByQueryFragment - s := getScraper(*c.SceneByFragment, txnManager, c, globalConfig) + if c.SceneByQueryFragment != nil { + s := getScraper(*c.SceneByQueryFragment, txnManager, c, globalConfig) return s.scrapeSceneByFragment(scene) } @@ -431,7 +448,7 @@ func (c config) ScrapeSceneURL(url string, txnManager models.TransactionManager, } func (c config) ScrapeGalleryByGallery(gallery *models.Gallery, txnManager models.TransactionManager, globalConfig GlobalConfig) (*models.ScrapedGallery, error) { - if c.SceneByFragment != nil { + if c.GalleryByFragment != nil { s := getScraper(*c.GalleryByFragment, txnManager, c, globalConfig) return s.scrapeGalleryByGallery(gallery) } diff --git a/pkg/scraper/json.go b/pkg/scraper/json.go index e5786761a..3494f451c 100644 --- a/pkg/scraper/json.go +++ b/pkg/scraper/json.go @@ -143,6 +143,31 @@ func (s *jsonScraper) scrapePerformerByFragment(scrapedPerformer models.ScrapedP return nil, errors.New("scrapePerformerByFragment not supported for json scraper") } +func (s *jsonScraper) scrapeScenesByName(name string) ([]*models.ScrapedScene, error) { + scraper := s.getJsonScraper() + + if scraper == nil { + return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config") + } + + const placeholder = "{}" + + // replace the placeholder string with the URL-escaped name + escapedName := url.QueryEscape(name) + + url := s.scraper.QueryURL + url = strings.Replace(url, placeholder, escapedName, -1) + + doc, err := s.loadURL(url) + + if err != nil { + return nil, err + } + + q := s.getJsonQuery(doc) + return scraper.scrapeScenes(q) +} + func (s *jsonScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedScene, error) { // construct the URL queryURL := queryURLParametersFromScene(scene) @@ -168,7 +193,27 @@ func (s *jsonScraper) scrapeSceneByScene(scene *models.ScrapedSc } func (s *jsonScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) { - return nil, errors.New("scrapeSceneByFragment not supported for json scraper") + // construct the URL + queryURL := queryURLParametersFromScrapedScene(scene) + if s.scraper.QueryURLReplacements != nil { + queryURL.applyReplacements(s.scraper.QueryURLReplacements) + } + url := queryURL.constructURL(s.scraper.QueryURL) + + scraper := s.getJsonScraper() + + if scraper == nil { + return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config") + } + + doc, err := s.loadURL(url) + + if err != nil { + return nil, err + } + + q := s.getJsonQuery(doc) + return scraper.scrapeScene(q) }
func (s *jsonScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error) { diff --git a/pkg/scraper/mapped.go b/pkg/scraper/mapped.go index ca5c7b1b8..80d854341 100644 --- a/pkg/scraper/mapped.go +++ b/pkg/scraper/mapped.go @@ -789,6 +789,100 @@ func (s mappedScraper) scrapePerformers(q mappedQuery) ([]*models.ScrapedPerform return ret, nil } +func (s mappedScraper) processScene(q mappedQuery, r mappedResult) *models.ScrapedScene { + var ret models.ScrapedScene + + sceneScraperConfig := s.Scene + + scenePerformersMap := sceneScraperConfig.Performers + sceneTagsMap := sceneScraperConfig.Tags + sceneStudioMap := sceneScraperConfig.Studio + sceneMoviesMap := sceneScraperConfig.Movies + + scenePerformerTagsMap := scenePerformersMap.Tags + + r.apply(&ret) + + // process performer tags once + var performerTagResults mappedResults + if scenePerformerTagsMap != nil { + performerTagResults = scenePerformerTagsMap.process(q, s.Common) + } + + // now apply the performers and tags + if scenePerformersMap.mappedConfig != nil { + logger.Debug(`Processing scene performers:`) + performerResults := scenePerformersMap.process(q, s.Common) + + for _, p := range performerResults { + performer := &models.ScrapedPerformer{} + p.apply(performer) + + for _, p := range performerTagResults { + tag := &models.ScrapedTag{} + p.apply(tag) + ret.Tags = append(ret.Tags, tag) + } + + ret.Performers = append(ret.Performers, performer) + } + } + + if sceneTagsMap != nil { + logger.Debug(`Processing scene tags:`) + tagResults := sceneTagsMap.process(q, s.Common) + + for _, p := range tagResults { + tag := &models.ScrapedTag{} + p.apply(tag) + ret.Tags = append(ret.Tags, tag) + } + } + + if sceneStudioMap != nil { + logger.Debug(`Processing scene studio:`) + studioResults := sceneStudioMap.process(q, s.Common) + + if len(studioResults) > 0 { + studio := &models.ScrapedStudio{} + studioResults[0].apply(studio) + ret.Studio = studio + } + } + + if sceneMoviesMap != nil { + logger.Debug(`Processing scene movies:`) + movieResults := sceneMoviesMap.process(q, s.Common) + + for _, p := range movieResults { + movie := &models.ScrapedMovie{} + p.apply(movie) + ret.Movies = append(ret.Movies, movie) + } + } + + return &ret +} + +func (s mappedScraper) scrapeScenes(q mappedQuery) ([]*models.ScrapedScene, error) { + var ret []*models.ScrapedScene + + sceneScraperConfig := s.Scene + sceneMap := sceneScraperConfig.mappedConfig + if sceneMap == nil { + return nil, nil + } + + logger.Debug(`Processing scenes:`) + results := sceneMap.process(q, s.Common) + for _, r := range results { + logger.Debug(`Processing scene:`) + ret = append(ret, s.processScene(q, r)) + } + + return ret, nil +} + func (s mappedScraper) scrapeScene(q mappedQuery) (*models.ScrapedScene, error) { var ret models.ScrapedScene @@ -798,76 +892,11 @@ func (s mappedScraper) scrapeScene(q mappedQuery) (*models.ScrapedScene, error) return nil, nil } - scenePerformersMap := sceneScraperConfig.Performers - sceneTagsMap := sceneScraperConfig.Tags - sceneStudioMap := sceneScraperConfig.Studio - sceneMoviesMap := sceneScraperConfig.Movies - - scenePerformerTagsMap := scenePerformersMap.Tags - logger.Debug(`Processing scene:`) results := sceneMap.process(q, s.Common) if len(results) > 0 { - results[0].apply(&ret) - - // process performer tags once - var performerTagResults mappedResults - if scenePerformerTagsMap != nil { - performerTagResults = scenePerformerTagsMap.process(q, s.Common) - } - - // now apply the performers and tags - if 
scenePerformersMap.mappedConfig != nil { - logger.Debug(`Processing scene performers:`) - performerResults := scenePerformersMap.process(q, s.Common) - - for _, p := range performerResults { - performer := &models.ScrapedPerformer{} - p.apply(performer) - - for _, p := range performerTagResults { - tag := &models.ScrapedTag{} - p.apply(tag) - ret.Tags = append(ret.Tags, tag) - } - - ret.Performers = append(ret.Performers, performer) - } - } - - if sceneTagsMap != nil { - logger.Debug(`Processing scene tags:`) - tagResults := sceneTagsMap.process(q, s.Common) - - for _, p := range tagResults { - tag := &models.ScrapedTag{} - p.apply(tag) - ret.Tags = append(ret.Tags, tag) - } - } - - if sceneStudioMap != nil { - logger.Debug(`Processing scene studio:`) - studioResults := sceneStudioMap.process(q, s.Common) - - if len(studioResults) > 0 { - studio := &models.ScrapedStudio{} - studioResults[0].apply(studio) - ret.Studio = studio - } - } - - if sceneMoviesMap != nil { - logger.Debug(`Processing scene movies:`) - movieResults := sceneMoviesMap.process(q, s.Common) - - for _, p := range movieResults { - movie := &models.ScrapedMovie{} - p.apply(movie) - ret.Movies = append(ret.Movies, movie) - } - - } + ss := s.processScene(q, results[0]) + ret = *ss } return &ret, nil diff --git a/pkg/scraper/query_url.go b/pkg/scraper/query_url.go index 462069d2f..b48b2b794 100644 --- a/pkg/scraper/query_url.go +++ b/pkg/scraper/query_url.go @@ -21,6 +21,23 @@ func queryURLParametersFromScene(scene *models.Scene) queryURLParameters { return ret } +func queryURLParametersFromScrapedScene(scene models.ScrapedSceneInput) queryURLParameters { + ret := make(queryURLParameters) + + setField := func(field string, value *string) { + if value != nil { + ret[field] = *value + } + } + + setField("title", scene.Title) + setField("url", scene.URL) + setField("date", scene.Date) + setField("details", scene.Details) + setField("remote_site_id", scene.RemoteSiteID) + return ret +} + func queryURLParameterFromURL(url string) queryURLParameters { ret := make(queryURLParameters) ret["url"] = url diff --git a/pkg/scraper/scrapers.go b/pkg/scraper/scrapers.go index 41b57d2cf..d039a59a7 100644 --- a/pkg/scraper/scrapers.go +++ b/pkg/scraper/scrapers.go @@ -3,7 +3,6 @@ package scraper import ( "context" "errors" - "fmt" "os" "path/filepath" "regexp" @@ -366,30 +365,43 @@ func (c Cache) postScrapeGallery(ret *models.ScrapedGallery) error { func (c Cache) ScrapeScene(scraperID string, sceneID int) (*models.ScrapedScene, error) { // find scraper with the provided id s := c.findScraper(scraperID) - if s == nil { - return nil, fmt.Errorf("scraper with ID %s not found", scraperID) - } - - // get scene from id - scene, err := getScene(sceneID, c.txnManager) - if err != nil { - return nil, err - } - - ret, err := s.ScrapeSceneByScene(scene, c.txnManager, c.globalConfig) - - if err != nil { - return nil, err - } - - if ret != nil { - err = c.postScrapeScene(ret) + if s != nil { + // get scene from id + scene, err := getScene(sceneID, c.txnManager) if err != nil { return nil, err } + + ret, err := s.ScrapeSceneByScene(scene, c.txnManager, c.globalConfig) + + if err != nil { + return nil, err + } + + if ret != nil { + err = c.postScrapeScene(ret) + if err != nil { + return nil, err + } + } + + return ret, nil } - return ret, nil + return nil, errors.New("Scraper with ID " + scraperID + " not found") +} + +// ScrapeSceneQuery uses the scraper with the provided ID to query for +// scenes using the provided query string. 
It returns a list of +// scraped scene data. +func (c Cache) ScrapeSceneQuery(scraperID string, query string) ([]*models.ScrapedScene, error) { + // find scraper with the provided id + s := c.findScraper(scraperID) + if s != nil { + return s.ScrapeSceneQuery(query, c.txnManager, c.globalConfig) + } + + return nil, errors.New("Scraper with ID " + scraperID + " not found") } // ScrapeSceneFragment uses the scraper with the provided ID to scrape a scene. diff --git a/pkg/scraper/script.go b/pkg/scraper/script.go index 6b47dd6ef..e3aa5f81a 100644 --- a/pkg/scraper/script.go +++ b/pkg/scraper/script.go @@ -148,6 +148,24 @@ func (s *scriptScraper) scrapeSceneByScene(scene *models.Scene) (*models.Scraped return &ret, err } +func (s *scriptScraper) scrapeScenesByName(name string) ([]*models.ScrapedScene, error) { + inString := `{"name": "` + name + `"}` + + var scenes []models.ScrapedScene + + err := s.runScraperScript(inString, &scenes) + + // convert to pointers + var ret []*models.ScrapedScene + if err == nil { + for i := 0; i < len(scenes); i++ { + ret = append(ret, &scenes[i]) + } + } + + return ret, err +} + func (s *scriptScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) { inString, err := json.Marshal(scene) diff --git a/pkg/scraper/stash.go b/pkg/scraper/stash.go index 539057196..7023a6d3a 100644 --- a/pkg/scraper/stash.go +++ b/pkg/scraper/stash.go @@ -151,6 +151,62 @@ type scrapedStudioStash struct { URL *string `graphql:"url" json:"url"` } +type stashFindSceneNamesResultType struct { + Count int `graphql:"count"` + Scenes []*scrapedSceneStash `graphql:"scenes"` +} + +func (s *stashScraper) scrapedStashSceneToScrapedScene(scene *scrapedSceneStash) (*models.ScrapedScene, error) { + ret := models.ScrapedScene{} + err := copier.Copy(&ret, scene) + if err != nil { + return nil, err + } + + // get the performer image directly + ret.Image, err = getStashSceneImage(s.config.StashServer.URL, scene.ID, s.globalConfig) + if err != nil { + return nil, err + } + + return &ret, nil +} + +func (s *stashScraper) scrapeScenesByName(name string) ([]*models.ScrapedScene, error) { + client := s.getStashClient() + + var q struct { + FindScenes stashFindSceneNamesResultType `graphql:"findScenes(filter: $f)"` + } + + page := 1 + perPage := 10 + + vars := map[string]interface{}{ + "f": models.FindFilterType{ + Q: &name, + Page: &page, + PerPage: &perPage, + }, + } + + err := client.Query(context.Background(), &q, vars) + if err != nil { + return nil, err + } + + var ret []*models.ScrapedScene + for _, scene := range q.FindScenes.Scenes { + converted, err := s.scrapedStashSceneToScrapedScene(scene) + if err != nil { + return nil, err + } + ret = append(ret, converted) + } + + return ret, nil +} + type scrapedSceneStash struct { ID string `graphql:"id" json:"id"` Title *string `graphql:"title" json:"title"` @@ -189,19 +245,18 @@ func (s *stashScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedS } // need to copy back to a scraped scene - ret := models.ScrapedScene{} - if err := copier.Copy(&ret, q.FindScene); err != nil { + ret, err := s.scrapedStashSceneToScrapedScene(q.FindScene) + if err != nil { return nil, err } // get the performer image directly - var err error ret.Image, err = getStashSceneImage(s.config.StashServer.URL, q.FindScene.ID, s.globalConfig) if err != nil { return nil, err } - return &ret, nil + return ret, nil } func (s *stashScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) { diff --git 
a/pkg/scraper/url.go b/pkg/scraper/url.go index d2adaa81b..8f5ede759 100644 --- a/pkg/scraper/url.go +++ b/pkg/scraper/url.go @@ -109,7 +109,7 @@ func loadURL(url string, scraperConfig config, globalConfig GlobalConfig) (io.Re func urlFromCDP(url string, driverOptions scraperDriverOptions, globalConfig GlobalConfig) (io.Reader, error) { if !driverOptions.UseCDP { - return nil, fmt.Errorf("URL shouldn't be fetched through CDP") + return nil, fmt.Errorf("url shouldn't be fetched through CDP") } sleepDuration := scrapeDefaultSleep diff --git a/pkg/scraper/xpath.go b/pkg/scraper/xpath.go index 71ab74a8d..270b9d381 100644 --- a/pkg/scraper/xpath.go +++ b/pkg/scraper/xpath.go @@ -124,6 +124,31 @@ func (s *xpathScraper) scrapePerformerByFragment(scrapedPerformer models.Scraped return nil, errors.New("scrapePerformerByFragment not supported for xpath scraper") } +func (s *xpathScraper) scrapeScenesByName(name string) ([]*models.ScrapedScene, error) { + scraper := s.getXpathScraper() + + if scraper == nil { + return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config") + } + + const placeholder = "{}" + + // replace the placeholder string with the URL-escaped name + escapedName := url.QueryEscape(name) + + url := s.scraper.QueryURL + url = strings.Replace(url, placeholder, escapedName, -1) + + doc, err := s.loadURL(url) + + if err != nil { + return nil, err + } + + q := s.getXPathQuery(doc) + return scraper.scrapeScenes(q) +} + func (s *xpathScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedScene, error) { // construct the URL queryURL := queryURLParametersFromScene(scene) @@ -149,7 +174,27 @@ func (s *xpathScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedS } func (s *xpathScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) { - return nil, errors.New("scrapeSceneByFragment not supported for xpath scraper") + // construct the URL + queryURL := queryURLParametersFromScrapedScene(scene) + if s.scraper.QueryURLReplacements != nil { + queryURL.applyReplacements(s.scraper.QueryURLReplacements) + } + url := queryURL.constructURL(s.scraper.QueryURL) + + scraper := s.getXpathScraper() + + if scraper == nil { + return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config") + } + + doc, err := s.loadURL(url) + + if err != nil { + return nil, err + } + + q := s.getXPathQuery(doc) + return scraper.scrapeScene(q) } func (s *xpathScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error) { diff --git a/ui/v2.5/src/components/Changelog/versions/v0100.md b/ui/v2.5/src/components/Changelog/versions/v0100.md index 9d86487e8..2e02f9b90 100644 --- a/ui/v2.5/src/components/Changelog/versions/v0100.md +++ b/ui/v2.5/src/components/Changelog/versions/v0100.md @@ -1,7 +1,8 @@ ### ✨ New Features +* Added support for querying scene scrapers using keywords. ([#1712](https://github.com/stashapp/stash/pull/1712)) * Added support for Studio aliases. ([#1660](https://github.com/stashapp/stash/pull/1660)) * Added support for Tag hierarchies. ([#1519](https://github.com/stashapp/stash/pull/1519)) -* Added native support for Apple Silicon / M1 Macs. ([#1646] https://github.com/stashapp/stash/pull/1646) +* Added native support for Apple Silicon / M1 Macs. ([#1646](https://github.com/stashapp/stash/pull/1646)) * Added Movies to Scene bulk edit dialog. ([#1676](https://github.com/stashapp/stash/pull/1676)) * Added Movies tab to Studio and Performer pages. 
([#1675](https://github.com/stashapp/stash/pull/1675)) * Support filtering Movies by Performers. ([#1675](https://github.com/stashapp/stash/pull/1675)) diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/SceneEditPanel.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/SceneEditPanel.tsx index c08d5060e..02c4f3429 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/SceneEditPanel.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/SceneEditPanel.tsx @@ -7,6 +7,7 @@ import { Form, Col, Row, + ButtonGroup, } from "react-bootstrap"; import Mousetrap from "mousetrap"; import * as GQL from "src/core/generated-graphql"; @@ -18,7 +19,7 @@ import { useSceneUpdate, mutateReloadScrapers, useConfiguration, - queryStashBoxScene, + queryScrapeSceneQueryFragment, } from "src/core/StashService"; import { PerformerSelect, @@ -37,6 +38,7 @@ import { Prompt } from "react-router"; import { SceneMovieTable } from "./SceneMovieTable"; import { RatingStars } from "./RatingStars"; import { SceneScrapeDialog } from "./SceneScrapeDialog"; +import { SceneQueryModal } from "./SceneQueryModal"; interface IProps { scene: GQL.SceneDataFragment; @@ -60,8 +62,14 @@ export const SceneEditPanel: React.FC = ({ ); const Scrapers = useListSceneScrapers(); + const [fragmentScrapers, setFragmentScrapers] = useState([]); const [queryableScrapers, setQueryableScrapers] = useState([]); + const [scraper, setScraper] = useState(); + const [ + isScraperQueryModalOpen, + setIsScraperQueryModalOpen, + ] = useState(false); const [scrapedScene, setScrapedScene] = useState(); const [coverImagePreview, setCoverImagePreview] = useState< @@ -181,12 +189,16 @@ export const SceneEditPanel: React.FC = ({ }); useEffect(() => { - const newQueryableScrapers = ( - Scrapers?.data?.listSceneScrapers ?? [] - ).filter((s) => + const toFilter = Scrapers?.data?.listSceneScrapers ?? 
[]; + + const newFragmentScrapers = toFilter.filter((s) => s.scene?.supported_scrapes.includes(GQL.ScrapeType.Fragment) ); + const newQueryableScrapers = toFilter.filter((s) => + s.scene?.supported_scrapes.includes(GQL.ScrapeType.Name) + ); + setFragmentScrapers(newFragmentScrapers); setQueryableScrapers(newQueryableScrapers); }, [Scrapers, stashConfig]); @@ -273,32 +285,10 @@ export const SceneEditPanel: React.FC = ({ ImageUtils.onImageChange(event, onImageLoad); } - async function onScrapeStashBoxClicked(stashBoxIndex: number) { + async function onScrapeClicked(s: GQL.ScraperSourceInput) { setIsLoading(true); try { - const result = await queryStashBoxScene(stashBoxIndex, scene.id); - if (!result.data || !result.data.scrapeSingleScene) { - return; - } - - if (result.data.scrapeSingleScene.length > 0) { - setScrapedScene(result.data.scrapeSingleScene[0]); - } else { - Toast.success({ - content: "No scenes found", - }); - } - } catch (e) { - Toast.error(e); - } finally { - setIsLoading(false); - } - } - - async function onScrapeClicked(scraper: GQL.Scraper) { - setIsLoading(true); - try { - const result = await queryScrapeScene(scraper.id, scene.id); + const result = await queryScrapeScene(s, scene.id); if (!result.data || !result.data.scrapeSingleScene?.length) { Toast.success({ content: "No scenes found", @@ -314,6 +304,41 @@ export const SceneEditPanel: React.FC = ({ } } + async function scrapeFromQuery( + s: GQL.ScraperSourceInput, + fragment: GQL.ScrapedSceneDataFragment + ) { + setIsLoading(true); + try { + const input: GQL.ScrapedSceneInput = { + date: fragment.date, + details: fragment.details, + remote_site_id: fragment.remote_site_id, + title: fragment.title, + url: fragment.url, + }; + + const result = await queryScrapeSceneQueryFragment(s, input); + if (!result.data || !result.data.scrapeSingleScene?.length) { + Toast.success({ + content: "No scenes found", + }); + return; + } + // assume one returned scene + setScrapedScene(result.data.scrapeSingleScene[0]); + } catch (e) { + Toast.error(e); + } finally { + setIsLoading(false); + } + } + + function onScrapeQueryClicked(s: GQL.ScraperSourceInput) { + setScraper(s); + setIsScraperQueryModalOpen(true); + } + async function onReloadScrapers() { setIsLoading(true); try { @@ -354,10 +379,79 @@ export const SceneEditPanel: React.FC = ({ ); } + function renderScrapeQueryMenu() { + const stashBoxes = stashConfig.data?.configuration.general.stashBoxes ?? []; + + if (stashBoxes.length === 0 && queryableScrapers.length === 0) return; + + return ( + + + + + + + {stashBoxes.map((s, index) => ( + onScrapeQueryClicked({ stash_box_index: index })} + > + {s.name ?? "Stash-Box"} + + ))} + {queryableScrapers.map((s) => ( + onScrapeQueryClicked({ scraper_id: s.id })} + > + {s.name} + + ))} + onReloadScrapers()}> + + + + + + + + + + ); + } + + function onSceneSelected(s: GQL.ScrapedSceneDataFragment) { + if (!scraper) return; + + if (scraper?.stash_box_index !== undefined) { + // must be stash-box - assume full scene + setScrapedScene(s); + } else { + // must be scraper + scrapeFromQuery(scraper, s); + } + } + + const renderScrapeQueryModal = () => { + if (!isScraperQueryModalOpen || !scraper) return; + + return ( + setScraper(undefined)} + onSelectScene={(s) => { + setIsScraperQueryModalOpen(false); + setScraper(undefined); + onSceneSelected(s); + }} + name={formik.values.title || ""} + /> + ); + }; + function renderScraperMenu() { const stashBoxes = stashConfig.data?.configuration.general.stashBoxes ?? 
[]; - // TODO - change name based on stashbox configuration return ( = ({ {stashBoxes.map((s, index) => ( onScrapeStashBoxClicked(index)} + onClick={() => onScrapeClicked({ stash_box_index: index })} > {s.name ?? "Stash-Box"} ))} - {queryableScrapers.map((s) => ( - onScrapeClicked(s)}> + {fragmentScrapers.map((s) => ( + onScrapeClicked({ scraper_id: s.id })} + > {s.name} ))} @@ -389,44 +486,6 @@ export const SceneEditPanel: React.FC = ({ ); } - function maybeRenderStashboxQueryButton() { - // const stashBoxes = stashConfig.data?.configuration.general.stashBoxes ?? []; - // if (stashBoxes.length === 0) { - // return; - // } - // TODO - hide this button for now, with the view to add it when we get - // the query dialog going - // if (stashBoxes.length === 1) { - // return ( - // - // ); - // } - // // TODO - change name based on stashbox configuration - // return ( - // - // - // - // - // - // {stashBoxes.map((s, index) => ( - // onStashBoxQueryClicked(index)} - // > - // stash-box - // - // ))} - // - // - // ); - } - function urlScrapable(scrapedUrl: string): boolean { return (Scrapers?.data?.listSceneScrapers ?? []).some((s) => (s?.scene?.urls ?? []).some((u) => scrapedUrl.includes(u)) @@ -556,10 +615,11 @@ export const SceneEditPanel: React.FC = ({ message={intl.formatMessage({ id: "dialogs.unsaved_changes" })} /> + {renderScrapeQueryModal()} {maybeRenderScrapeDialog()}
-
+
- - {maybeRenderStashboxQueryButton()} - {renderScraperMenu()} - +
+ + {renderScraperMenu()} + {renderScrapeQueryMenu()} + +
diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/SceneQueryModal.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/SceneQueryModal.tsx new file mode 100644 index 000000000..723c9644f --- /dev/null +++ b/ui/v2.5/src/components/Scenes/SceneDetails/SceneQueryModal.tsx @@ -0,0 +1,229 @@ +import React, { useCallback, useEffect, useRef, useState } from "react"; +import { Badge, Button, Col, Form, InputGroup, Row } from "react-bootstrap"; +import { FormattedMessage, useIntl } from "react-intl"; + +import * as GQL from "src/core/generated-graphql"; +import { + Modal, + LoadingIndicator, + TruncatedText, + Icon, +} from "src/components/Shared"; +import { queryScrapeSceneQuery } from "src/core/StashService"; +import { useToast } from "src/hooks"; + +interface ISceneSearchResultDetailsProps { + scene: GQL.ScrapedSceneDataFragment; +} + +const SceneSearchResultDetails: React.FC = ({ + scene, +}) => { + function renderPerformers() { + if (scene.performers) { + return ( + + + {scene.performers?.map((performer) => ( + + {performer.name} + + ))} + + + ); + } + } + + function renderTags() { + if (scene.tags) { + return ( + + + {scene.tags?.map((tag) => ( + + {tag.name} + + ))} + + + ); + } + } + + function renderImage() { + if (scene.image) { + return ( +
+ +
+ ); + } + } + + return ( +
+ + {renderImage()} +
+

{scene.title}

+
+ {scene.studio?.name} + {scene.studio?.name && scene.date && ` • `} + {scene.date} +
+
+
+ + + + + + {renderPerformers()} + {renderTags()} +
+ ); +}; + +export interface ISceneSearchResult { + scene: GQL.ScrapedSceneDataFragment; +} + +export const SceneSearchResult: React.FC = ({ scene }) => { + return ( +
+
+ +
+
+ ); +}; + +interface IProps { + scraper: GQL.ScraperSourceInput; + onHide: () => void; + onSelectScene: (scene: GQL.ScrapedSceneDataFragment) => void; + name?: string; +} +export const SceneQueryModal: React.FC = ({ + scraper, + name, + onHide, + onSelectScene, +}) => { + const CLASSNAME = "SceneScrapeModal"; + const CLASSNAME_LIST = `${CLASSNAME}-list`; + const CLASSNAME_LIST_CONTAINER = `${CLASSNAME_LIST}-container`; + + const intl = useIntl(); + const Toast = useToast(); + + const inputRef = useRef(null); + const [loading, setLoading] = useState(false); + const [scenes, setScenes] = useState(); + const [error, setError] = useState(); + + const doQuery = useCallback( + async (input: string) => { + if (!input) return; + + setLoading(true); + try { + const r = await queryScrapeSceneQuery(scraper, input); + setScenes(r.data.scrapeSingleScene); + } catch (err) { + setError(err); + } finally { + setLoading(false); + } + }, + [scraper] + ); + + useEffect(() => inputRef.current?.focus(), []); + useEffect(() => { + if (error) { + Toast.error(error); + setError(undefined); + } + }, [error, Toast]); + + function renderResults() { + if (!scenes) { + return; + } + + return ( +
+
+ +
+
    + {scenes.map((s, i) => ( + // eslint-disable-next-line jsx-a11y/click-events-have-key-events, jsx-a11y/no-noninteractive-element-interactions, react/no-array-index-key +
  • onSelectScene(s)}> + +
  • + ))} +
+
+ ); + } + + return ( + +
+ + ) => + e.key === "Enter" && doQuery(inputRef.current?.value ?? "") + } + /> + + + + + + {loading ? ( +
+ +
+ ) : ( + renderResults() + )} +
+
+ ); +}; diff --git a/ui/v2.5/src/components/Scenes/styles.scss b/ui/v2.5/src/components/Scenes/styles.scss index b3294d85e..c8bcc4979 100644 --- a/ui/v2.5/src/components/Scenes/styles.scss +++ b/ui/v2.5/src/components/Scenes/styles.scss @@ -583,3 +583,31 @@ input[type="range"].blue-slider { background-color: $secondary; } } + +.scrape-query-dialog { + max-height: calc(100vh - 10rem); +} + +.scraper-group { + & > .dropdown:not(:last-child) .btn { + border-bottom-right-radius: 0; + border-top-right-radius: 0; + } + + & > .dropdown:not(:first-child) .btn { + border-bottom-left-radius: 0; + border-top-left-radius: 0; + } +} + +.SceneScrapeModal-list { + list-style: none; + max-height: 50vh; + overflow-x: hidden; + overflow-y: auto; + padding-inline-start: 0; + + li { + cursor: pointer; + } +} diff --git a/ui/v2.5/src/core/StashService.ts b/ui/v2.5/src/core/StashService.ts index a1bc343dd..2a2e4abd4 100644 --- a/ui/v2.5/src/core/StashService.ts +++ b/ui/v2.5/src/core/StashService.ts @@ -836,6 +836,21 @@ export const queryScrapePerformerURL = (url: string) => fetchPolicy: "network-only", }); +export const queryScrapeSceneQuery = ( + source: GQL.ScraperSourceInput, + q: string +) => + client.query({ + query: GQL.ScrapeSingleSceneDocument, + variables: { + source, + input: { + query: q, + }, + }, + fetchPolicy: "network-only", + }); + export const queryScrapeSceneURL = (url: string) => client.query({ query: GQL.ScrapeSceneUrlDocument, @@ -863,13 +878,14 @@ export const queryScrapeMovieURL = (url: string) => fetchPolicy: "network-only", }); -export const queryScrapeScene = (scraperId: string, sceneId: string) => +export const queryScrapeScene = ( + source: GQL.ScraperSourceInput, + sceneId: string +) => client.query({ query: GQL.ScrapeSingleSceneDocument, variables: { - source: { - scraper_id: scraperId, - }, + source, input: { scene_id: sceneId, }, @@ -888,22 +904,22 @@ export const queryStashBoxScene = (stashBoxIndex: number, sceneID: string) => scene_id: sceneID, }, }, + fetchPolicy: "network-only", }); -export const queryStashBoxPerformer = ( - stashBoxIndex: number, - performerID: string +export const queryScrapeSceneQueryFragment = ( + source: GQL.ScraperSourceInput, + input: GQL.ScrapedSceneInput ) => - client.query({ - query: GQL.ScrapeSinglePerformerDocument, + client.query({ + query: GQL.ScrapeSingleSceneDocument, variables: { - source: { - stash_box_index: stashBoxIndex, - }, + source, input: { - performer_id: performerID, + scene_input: input, }, }, + fetchPolicy: "network-only", }); export const queryScrapeGallery = (scraperId: string, galleryId: string) => diff --git a/ui/v2.5/src/docs/en/Scraping.md b/ui/v2.5/src/docs/en/Scraping.md index 9dd8374a9..9f072a3ce 100644 --- a/ui/v2.5/src/docs/en/Scraping.md +++ b/ui/v2.5/src/docs/en/Scraping.md @@ -40,6 +40,10 @@ performerByFragment: performerByURL: +sceneByName: + +sceneByQueryFragment: + sceneByFragment: sceneByURL: @@ -63,6 +67,7 @@ The scraping types and their required fields are outlined in the following table |-----------|------------------------| | Scraper in `Scrape...` dropdown button in Performer Edit page | Valid `performerByName` and `performerByFragment` configurations. | | Scrape performer from URL | Valid `performerByURL` configuration with matching URL. | +| Scraper in query dropdown button in Scene Edit page | Valid `sceneByName` and `sceneByQueryFragment` configurations. | | Scraper in `Scrape...` dropdown button in Scene Edit page | Valid `sceneByFragment` configuration. 
| | Scrape scene from URL | Valid `sceneByURL` configuration with matching URL. | | Scrape movie from URL | Valid `movieByURL` configuration with matching URL. | @@ -97,7 +102,8 @@ The script is sent input and expects output based on the scraping type, as detai | `performerByName` | `{"name": ""}` | Array of JSON-encoded performer fragments (including at least `name`) | | `performerByFragment` | JSON-encoded performer fragment | JSON-encoded performer fragment | | `performerByURL` | `{"url": ""}` | JSON-encoded performer fragment | -| `sceneByFragment` | JSON-encoded scene fragment | JSON-encoded scene fragment | +| `sceneByName` | `{"name": ""}` | Array of JSON-encoded scene fragments | +| `sceneByQueryFragment`, `sceneByFragment` | JSON-encoded scene fragment | JSON-encoded scene fragment | | `sceneByURL` | `{"url": ""}` | JSON-encoded scene fragment | | `movieByURL` | `{"url": ""}` | JSON-encoded movie fragment | | `galleryByFragment` | JSON-encoded gallery fragment | JSON-encoded gallery fragment | @@ -217,9 +223,9 @@ xPathScrapers: # ... performer scraper details ... ``` -### scrapeXPath and scrapeJson use with `sceneByFragment` +### scrapeXPath and scrapeJson use with `sceneByFragment` and `sceneByQueryFragment` -For `sceneByFragment`, the `queryURL` field must also be present. This field is used to build a query URL for scenes. For `sceneByFragment`, the `queryURL` field supports the following placeholder fields: +For `sceneByFragment` and `sceneByQueryFragment`, the `queryURL` field must also be present. This field is used to build a query URL for scenes. For `sceneByFragment`, the `queryURL` field supports the following placeholder fields: * `{checksum}` - the MD5 checksum of the scene * `{oshash}` - the oshash of the scene * `{filename}` - the base filename of the scene diff --git a/ui/v2.5/src/locales/en-GB.json b/ui/v2.5/src/locales/en-GB.json index 0251ff1be..64d57468c 100644 --- a/ui/v2.5/src/locales/en-GB.json +++ b/ui/v2.5/src/locales/en-GB.json @@ -61,6 +61,7 @@ "save_filter": "Save filter", "scan": "Scan", "scrape_with": "Scrape with…", + "scrape_query": "Scrape query", "search": "Search", "select_all": "Select All", "select_none": "Select None", @@ -456,6 +457,8 @@ "transcodes": "Transcodes (MP4 conversions of unsupported video formats)", "video_previews": "Previews (video previews which play when hovering over a scene)" }, + "scenes_found": "{count} scenes found", + "scrape_entity_query": "{entity_type} Scrape Query", "scrape_entity_title": "{entity_type} Scrape Results", "scrape_results_existing": "Existing", "scrape_results_scraped": "Scraped",
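Below is a minimal sketch of how the two new keys documented in Scraping.md fit together in an XPath scraper definition. The site URL, scraper names and selectors are hypothetical placeholders; only the `sceneByName`/`sceneByQueryFragment` keys, the `{}` placeholder used by `sceneByName`, and the scene-fragment placeholders (`{title}`, `{url}`, `{date}`, `{details}`, `{remote_site_id}`) available to `sceneByQueryFragment` come from this change.

```yaml
# Hypothetical example for illustration only — the site, scraper names and
# XPath selectors are placeholders, not a real scraper definition.
name: Example
sceneByName:
  action: scrapeXPath
  # {} is replaced with the URL-escaped search string
  queryURL: https://example.com/search?q={}
  scraper: sceneSearchScraper
sceneByQueryFragment:
  action: scrapeXPath
  # placeholders are filled from the scene selected in the query dialog
  queryURL: "{url}"
  scraper: sceneScraper
xPathScrapers:
  sceneSearchScraper:
    scene:
      Title: //div[@class="search-result"]//h2/text()
      URL: //div[@class="search-result"]//a/@href
  sceneScraper:
    scene:
      Title: //h1/text()
      Date: //span[@class="release-date"]/text()
```

With a definition like this, the query dropdown in the Scene Edit page calls `sceneByName` to list candidate scenes, and selecting one passes its scraped fields (here the scene URL) to `sceneByQueryFragment` to fetch the full result.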