Unify scrape refactor (#1630)

* Unify scraped types
* Make name fields optional
* Unify single scrape queries
* Change UI to use new interfaces
* Add multi scrape interfaces
* Use images instead of image
WithoutPants 2021-09-07 11:54:22 +10:00 committed by GitHub
parent 04e146f290
commit 4625e1f955
40 changed files with 1035 additions and 781 deletions
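
To orient the reader before the diff, here is a minimal sketch of how a client call changes under the unified interface, drawn from the schema below; the scraper ID follows the builtin_freeones hint in the deprecation notices and the query string is a placeholder:

query {
  scrapeSinglePerformer(
    source: { scraper_id: "builtin_freeones" }
    input: { query: "performer name" }
  ) {
    stored_id
    name
    remote_site_id
  }
}

The same ScraperSourceInput can instead target a stash-box instance by setting stash_box_index rather than scraper_id; the two fields are intended to be mutually exclusive.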

@ -34,24 +34,8 @@ models:
model: github.com/stashapp/stash/pkg/models.Movie
Tag:
model: github.com/stashapp/stash/pkg/models.Tag
-ScrapedPerformer:
-model: github.com/stashapp/stash/pkg/models.ScrapedPerformer
-ScrapedScene:
-model: github.com/stashapp/stash/pkg/models.ScrapedScene
-ScrapedScenePerformer:
-model: github.com/stashapp/stash/pkg/models.ScrapedScenePerformer
-ScrapedSceneStudio:
-model: github.com/stashapp/stash/pkg/models.ScrapedSceneStudio
-ScrapedSceneMovie:
-model: github.com/stashapp/stash/pkg/models.ScrapedSceneMovie
-ScrapedSceneTag:
-model: github.com/stashapp/stash/pkg/models.ScrapedSceneTag
SceneFileType:
model: github.com/stashapp/stash/pkg/models.SceneFileType
-ScrapedMovie:
-model: github.com/stashapp/stash/pkg/models.ScrapedMovie
-ScrapedMovieStudio:
-model: github.com/stashapp/stash/pkg/models.ScrapedMovieStudio
SavedFilter:
model: github.com/stashapp/stash/pkg/models.SavedFilter
StashID:

@ -1,4 +1,5 @@
fragment ScrapedPerformerData on ScrapedPerformer {
+stored_id
name
gender
url
@ -18,7 +19,7 @@ fragment ScrapedPerformerData on ScrapedPerformer {
tags {
...ScrapedSceneTagData
}
-image
+images
details
death_date
hair_color
@ -26,7 +27,7 @@ fragment ScrapedPerformerData on ScrapedPerformer {
remote_site_id
}

-fragment ScrapedScenePerformerData on ScrapedScenePerformer {
+fragment ScrapedScenePerformerData on ScrapedPerformer {
stored_id
name
gender
@ -55,8 +56,8 @@ fragment ScrapedScenePerformerData on ScrapedScenePerformer {
weight
}

-fragment ScrapedMovieStudioData on ScrapedMovieStudio {
-id
+fragment ScrapedMovieStudioData on ScrapedStudio {
+stored_id
name
url
}
@ -78,7 +79,7 @@ fragment ScrapedMovieData on ScrapedMovie {
}
}

-fragment ScrapedSceneMovieData on ScrapedSceneMovie {
+fragment ScrapedSceneMovieData on ScrapedMovie {
stored_id
name
aliases
@ -90,14 +91,14 @@ fragment ScrapedSceneMovieData on ScrapedSceneMovie {
synopsis
}

-fragment ScrapedSceneStudioData on ScrapedSceneStudio {
+fragment ScrapedSceneStudioData on ScrapedStudio {
stored_id
name
url
remote_site_id
}

-fragment ScrapedSceneTagData on ScrapedSceneTag {
+fragment ScrapedSceneTagData on ScrapedTag {
stored_id
name
}
@ -108,6 +109,7 @@ fragment ScrapedSceneData on ScrapedScene {
url
date
image
+remote_site_id
file {
size
@ -135,6 +137,12 @@ fragment ScrapedSceneData on ScrapedScene {
movies {
...ScrapedSceneMovieData
}
+fingerprints {
+hash
+algorithm
+duration
+}
}

fragment ScrapedGalleryData on ScrapedGallery {

@ -42,14 +42,14 @@ query ListMovieScrapers {
}
}

-query ScrapePerformerList($scraper_id: ID!, $query: String!) {
-scrapePerformerList(scraper_id: $scraper_id, query: $query) {
+query ScrapeSinglePerformer($source: ScraperSourceInput!, $input: ScrapeSinglePerformerInput!) {
+scrapeSinglePerformer(source: $source, input: $input) {
...ScrapedPerformerData
}
}

-query ScrapePerformer($scraper_id: ID!, $scraped_performer: ScrapedPerformerInput!) {
-scrapePerformer(scraper_id: $scraper_id, scraped_performer: $scraped_performer) {
+query ScrapeMultiPerformers($source: ScraperSourceInput!, $input: ScrapeMultiPerformersInput!) {
+scrapeMultiPerformers(source: $source, input: $input) {
...ScrapedPerformerData
}
}
@ -60,8 +60,14 @@ query ScrapePerformerURL($url: String!) {
}
}

-query ScrapeScene($scraper_id: ID!, $scene: SceneUpdateInput!) {
-scrapeScene(scraper_id: $scraper_id, scene: $scene) {
+query ScrapeSingleScene($source: ScraperSourceInput!, $input: ScrapeSingleSceneInput!) {
+scrapeSingleScene(source: $source, input: $input) {
+...ScrapedSceneData
+}
+}
+
+query ScrapeMultiScenes($source: ScraperSourceInput!, $input: ScrapeMultiScenesInput!) {
+scrapeMultiScenes(source: $source, input: $input) {
...ScrapedSceneData
}
}
@ -72,8 +78,8 @@ query ScrapeSceneURL($url: String!) {
}
}

-query ScrapeGallery($scraper_id: ID!, $gallery: GalleryUpdateInput!) {
-scrapeGallery(scraper_id: $scraper_id, gallery: $gallery) {
+query ScrapeSingleGallery($source: ScraperSourceInput!, $input: ScrapeSingleGalleryInput!) {
+scrapeSingleGallery(source: $source, input: $input) {
...ScrapedGalleryData
}
}
@ -89,15 +95,3 @@ query ScrapeMovieURL($url: String!) {
...ScrapedMovieData
}
}
-query QueryStashBoxScene($input: StashBoxSceneQueryInput!) {
-queryStashBoxScene(input: $input) {
-...ScrapedStashBoxSceneData
-}
-}
-query QueryStashBoxPerformer($input: StashBoxPerformerQueryInput!) {
-queryStashBoxPerformer(input: $input) {
-...ScrapedStashBoxPerformerData
-}
-}

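For reference, a sketch of the kind of single-scene lookup these new operations enable, assuming a stash-box source at index 0 and a placeholder scene ID; the selected fields come from the ScrapedScene type changed in this commit:

query {
  scrapeSingleScene(
    source: { stash_box_index: 0 }
    input: { scene_id: "123" }
  ) {
    title
    remote_site_id
    fingerprints {
      hash
      algorithm
      duration
    }
  }
}
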
@ -72,31 +72,50 @@ type Query {
listGalleryScrapers: [Scraper!]!
listMovieScrapers: [Scraper!]!

-"""Scrape a list of performers based on name"""
-scrapePerformerList(scraper_id: ID!, query: String!): [ScrapedPerformer!]!
-"""Scrapes a complete performer record based on a scrapePerformerList result"""
-scrapePerformer(scraper_id: ID!, scraped_performer: ScrapedPerformerInput!): ScrapedPerformer
+"""Scrape for a single scene"""
+scrapeSingleScene(source: ScraperSourceInput!, input: ScrapeSingleSceneInput!): [ScrapedScene!]!
+"""Scrape for multiple scenes"""
+scrapeMultiScenes(source: ScraperSourceInput!, input: ScrapeMultiScenesInput!): [[ScrapedScene!]!]!
+"""Scrape for a single performer"""
+scrapeSinglePerformer(source: ScraperSourceInput!, input: ScrapeSinglePerformerInput!): [ScrapedPerformer!]!
+"""Scrape for multiple performers"""
+scrapeMultiPerformers(source: ScraperSourceInput!, input: ScrapeMultiPerformersInput!): [[ScrapedPerformer!]!]!
+"""Scrape for a single gallery"""
+scrapeSingleGallery(source: ScraperSourceInput!, input: ScrapeSingleGalleryInput!): [ScrapedGallery!]!
+"""Scrape for a single movie"""
+scrapeSingleMovie(source: ScraperSourceInput!, input: ScrapeSingleMovieInput!): [ScrapedMovie!]!

"""Scrapes a complete performer record based on a URL"""
scrapePerformerURL(url: String!): ScrapedPerformer
-"""Scrapes a complete scene record based on an existing scene"""
-scrapeScene(scraper_id: ID!, scene: SceneUpdateInput!): ScrapedScene
"""Scrapes a complete performer record based on a URL"""
scrapeSceneURL(url: String!): ScrapedScene
-"""Scrapes a complete gallery record based on an existing gallery"""
-scrapeGallery(scraper_id: ID!, gallery: GalleryUpdateInput!): ScrapedGallery
"""Scrapes a complete gallery record based on a URL"""
scrapeGalleryURL(url: String!): ScrapedGallery
"""Scrapes a complete movie record based on a URL"""
scrapeMovieURL(url: String!): ScrapedMovie

+"""Scrape a list of performers based on name"""
+scrapePerformerList(scraper_id: ID!, query: String!): [ScrapedPerformer!]! @deprecated(reason: "use scrapeSinglePerformer")
+"""Scrapes a complete performer record based on a scrapePerformerList result"""
+scrapePerformer(scraper_id: ID!, scraped_performer: ScrapedPerformerInput!): ScrapedPerformer @deprecated(reason: "use scrapeSinglePerformer")
+"""Scrapes a complete scene record based on an existing scene"""
+scrapeScene(scraper_id: ID!, scene: SceneUpdateInput!): ScrapedScene @deprecated(reason: "use scrapeSingleScene")
+"""Scrapes a complete gallery record based on an existing gallery"""
+scrapeGallery(scraper_id: ID!, gallery: GalleryUpdateInput!): ScrapedGallery @deprecated(reason: "use scrapeSingleGallery")
"""Scrape a performer using Freeones"""
-scrapeFreeones(performer_name: String!): ScrapedPerformer
+scrapeFreeones(performer_name: String!): ScrapedPerformer @deprecated(reason: "use scrapeSinglePerformer with scraper_id = builtin_freeones")
"""Scrape a list of performers from a query"""
-scrapeFreeonesPerformerList(query: String!): [String!]!
+scrapeFreeonesPerformerList(query: String!): [String!]! @deprecated(reason: "use scrapeSinglePerformer with scraper_id = builtin_freeones")
"""Query StashBox for scenes"""
-queryStashBoxScene(input: StashBoxSceneQueryInput!): [ScrapedScene!]!
-queryStashBoxPerformer(input: StashBoxPerformerQueryInput!): [StashBoxPerformerQueryResult!]!
+queryStashBoxScene(input: StashBoxSceneQueryInput!): [ScrapedScene!]! @deprecated(reason: "use scrapeSingleScene or scrapeMultiScenes")
+"""Query StashBox for performers"""
+queryStashBoxPerformer(input: StashBoxPerformerQueryInput!): [StashBoxPerformerQueryResult!]! @deprecated(reason: "use scrapeSinglePerformer or scrapeMultiPerformers")
+# === end deprecated methods ===

# Plugins
"""List loaded plugins"""

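A sketch of the batched variant declared above, which returns one result list per requested ID (hence the nested list return type); the IDs are placeholders and the source assumes the first configured stash-box instance:

query {
  scrapeMultiScenes(
    source: { stash_box_index: 0 }
    input: { scene_ids: ["12", "34"] }
  ) {
    title
    remote_site_id
  }
}
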
@ -1,12 +1,6 @@
-type ScrapedMovieStudio {
-"""Set if studio matched"""
-id: ID
-name: String!
-url: String
-}

"""A movie from a scraping operation..."""
type ScrapedMovie {
+stored_id: ID
name: String
aliases: String
duration: String
@ -15,7 +9,7 @@ type ScrapedMovie {
director: String
url: String
synopsis: String
-studio: ScrapedMovieStudio
+studio: ScrapedStudio
"""This should be a base64 encoded data URL"""
front_image: String

@ -1,5 +1,7 @@
"""A performer from a scraping operation...""" """A performer from a scraping operation..."""
type ScrapedPerformer { type ScrapedPerformer {
"""Set if performer matched"""
stored_id: ID
name: String name: String
gender: String gender: String
url: String url: String
@ -16,11 +18,11 @@ type ScrapedPerformer {
tattoos: String tattoos: String
piercings: String piercings: String
aliases: String aliases: String
# Should be ScrapedPerformerTag - but would be identical types tags: [ScrapedTag!]
tags: [ScrapedSceneTag!]
"""This should be a base64 encoded data URL""" """This should be a base64 encoded data URL"""
image: String image: String @deprecated(reason: "use images instead")
images: [String!]
details: String details: String
death_date: String death_date: String
hair_color: String hair_color: String
@ -29,6 +31,8 @@ type ScrapedPerformer {
} }
input ScrapedPerformerInput { input ScrapedPerformerInput {
"""Set if performer matched"""
stored_id: ID
name: String name: String
gender: String gender: String
url: String url: String

@ -26,49 +26,7 @@ type Scraper {
movie: ScraperSpec
}

-type ScrapedScenePerformer {
-"""Set if performer matched"""
-stored_id: ID
-name: String!
-gender: String
-url: String
-twitter: String
-instagram: String
-birthdate: String
-ethnicity: String
-country: String
-eye_color: String
-height: String
-measurements: String
-fake_tits: String
-career_length: String
-tattoos: String
-piercings: String
-aliases: String
-tags: [ScrapedSceneTag!]
-remote_site_id: String
-images: [String!]
-details: String
-death_date: String
-hair_color: String
-weight: String
-}
-
-type ScrapedSceneMovie {
-"""Set if movie matched"""
-stored_id: ID
-name: String!
-aliases: String
-duration: String
-date: String
-rating: String
-director: String
-synopsis: String
-url: String
-}
-
-type ScrapedSceneStudio {
+type ScrapedStudio {
"""Set if studio matched"""
stored_id: ID
name: String!
@ -77,7 +35,7 @@ type ScrapedSceneStudio {
remote_site_id: String
}

-type ScrapedSceneTag {
+type ScrapedTag {
"""Set if tag matched"""
stored_id: ID
name: String!
@ -94,25 +52,98 @@ type ScrapedScene {
file: SceneFileType # Resolver

-studio: ScrapedSceneStudio
-tags: [ScrapedSceneTag!]
-performers: [ScrapedScenePerformer!]
-movies: [ScrapedSceneMovie!]
+studio: ScrapedStudio
+tags: [ScrapedTag!]
+performers: [ScrapedPerformer!]
+movies: [ScrapedMovie!]

remote_site_id: String
duration: Int
fingerprints: [StashBoxFingerprint!]
}
input ScrapedSceneInput {
title: String
details: String
url: String
date: String
# no image, file, duration or relationships
remote_site_id: String
}
type ScrapedGallery {
title: String
details: String
url: String
date: String

-studio: ScrapedSceneStudio
-tags: [ScrapedSceneTag!]
-performers: [ScrapedScenePerformer!]
+studio: ScrapedStudio
+tags: [ScrapedTag!]
+performers: [ScrapedPerformer!]
}
input ScrapedGalleryInput {
title: String
details: String
url: String
date: String
# no studio, tags or performers
}
input ScraperSourceInput {
"""Index of the configured stash-box instance to use. Should be unset if scraper_id is set"""
stash_box_index: Int
"""Scraper ID to scrape with. Should be unset if stash_box_index is set"""
scraper_id: ID
}
input ScrapeSingleSceneInput {
"""Instructs to query by string"""
query: String
"""Instructs to query by scene fingerprints"""
scene_id: ID
"""Instructs to query by scene fragment"""
scene_input: ScrapedSceneInput
}
input ScrapeMultiScenesInput {
"""Instructs to query by scene fingerprints"""
scene_ids: [ID!]
}
input ScrapeSinglePerformerInput {
"""Instructs to query by string"""
query: String
"""Instructs to query by performer id"""
performer_id: ID
"""Instructs to query by performer fragment"""
performer_input: ScrapedPerformerInput
}
input ScrapeMultiPerformersInput {
"""Instructs to query by scene fingerprints"""
performer_ids: [ID!]
}
input ScrapeSingleGalleryInput {
"""Instructs to query by string"""
query: String
"""Instructs to query by gallery id"""
gallery_id: ID
"""Instructs to query by gallery fragment"""
gallery_input: ScrapedGalleryInput
}
input ScrapeSingleMovieInput {
"""Instructs to query by string"""
query: String
"""Instructs to query by movie id"""
movie_id: ID
"""Instructs to query by gallery fragment"""
movie_input: ScrapedMovieInput
}

input StashBoxSceneQueryInput {
@ -135,7 +166,7 @@ input StashBoxPerformerQueryInput {
type StashBoxPerformerQueryResult {
query: String!
-results: [ScrapedScenePerformer!]!
+results: [ScrapedPerformer!]!
}

type StashBoxFingerprint {

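To illustrate the fragment-based mode declared above (scene_input, as opposed to query or scene_id), a sketch of a lookup against a named scraper; the scraper ID and field values are placeholders and only fields present on ScrapedSceneInput are supplied:

query {
  scrapeSingleScene(
    source: { scraper_id: "exampleScraper" }
    input: {
      scene_input: {
        title: "Some Scene Title"
        url: "https://example.com/scene/1"
        date: "2021-09-07"
      }
    }
  ) {
    title
    url
    date
    studio {
      stored_id
      name
    }
  }
}
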
@ -53,22 +53,6 @@ func (r *Resolver) Tag() models.TagResolver {
return &tagResolver{r} return &tagResolver{r}
} }
func (r *Resolver) ScrapedSceneTag() models.ScrapedSceneTagResolver {
return &scrapedSceneTagResolver{r}
}
func (r *Resolver) ScrapedSceneMovie() models.ScrapedSceneMovieResolver {
return &scrapedSceneMovieResolver{r}
}
func (r *Resolver) ScrapedScenePerformer() models.ScrapedScenePerformerResolver {
return &scrapedScenePerformerResolver{r}
}
func (r *Resolver) ScrapedSceneStudio() models.ScrapedSceneStudioResolver {
return &scrapedSceneStudioResolver{r}
}
type mutationResolver struct{ *Resolver }
type queryResolver struct{ *Resolver }
type subscriptionResolver struct{ *Resolver }
@ -81,10 +65,6 @@ type imageResolver struct{ *Resolver }
type studioResolver struct{ *Resolver }
type movieResolver struct{ *Resolver }
type tagResolver struct{ *Resolver }
type scrapedSceneTagResolver struct{ *Resolver }
type scrapedSceneMovieResolver struct{ *Resolver }
type scrapedScenePerformerResolver struct{ *Resolver }
type scrapedSceneStudioResolver struct{ *Resolver }
func (r *Resolver) withTxn(ctx context.Context, fn func(r models.Repository) error) error {
return r.txnManager.WithTxn(ctx, fn)

@ -1,23 +0,0 @@
package api
import (
"context"
"github.com/stashapp/stash/pkg/models"
)
func (r *scrapedSceneTagResolver) StoredID(ctx context.Context, obj *models.ScrapedSceneTag) (*string, error) {
return obj.ID, nil
}
func (r *scrapedSceneMovieResolver) StoredID(ctx context.Context, obj *models.ScrapedSceneMovie) (*string, error) {
return obj.ID, nil
}
func (r *scrapedScenePerformerResolver) StoredID(ctx context.Context, obj *models.ScrapedScenePerformer) (*string, error) {
return obj.ID, nil
}
func (r *scrapedSceneStudioResolver) StoredID(ctx context.Context, obj *models.ScrapedSceneStudio) (*string, error) {
return obj.ID, nil
}

@ -2,7 +2,9 @@ package api
import (
"context"
+"errors"
"fmt"
+"strconv"

"github.com/stashapp/stash/pkg/manager"
"github.com/stashapp/stash/pkg/manager/config"
@ -29,8 +31,9 @@ func (r *queryResolver) ScrapeFreeonesPerformerList(ctx context.Context, query s
var ret []string
for _, v := range scrapedPerformers {
-name := v.Name
-ret = append(ret, *name)
+if v.Name != nil {
+ret = append(ret, *v.Name)
+}
}

return ret, nil
@ -69,7 +72,12 @@ func (r *queryResolver) ScrapePerformerURL(ctx context.Context, url string) (*mo
}

func (r *queryResolver) ScrapeScene(ctx context.Context, scraperID string, scene models.SceneUpdateInput) (*models.ScrapedScene, error) {
-return manager.GetInstance().ScraperCache.ScrapeScene(scraperID, scene)
+id, err := strconv.Atoi(scene.ID)
+if err != nil {
+return nil, err
+}
+
+return manager.GetInstance().ScraperCache.ScrapeScene(scraperID, id)
}

func (r *queryResolver) ScrapeSceneURL(ctx context.Context, url string) (*models.ScrapedScene, error) {
@ -77,7 +85,12 @@ func (r *queryResolver) ScrapeSceneURL(ctx context.Context, url string) (*models
}

func (r *queryResolver) ScrapeGallery(ctx context.Context, scraperID string, gallery models.GalleryUpdateInput) (*models.ScrapedGallery, error) {
-return manager.GetInstance().ScraperCache.ScrapeGallery(scraperID, gallery)
+id, err := strconv.Atoi(gallery.ID)
+if err != nil {
+return nil, err
+}
+
+return manager.GetInstance().ScraperCache.ScrapeGallery(scraperID, id)
}

func (r *queryResolver) ScrapeGalleryURL(ctx context.Context, url string) (*models.ScrapedGallery, error) {
@ -98,7 +111,7 @@ func (r *queryResolver) QueryStashBoxScene(ctx context.Context, input models.Sta
client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.txnManager)

if len(input.SceneIds) > 0 {
-return client.FindStashBoxScenesByFingerprints(input.SceneIds)
+return client.FindStashBoxScenesByFingerprintsFlat(input.SceneIds)
}

if input.Q != nil {
@ -127,3 +140,175 @@ func (r *queryResolver) QueryStashBoxPerformer(ctx context.Context, input models
return nil, nil
}
func (r *queryResolver) getStashBoxClient(index int) (*stashbox.Client, error) {
boxes := config.GetInstance().GetStashBoxes()
if index < 0 || index >= len(boxes) {
return nil, fmt.Errorf("invalid stash_box_index %d", index)
}
return stashbox.NewClient(*boxes[index], r.txnManager), nil
}
func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSingleSceneInput) ([]*models.ScrapedScene, error) {
if source.ScraperID != nil {
var singleScene *models.ScrapedScene
var err error
if input.SceneID != nil {
var sceneID int
sceneID, err = strconv.Atoi(*input.SceneID)
if err != nil {
return nil, err
}
singleScene, err = manager.GetInstance().ScraperCache.ScrapeScene(*source.ScraperID, sceneID)
} else if input.SceneInput != nil {
singleScene, err = manager.GetInstance().ScraperCache.ScrapeSceneFragment(*source.ScraperID, *input.SceneInput)
} else {
return nil, errors.New("not implemented")
}
if err != nil {
return nil, err
}
if singleScene != nil {
return []*models.ScrapedScene{singleScene}, nil
}
return nil, nil
} else if source.StashBoxIndex != nil {
client, err := r.getStashBoxClient(*source.StashBoxIndex)
if err != nil {
return nil, err
}
if input.SceneID != nil {
return client.FindStashBoxScenesByFingerprintsFlat([]string{*input.SceneID})
} else if input.Query != nil {
return client.QueryStashBoxScene(*input.Query)
}
return nil, errors.New("scene_id or query must be set")
}
return nil, errors.New("scraper_id or stash_box_index must be set")
}
func (r *queryResolver) ScrapeMultiScenes(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeMultiScenesInput) ([][]*models.ScrapedScene, error) {
if source.ScraperID != nil {
return nil, errors.New("not implemented")
} else if source.StashBoxIndex != nil {
client, err := r.getStashBoxClient(*source.StashBoxIndex)
if err != nil {
return nil, err
}
return client.FindStashBoxScenesByFingerprints(input.SceneIds)
}
return nil, errors.New("scraper_id or stash_box_index must be set")
}
func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSinglePerformerInput) ([]*models.ScrapedPerformer, error) {
if source.ScraperID != nil {
if input.PerformerInput != nil {
singlePerformer, err := manager.GetInstance().ScraperCache.ScrapePerformer(*source.ScraperID, *input.PerformerInput)
if err != nil {
return nil, err
}
if singlePerformer != nil {
return []*models.ScrapedPerformer{singlePerformer}, nil
}
return nil, nil
}
if input.Query != nil {
return manager.GetInstance().ScraperCache.ScrapePerformerList(*source.ScraperID, *input.Query)
}
return nil, errors.New("not implemented")
} else if source.StashBoxIndex != nil {
client, err := r.getStashBoxClient(*source.StashBoxIndex)
if err != nil {
return nil, err
}
var ret []*models.StashBoxPerformerQueryResult
if input.PerformerID != nil {
ret, err = client.FindStashBoxPerformersByNames([]string{*input.PerformerID})
} else if input.Query != nil {
ret, err = client.QueryStashBoxPerformer(*input.Query)
} else {
return nil, errors.New("not implemented")
}
if err != nil {
return nil, err
}
if len(ret) > 0 {
return ret[0].Results, nil
}
return nil, nil
}
return nil, errors.New("scraper_id or stash_box_index must be set")
}
func (r *queryResolver) ScrapeMultiPerformers(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeMultiPerformersInput) ([][]*models.ScrapedPerformer, error) {
if source.ScraperID != nil {
return nil, errors.New("not implemented")
} else if source.StashBoxIndex != nil {
client, err := r.getStashBoxClient(*source.StashBoxIndex)
if err != nil {
return nil, err
}
return client.FindStashBoxPerformersByPerformerNames(input.PerformerIds)
}
return nil, errors.New("scraper_id or stash_box_index must be set")
}
func (r *queryResolver) ScrapeSingleGallery(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSingleGalleryInput) ([]*models.ScrapedGallery, error) {
if source.ScraperID != nil {
var singleGallery *models.ScrapedGallery
var err error
if input.GalleryID != nil {
var galleryID int
galleryID, err = strconv.Atoi(*input.GalleryID)
if err != nil {
return nil, err
}
singleGallery, err = manager.GetInstance().ScraperCache.ScrapeGallery(*source.ScraperID, galleryID)
} else if input.GalleryInput != nil {
singleGallery, err = manager.GetInstance().ScraperCache.ScrapeGalleryFragment(*source.ScraperID, *input.GalleryInput)
} else {
return nil, errors.New("not implemented")
}
if err != nil {
return nil, err
}
if singleGallery != nil {
return []*models.ScrapedGallery{singleGallery}, nil
}
return nil, nil
} else if source.StashBoxIndex != nil {
return nil, errors.New("not supported")
}
return nil, errors.New("scraper_id must be set")
}
func (r *queryResolver) ScrapeSingleMovie(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSingleMovieInput) ([]*models.ScrapedMovie, error) {
return nil, errors.New("not supported")
}

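Matching the resolver path added above (a stash-box source with performer_id set goes through FindStashBoxPerformersByNames and the first result set is returned), a hedged sketch of the corresponding client call; the index and ID are placeholders:

query {
  scrapeSinglePerformer(
    source: { stash_box_index: 0 }
    input: { performer_id: "42" }
  ) {
    name
    remote_site_id
    images
  }
}
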
@ -40,7 +40,7 @@ func (t *StashBoxPerformerTagTask) Description() string {
}

func (t *StashBoxPerformerTagTask) stashBoxPerformerTag() {
-var performer *models.ScrapedScenePerformer
+var performer *models.ScrapedPerformer
var err error

client := stashbox.NewClient(*t.box, t.txnManager)
@ -132,8 +132,8 @@ func (t *StashBoxPerformerTagTask) stashBoxPerformerTag() {
value := getNullString(performer.Measurements)
partial.Measurements = &value
}
-if excluded["name"] {
-value := sql.NullString{String: performer.Name, Valid: true}
+if excluded["name"] && performer.Name != nil {
+value := sql.NullString{String: *performer.Name, Valid: true}
partial.Name = &value
}
if performer.Piercings != nil && !excluded["piercings"] {
@ -180,17 +180,21 @@ func (t *StashBoxPerformerTagTask) stashBoxPerformerTag() {
}

if err == nil {
-logger.Infof("Updated performer %s", performer.Name)
+var name string
+if performer.Name != nil {
+name = *performer.Name
+}
+logger.Infof("Updated performer %s", name)
}
return err
})
-} else if t.name != nil {
+} else if t.name != nil && performer.Name != nil {
currentTime := time.Now()
newPerformer := models.Performer{
Aliases: getNullString(performer.Aliases),
Birthdate: getDate(performer.Birthdate),
CareerLength: getNullString(performer.CareerLength),
-Checksum: utils.MD5FromString(performer.Name),
+Checksum: utils.MD5FromString(*performer.Name),
Country: getNullString(performer.Country),
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
Ethnicity: getNullString(performer.Ethnicity),
@ -201,7 +205,7 @@ func (t *StashBoxPerformerTagTask) stashBoxPerformerTag() {
Height: getNullString(performer.Height),
Instagram: getNullString(performer.Instagram),
Measurements: getNullString(performer.Measurements),
-Name: sql.NullString{String: performer.Name, Valid: true},
+Name: sql.NullString{String: *performer.Name, Valid: true},
Piercings: getNullString(performer.Piercings),
Tattoos: getNullString(performer.Tattoos),
Twitter: getNullString(performer.Twitter),

@ -23,174 +23,6 @@ type ScrapedItem struct {
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
}
type ScrapedPerformer struct {
Name *string `graphql:"name" json:"name"`
Gender *string `graphql:"gender" json:"gender"`
URL *string `graphql:"url" json:"url"`
Twitter *string `graphql:"twitter" json:"twitter"`
Instagram *string `graphql:"instagram" json:"instagram"`
Birthdate *string `graphql:"birthdate" json:"birthdate"`
Ethnicity *string `graphql:"ethnicity" json:"ethnicity"`
Country *string `graphql:"country" json:"country"`
EyeColor *string `graphql:"eye_color" json:"eye_color"`
Height *string `graphql:"height" json:"height"`
Measurements *string `graphql:"measurements" json:"measurements"`
FakeTits *string `graphql:"fake_tits" json:"fake_tits"`
CareerLength *string `graphql:"career_length" json:"career_length"`
Tattoos *string `graphql:"tattoos" json:"tattoos"`
Piercings *string `graphql:"piercings" json:"piercings"`
Aliases *string `graphql:"aliases" json:"aliases"`
Tags []*ScrapedSceneTag `graphql:"tags" json:"tags"`
Image *string `graphql:"image" json:"image"`
Details *string `graphql:"details" json:"details"`
DeathDate *string `graphql:"death_date" json:"death_date"`
HairColor *string `graphql:"hair_color" json:"hair_color"`
Weight *string `graphql:"weight" json:"weight"`
RemoteSiteID *string `graphql:"remote_site_id" json:"remote_site_id"`
}
// this type has no Image field
type ScrapedPerformerStash struct {
Name *string `graphql:"name" json:"name"`
Gender *string `graphql:"gender" json:"gender"`
URL *string `graphql:"url" json:"url"`
Twitter *string `graphql:"twitter" json:"twitter"`
Instagram *string `graphql:"instagram" json:"instagram"`
Birthdate *string `graphql:"birthdate" json:"birthdate"`
Ethnicity *string `graphql:"ethnicity" json:"ethnicity"`
Country *string `graphql:"country" json:"country"`
EyeColor *string `graphql:"eye_color" json:"eye_color"`
Height *string `graphql:"height" json:"height"`
Measurements *string `graphql:"measurements" json:"measurements"`
FakeTits *string `graphql:"fake_tits" json:"fake_tits"`
CareerLength *string `graphql:"career_length" json:"career_length"`
Tattoos *string `graphql:"tattoos" json:"tattoos"`
Piercings *string `graphql:"piercings" json:"piercings"`
Aliases *string `graphql:"aliases" json:"aliases"`
Tags []*ScrapedSceneTag `graphql:"tags" json:"tags"`
Details *string `graphql:"details" json:"details"`
DeathDate *string `graphql:"death_date" json:"death_date"`
HairColor *string `graphql:"hair_color" json:"hair_color"`
Weight *string `graphql:"weight" json:"weight"`
}
type ScrapedScene struct {
Title *string `graphql:"title" json:"title"`
Details *string `graphql:"details" json:"details"`
URL *string `graphql:"url" json:"url"`
Date *string `graphql:"date" json:"date"`
Image *string `graphql:"image" json:"image"`
RemoteSiteID *string `graphql:"remote_site_id" json:"remote_site_id"`
Duration *int `graphql:"duration" json:"duration"`
File *SceneFileType `graphql:"file" json:"file"`
Fingerprints []*StashBoxFingerprint `graphql:"fingerprints" json:"fingerprints"`
Studio *ScrapedSceneStudio `graphql:"studio" json:"studio"`
Movies []*ScrapedSceneMovie `graphql:"movies" json:"movies"`
Tags []*ScrapedSceneTag `graphql:"tags" json:"tags"`
Performers []*ScrapedScenePerformer `graphql:"performers" json:"performers"`
}
// stash doesn't return image, and we need id
type ScrapedSceneStash struct {
ID string `graphql:"id" json:"id"`
Title *string `graphql:"title" json:"title"`
Details *string `graphql:"details" json:"details"`
URL *string `graphql:"url" json:"url"`
Date *string `graphql:"date" json:"date"`
File *SceneFileType `graphql:"file" json:"file"`
Studio *ScrapedSceneStudio `graphql:"studio" json:"studio"`
Tags []*ScrapedSceneTag `graphql:"tags" json:"tags"`
Performers []*ScrapedScenePerformer `graphql:"performers" json:"performers"`
}
type ScrapedGalleryStash struct {
ID string `graphql:"id" json:"id"`
Title *string `graphql:"title" json:"title"`
Details *string `graphql:"details" json:"details"`
URL *string `graphql:"url" json:"url"`
Date *string `graphql:"date" json:"date"`
File *SceneFileType `graphql:"file" json:"file"`
Studio *ScrapedSceneStudio `graphql:"studio" json:"studio"`
Tags []*ScrapedSceneTag `graphql:"tags" json:"tags"`
Performers []*ScrapedScenePerformer `graphql:"performers" json:"performers"`
}
type ScrapedScenePerformer struct {
// Set if performer matched
ID *string `graphql:"id" json:"id"`
Name string `graphql:"name" json:"name"`
Gender *string `graphql:"gender" json:"gender"`
URL *string `graphql:"url" json:"url"`
Twitter *string `graphql:"twitter" json:"twitter"`
Instagram *string `graphql:"instagram" json:"instagram"`
Birthdate *string `graphql:"birthdate" json:"birthdate"`
Ethnicity *string `graphql:"ethnicity" json:"ethnicity"`
Country *string `graphql:"country" json:"country"`
EyeColor *string `graphql:"eye_color" json:"eye_color"`
Height *string `graphql:"height" json:"height"`
Measurements *string `graphql:"measurements" json:"measurements"`
FakeTits *string `graphql:"fake_tits" json:"fake_tits"`
CareerLength *string `graphql:"career_length" json:"career_length"`
Tattoos *string `graphql:"tattoos" json:"tattoos"`
Piercings *string `graphql:"piercings" json:"piercings"`
Aliases *string `graphql:"aliases" json:"aliases"`
Tags []*ScrapedSceneTag `graphql:"tags" json:"tags"`
RemoteSiteID *string `graphql:"remote_site_id" json:"remote_site_id"`
Images []string `graphql:"images" json:"images"`
Details *string `graphql:"details" json:"details"`
DeathDate *string `graphql:"death_date" json:"death_date"`
HairColor *string `graphql:"hair_color" json:"hair_color"`
Weight *string `graphql:"weight" json:"weight"`
}
type ScrapedSceneStudio struct {
// Set if studio matched
ID *string `graphql:"id" json:"id"`
Name string `graphql:"name" json:"name"`
URL *string `graphql:"url" json:"url"`
RemoteSiteID *string `graphql:"remote_site_id" json:"remote_site_id"`
}
type ScrapedSceneMovie struct {
// Set if movie matched
ID *string `graphql:"id" json:"id"`
Name string `graphql:"name" json:"name"`
Aliases string `graphql:"aliases" json:"aliases"`
Duration string `graphql:"duration" json:"duration"`
Date string `graphql:"date" json:"date"`
Rating string `graphql:"rating" json:"rating"`
Director string `graphql:"director" json:"director"`
Synopsis string `graphql:"synopsis" json:"synopsis"`
URL *string `graphql:"url" json:"url"`
}
type ScrapedSceneTag struct {
// Set if tag matched
ID *string `graphql:"stored_id" json:"stored_id"`
Name string `graphql:"name" json:"name"`
}
type ScrapedMovie struct {
Name *string `graphql:"name" json:"name"`
Aliases *string `graphql:"aliases" json:"aliases"`
Duration *string `graphql:"duration" json:"duration"`
Date *string `graphql:"date" json:"date"`
Rating *string `graphql:"rating" json:"rating"`
Director *string `graphql:"director" json:"director"`
Studio *ScrapedMovieStudio `graphql:"studio" json:"studio"`
Synopsis *string `graphql:"synopsis" json:"synopsis"`
URL *string `graphql:"url" json:"url"`
FrontImage *string `graphql:"front_image" json:"front_image"`
BackImage *string `graphql:"back_image" json:"back_image"`
}
type ScrapedMovieStudio struct {
// Set if studio matched
ID *string `graphql:"id" json:"id"`
Name string `graphql:"name" json:"name"`
URL *string `graphql:"url" json:"url"`
}
type ScrapedItems []*ScrapedItem

func (s *ScrapedItems) Append(o interface{}) {

@ -37,10 +37,12 @@ type scraper interface {
scrapePerformerByFragment(scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error)
scrapePerformerByURL(url string) (*models.ScrapedPerformer, error)
-scrapeSceneByFragment(scene models.SceneUpdateInput) (*models.ScrapedScene, error)
+scrapeSceneByScene(scene *models.Scene) (*models.ScrapedScene, error)
+scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error)
scrapeSceneByURL(url string) (*models.ScrapedScene, error)
-scrapeGalleryByFragment(scene models.GalleryUpdateInput) (*models.ScrapedGallery, error)
+scrapeGalleryByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error)
+scrapeGalleryByFragment(gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error)
scrapeGalleryByURL(url string) (*models.ScrapedGallery, error)
scrapeMovieByURL(url string) (*models.ScrapedMovie, error)

@ -393,8 +393,18 @@ func (c config) matchesMovieURL(url string) bool {
return false
}

-func (c config) ScrapeScene(scene models.SceneUpdateInput, txnManager models.TransactionManager, globalConfig GlobalConfig) (*models.ScrapedScene, error) {
+func (c config) ScrapeSceneByScene(scene *models.Scene, txnManager models.TransactionManager, globalConfig GlobalConfig) (*models.ScrapedScene, error) {
if c.SceneByFragment != nil {
+s := getScraper(*c.SceneByFragment, txnManager, c, globalConfig)
+return s.scrapeSceneByScene(scene)
+}
+
+return nil, nil
+}
+
+func (c config) ScrapeSceneByFragment(scene models.ScrapedSceneInput, txnManager models.TransactionManager, globalConfig GlobalConfig) (*models.ScrapedScene, error) {
+if c.SceneByFragment != nil {
+// TODO - this should be sceneByQueryFragment
s := getScraper(*c.SceneByFragment, txnManager, c, globalConfig)
return s.scrapeSceneByFragment(scene)
}
@ -420,8 +430,18 @@ func (c config) ScrapeSceneURL(url string, txnManager models.TransactionManager,
return nil, nil
}

-func (c config) ScrapeGallery(gallery models.GalleryUpdateInput, txnManager models.TransactionManager, globalConfig GlobalConfig) (*models.ScrapedGallery, error) {
+func (c config) ScrapeGalleryByGallery(gallery *models.Gallery, txnManager models.TransactionManager, globalConfig GlobalConfig) (*models.ScrapedGallery, error) {
+if c.SceneByFragment != nil {
+s := getScraper(*c.GalleryByFragment, txnManager, c, globalConfig)
+return s.scrapeGalleryByGallery(gallery)
+}
+
+return nil, nil
+}
+
+func (c config) ScrapeGalleryByFragment(gallery models.ScrapedGalleryInput, txnManager models.TransactionManager, globalConfig GlobalConfig) (*models.ScrapedGallery, error) {
if c.GalleryByFragment != nil {
+// TODO - this should be galleryByQueryFragment
s := getScraper(*c.GalleryByFragment, txnManager, c, globalConfig)
return s.scrapeGalleryByFragment(gallery)
}

@ -28,6 +28,8 @@ func setPerformerImage(p *models.ScrapedPerformer, globalConfig GlobalConfig) er
}

p.Image = img
+// Image is deprecated. Use images instead
+p.Images = []string{*img}

return nil
}

@ -143,18 +143,9 @@ func (s *jsonScraper) scrapePerformerByFragment(scrapedPerformer models.ScrapedP
return nil, errors.New("scrapePerformerByFragment not supported for json scraper") return nil, errors.New("scrapePerformerByFragment not supported for json scraper")
} }
func (s *jsonScraper) scrapeSceneByFragment(scene models.SceneUpdateInput) (*models.ScrapedScene, error) { func (s *jsonScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedScene, error) {
storedScene, err := sceneFromUpdateFragment(scene, s.txnManager)
if err != nil {
return nil, err
}
if storedScene == nil {
return nil, errors.New("no scene found")
}
// construct the URL // construct the URL
queryURL := queryURLParametersFromScene(storedScene) queryURL := queryURLParametersFromScene(scene)
if s.scraper.QueryURLReplacements != nil { if s.scraper.QueryURLReplacements != nil {
queryURL.applyReplacements(s.scraper.QueryURLReplacements) queryURL.applyReplacements(s.scraper.QueryURLReplacements)
} }
@ -176,18 +167,13 @@ func (s *jsonScraper) scrapeSceneByFragment(scene models.SceneUpdateInput) (*mod
return scraper.scrapeScene(q) return scraper.scrapeScene(q)
} }
func (s *jsonScraper) scrapeGalleryByFragment(gallery models.GalleryUpdateInput) (*models.ScrapedGallery, error) { func (s *jsonScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) {
storedGallery, err := galleryFromUpdateFragment(gallery, s.txnManager) return nil, errors.New("scrapeSceneByFragment not supported for json scraper")
if err != nil { }
return nil, err
}
if storedGallery == nil {
return nil, errors.New("no scene found")
}
func (s *jsonScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error) {
// construct the URL // construct the URL
queryURL := queryURLParametersFromGallery(storedGallery) queryURL := queryURLParametersFromGallery(gallery)
if s.scraper.QueryURLReplacements != nil { if s.scraper.QueryURLReplacements != nil {
queryURL.applyReplacements(s.scraper.QueryURLReplacements) queryURL.applyReplacements(s.scraper.QueryURLReplacements)
} }
@ -209,6 +195,10 @@ func (s *jsonScraper) scrapeGalleryByFragment(gallery models.GalleryUpdateInput)
return scraper.scrapeGallery(q) return scraper.scrapeGallery(q)
} }
func (s *jsonScraper) scrapeGalleryByFragment(gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error) {
return nil, errors.New("scrapeGalleryByFragment not supported for json scraper")
}
func (s *jsonScraper) getJsonQuery(doc string) *jsonQuery { func (s *jsonScraper) getJsonQuery(doc string) *jsonQuery {
return &jsonQuery{ return &jsonQuery{
doc: doc, doc: doc,

@ -763,7 +763,7 @@ func (s mappedScraper) scrapePerformer(q mappedQuery) (*models.ScrapedPerformer,
tagResults := performerTagsMap.process(q, s.Common)

for _, p := range tagResults {
-tag := &models.ScrapedSceneTag{}
+tag := &models.ScrapedTag{}
p.apply(tag)
ret.Tags = append(ret.Tags, tag)
}
@ -824,11 +824,11 @@ func (s mappedScraper) scrapeScene(q mappedQuery) (*models.ScrapedScene, error)
performerResults := scenePerformersMap.process(q, s.Common)

for _, p := range performerResults {
-performer := &models.ScrapedScenePerformer{}
+performer := &models.ScrapedPerformer{}
p.apply(performer)

for _, p := range performerTagResults {
-tag := &models.ScrapedSceneTag{}
+tag := &models.ScrapedTag{}
p.apply(tag)
ret.Tags = append(ret.Tags, tag)
}
@ -842,7 +842,7 @@ func (s mappedScraper) scrapeScene(q mappedQuery) (*models.ScrapedScene, error)
tagResults := sceneTagsMap.process(q, s.Common)

for _, p := range tagResults {
-tag := &models.ScrapedSceneTag{}
+tag := &models.ScrapedTag{}
p.apply(tag)
ret.Tags = append(ret.Tags, tag)
}
@ -853,7 +853,7 @@ func (s mappedScraper) scrapeScene(q mappedQuery) (*models.ScrapedScene, error)
studioResults := sceneStudioMap.process(q, s.Common)

if len(studioResults) > 0 {
-studio := &models.ScrapedSceneStudio{}
+studio := &models.ScrapedStudio{}
studioResults[0].apply(studio)
ret.Studio = studio
}
@ -864,7 +864,7 @@ func (s mappedScraper) scrapeScene(q mappedQuery) (*models.ScrapedScene, error)
movieResults := sceneMoviesMap.process(q, s.Common)

for _, p := range movieResults {
-movie := &models.ScrapedSceneMovie{}
+movie := &models.ScrapedMovie{}
p.apply(movie)
ret.Movies = append(ret.Movies, movie)
}
@ -899,7 +899,7 @@ func (s mappedScraper) scrapeGallery(q mappedQuery) (*models.ScrapedGallery, err
performerResults := galleryPerformersMap.process(q, s.Common)

for _, p := range performerResults {
-performer := &models.ScrapedScenePerformer{}
+performer := &models.ScrapedPerformer{}
p.apply(performer)
ret.Performers = append(ret.Performers, performer)
}
@ -910,7 +910,7 @@ func (s mappedScraper) scrapeGallery(q mappedQuery) (*models.ScrapedGallery, err
tagResults := galleryTagsMap.process(q, s.Common)

for _, p := range tagResults {
-tag := &models.ScrapedSceneTag{}
+tag := &models.ScrapedTag{}
p.apply(tag)
ret.Tags = append(ret.Tags, tag)
}
@ -921,7 +921,7 @@ func (s mappedScraper) scrapeGallery(q mappedQuery) (*models.ScrapedGallery, err
studioResults := galleryStudioMap.process(q, s.Common)

if len(studioResults) > 0 {
-studio := &models.ScrapedSceneStudio{}
+studio := &models.ScrapedStudio{}
studioResults[0].apply(studio)
ret.Studio = studio
}
@ -951,7 +951,7 @@ func (s mappedScraper) scrapeMovie(q mappedQuery) (*models.ScrapedMovie, error)
studioResults := movieStudioMap.process(q, s.Common)

if len(studioResults) > 0 {
-studio := &models.ScrapedMovieStudio{}
+studio := &models.ScrapedStudio{}
studioResults[0].apply(studio)
ret.Studio = studio
}

@ -7,10 +7,14 @@ import (
"github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/tag"
) )
// MatchScrapedScenePerformer matches the provided performer with the // MatchScrapedPerformer matches the provided performer with the
// performers in the database and sets the ID field if one is found. // performers in the database and sets the ID field if one is found.
func MatchScrapedScenePerformer(qb models.PerformerReader, p *models.ScrapedScenePerformer) error { func MatchScrapedPerformer(qb models.PerformerReader, p *models.ScrapedPerformer) error {
performers, err := qb.FindByNames([]string{p.Name}, true) if p.Name == nil {
return nil
}
performers, err := qb.FindByNames([]string{*p.Name}, true)
if err != nil { if err != nil {
return err return err
@ -22,13 +26,13 @@ func MatchScrapedScenePerformer(qb models.PerformerReader, p *models.ScrapedScen
} }
id := strconv.Itoa(performers[0].ID) id := strconv.Itoa(performers[0].ID)
p.ID = &id p.StoredID = &id
return nil return nil
} }
// MatchScrapedSceneStudio matches the provided studio with the studios // MatchScrapedStudio matches the provided studio with the studios
// in the database and sets the ID field if one is found. // in the database and sets the ID field if one is found.
func MatchScrapedSceneStudio(qb models.StudioReader, s *models.ScrapedSceneStudio) error { func MatchScrapedStudio(qb models.StudioReader, s *models.ScrapedStudio) error {
studio, err := qb.FindByName(s.Name, true) studio, err := qb.FindByName(s.Name, true)
if err != nil { if err != nil {
@ -41,14 +45,18 @@ func MatchScrapedSceneStudio(qb models.StudioReader, s *models.ScrapedSceneStudi
} }
id := strconv.Itoa(studio.ID) id := strconv.Itoa(studio.ID)
s.ID = &id s.StoredID = &id
return nil return nil
} }
// MatchScrapedSceneMovie matches the provided movie with the movies // MatchScrapedMovie matches the provided movie with the movies
// in the database and sets the ID field if one is found. // in the database and sets the ID field if one is found.
func MatchScrapedSceneMovie(qb models.MovieReader, m *models.ScrapedSceneMovie) error { func MatchScrapedMovie(qb models.MovieReader, m *models.ScrapedMovie) error {
movies, err := qb.FindByNames([]string{m.Name}, true) if m.Name == nil {
return nil
}
movies, err := qb.FindByNames([]string{*m.Name}, true)
if err != nil { if err != nil {
return err return err
@ -60,13 +68,13 @@ func MatchScrapedSceneMovie(qb models.MovieReader, m *models.ScrapedSceneMovie)
} }
id := strconv.Itoa(movies[0].ID) id := strconv.Itoa(movies[0].ID)
m.ID = &id m.StoredID = &id
return nil return nil
} }
// MatchScrapedSceneTag matches the provided tag with the tags // MatchScrapedTag matches the provided tag with the tags
// in the database and sets the ID field if one is found. // in the database and sets the ID field if one is found.
func MatchScrapedSceneTag(qb models.TagReader, s *models.ScrapedSceneTag) error { func MatchScrapedTag(qb models.TagReader, s *models.ScrapedTag) error {
t, err := tag.ByName(qb, s.Name) t, err := tag.ByName(qb, s.Name)
if err != nil { if err != nil {
@ -87,6 +95,6 @@ func MatchScrapedSceneTag(qb models.TagReader, s *models.ScrapedSceneTag) error
} }
id := strconv.Itoa(t.ID) id := strconv.Itoa(t.ID)
s.ID = &id s.StoredID = &id
return nil return nil
} }

@ -3,10 +3,10 @@ package scraper
import (
"context"
"errors"
+"fmt"
"os"
"path/filepath"
"regexp"
-"strconv"
"strings"

"github.com/stashapp/stash/pkg/logger"
@ -260,7 +260,7 @@ func (c Cache) postScrapePerformer(ret *models.ScrapedPerformer) error {
return nil
}

-func (c Cache) postScrapeScenePerformer(ret *models.ScrapedScenePerformer) error {
+func (c Cache) postScrapeScenePerformer(ret *models.ScrapedPerformer) error {
if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
tqb := r.Tag()
@ -290,13 +290,13 @@ func (c Cache) postScrapeScene(ret *models.ScrapedScene) error {
return err
}

-if err := MatchScrapedScenePerformer(pqb, p); err != nil {
+if err := MatchScrapedPerformer(pqb, p); err != nil {
return err
}
}

for _, p := range ret.Movies {
-err := MatchScrapedSceneMovie(mqb, p)
+err := MatchScrapedMovie(mqb, p)
if err != nil {
return err
}
@ -309,7 +309,7 @@ func (c Cache) postScrapeScene(ret *models.ScrapedScene) error {
ret.Tags = tags

if ret.Studio != nil {
-err := MatchScrapedSceneStudio(sqb, ret.Studio)
+err := MatchScrapedStudio(sqb, ret.Studio)
if err != nil {
return err
}
@ -335,7 +335,7 @@ func (c Cache) postScrapeGallery(ret *models.ScrapedGallery) error {
sqb := r.Studio()

for _, p := range ret.Performers {
-err := MatchScrapedScenePerformer(pqb, p)
+err := MatchScrapedPerformer(pqb, p)
if err != nil {
return err
}
@ -348,7 +348,7 @@ func (c Cache) postScrapeGallery(ret *models.ScrapedGallery) error {
ret.Tags = tags

if ret.Studio != nil {
-err := MatchScrapedSceneStudio(sqb, ret.Studio)
+err := MatchScrapedStudio(sqb, ret.Studio)
if err != nil {
return err
}
@ -362,12 +362,42 @@ func (c Cache) postScrapeGallery(ret *models.ScrapedGallery) error {
return nil
}

-// ScrapeScene uses the scraper with the provided ID to scrape a scene.
-func (c Cache) ScrapeScene(scraperID string, scene models.SceneUpdateInput) (*models.ScrapedScene, error) {
+// ScrapeScene uses the scraper with the provided ID to scrape a scene using existing data.
+func (c Cache) ScrapeScene(scraperID string, sceneID int) (*models.ScrapedScene, error) {
// find scraper with the provided id
s := c.findScraper(scraperID)
if s == nil {
return nil, fmt.Errorf("scraper with ID %s not found", scraperID)
}
// get scene from id
scene, err := getScene(sceneID, c.txnManager)
if err != nil {
return nil, err
}
ret, err := s.ScrapeSceneByScene(scene, c.txnManager, c.globalConfig)
if err != nil {
return nil, err
}
if ret != nil {
err = c.postScrapeScene(ret)
if err != nil {
return nil, err
}
}
return ret, nil
}
// ScrapeSceneFragment uses the scraper with the provided ID to scrape a scene.
func (c Cache) ScrapeSceneFragment(scraperID string, scene models.ScrapedSceneInput) (*models.ScrapedScene, error) {
// find scraper with the provided id
s := c.findScraper(scraperID)
if s != nil {
-ret, err := s.ScrapeScene(scene, c.txnManager, c.globalConfig)
+ret, err := s.ScrapeSceneByFragment(scene, c.txnManager, c.globalConfig)
if err != nil {
return nil, err
@ -410,11 +440,40 @@ func (c Cache) ScrapeSceneURL(url string) (*models.ScrapedScene, error) {
return nil, nil
}

-// ScrapeGallery uses the scraper with the provided ID to scrape a scene.
-func (c Cache) ScrapeGallery(scraperID string, gallery models.GalleryUpdateInput) (*models.ScrapedGallery, error) {
+// ScrapeGallery uses the scraper with the provided ID to scrape a gallery using existing data.
+func (c Cache) ScrapeGallery(scraperID string, galleryID int) (*models.ScrapedGallery, error) {
s := c.findScraper(scraperID)
if s != nil {
-ret, err := s.ScrapeGallery(gallery, c.txnManager, c.globalConfig)
+// get gallery from id
gallery, err := getGallery(galleryID, c.txnManager)
if err != nil {
return nil, err
}
ret, err := s.ScrapeGalleryByGallery(gallery, c.txnManager, c.globalConfig)
if err != nil {
return nil, err
}
if ret != nil {
err = c.postScrapeGallery(ret)
if err != nil {
return nil, err
}
}
return ret, nil
}
return nil, errors.New("Scraped with ID " + scraperID + " not found")
}
// ScrapeGalleryFragment uses the scraper with the provided ID to scrape a gallery.
func (c Cache) ScrapeGalleryFragment(scraperID string, gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error) {
s := c.findScraper(scraperID)
if s != nil {
ret, err := s.ScrapeGalleryByFragment(gallery, c.txnManager, c.globalConfig)
if err != nil {
return nil, err
@ -457,23 +516,6 @@ func (c Cache) ScrapeGalleryURL(url string) (*models.ScrapedGallery, error) {
return nil, nil
}
func matchMovieStudio(qb models.StudioReader, s *models.ScrapedMovieStudio) error {
studio, err := qb.FindByName(s.Name, true)
if err != nil {
return err
}
if studio == nil {
// ignore - cannot match
return nil
}
id := strconv.Itoa(studio.ID)
s.ID = &id
return nil
}
// ScrapeMovieURL uses the first scraper it finds that matches the URL
// provided to scrape a movie. If no scrapers are found that matches
// the URL, then nil is returned.
@ -487,7 +529,7 @@ func (c Cache) ScrapeMovieURL(url string) (*models.ScrapedMovie, error) {
if ret.Studio != nil {
if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
-return matchMovieStudio(r.Studio(), ret.Studio)
+return MatchScrapedStudio(r.Studio(), ret.Studio)
}); err != nil {
return nil, err
}
@ -508,8 +550,8 @@ func (c Cache) ScrapeMovieURL(url string) (*models.ScrapedMovie, error) {
return nil, nil
}

-func postProcessTags(tqb models.TagReader, scrapedTags []*models.ScrapedSceneTag) ([]*models.ScrapedSceneTag, error) {
-var ret []*models.ScrapedSceneTag
+func postProcessTags(tqb models.TagReader, scrapedTags []*models.ScrapedTag) ([]*models.ScrapedTag, error) {
+var ret []*models.ScrapedTag
excludePatterns := stash_config.GetInstance().GetScraperExcludeTagPatterns()
var excludeRegexps []*regexp.Regexp
@ -533,7 +575,7 @@ ScrapeTag:
} }
} }
err := MatchScrapedSceneTag(tqb, t) err := MatchScrapedTag(tqb, t)
if err != nil { if err != nil {
return nil, err return nil, err
} }
View file
@ -63,7 +63,7 @@ func (s *scriptScraper) runScraperScript(inString string, out interface{}) error
if err = cmd.Start(); err != nil { if err = cmd.Start(); err != nil {
logger.Error("Error running scraper script: " + err.Error()) logger.Error("Error running scraper script: " + err.Error())
return errors.New("Error running scraper script") return errors.New("error running scraper script")
} }
scanner := bufio.NewScanner(stderr) scanner := bufio.NewScanner(stderr)
@ -86,7 +86,7 @@ func (s *scriptScraper) runScraperScript(inString string, out interface{}) error
logger.Debugf("Scraper script finished") logger.Debugf("Scraper script finished")
if err != nil { if err != nil {
return errors.New("Error running scraper script") return errors.New("error running scraper script")
} }
return nil return nil
@ -134,7 +134,21 @@ func (s *scriptScraper) scrapePerformerByURL(url string) (*models.ScrapedPerform
return &ret, err return &ret, err
} }
func (s *scriptScraper) scrapeSceneByFragment(scene models.SceneUpdateInput) (*models.ScrapedScene, error) { func (s *scriptScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedScene, error) {
inString, err := json.Marshal(sceneToUpdateInput(scene))
if err != nil {
return nil, err
}
var ret models.ScrapedScene
err = s.runScraperScript(string(inString), &ret)
return &ret, err
}
func (s *scriptScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) {
inString, err := json.Marshal(scene) inString, err := json.Marshal(scene)
if err != nil { if err != nil {
@ -148,7 +162,21 @@ func (s *scriptScraper) scrapeSceneByFragment(scene models.SceneUpdateInput) (*m
return &ret, err return &ret, err
} }
func (s *scriptScraper) scrapeGalleryByFragment(gallery models.GalleryUpdateInput) (*models.ScrapedGallery, error) { func (s *scriptScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error) {
inString, err := json.Marshal(galleryToUpdateInput(gallery))
if err != nil {
return nil, err
}
var ret models.ScrapedGallery
err = s.runScraperScript(string(inString), &ret)
return &ret, err
}
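For illustration only (not from this commit): each script-scraper variant above reduces to the same marshal/run/decode loop that runScraperScript implements. A self-contained sketch of that loop, using `cat` as a stand-in for a real scraper script (assumes a POSIX environment where `cat` is available):

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"os/exec"
)

// Toy stand-in for the scraped result type.
type scrapedGallery struct {
	Title *string `json:"title"`
}

func main() {
	// Marshal the input, feed it to an external process on stdin, and decode
	// the JSON it prints on stdout. `cat` simply echoes its input, standing
	// in for a real scraper script.
	title := "example gallery"
	in, err := json.Marshal(scrapedGallery{Title: &title})
	if err != nil {
		panic(err)
	}

	cmd := exec.Command("cat")
	cmd.Stdin = bytes.NewReader(in)
	out, err := cmd.Output()
	if err != nil {
		panic(err)
	}

	var ret scrapedGallery
	if err := json.Unmarshal(out, &ret); err != nil {
		panic(err)
	}
	fmt.Println(*ret.Title)
}
```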
func (s *scriptScraper) scrapeGalleryByFragment(gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error) {
inString, err := json.Marshal(gallery) inString, err := json.Marshal(gallery)
if err != nil { if err != nil {
View file
@ -2,6 +2,7 @@ package scraper
import ( import (
"context" "context"
"database/sql"
"errors" "errors"
"strconv" "strconv"
@ -81,11 +82,40 @@ func (s *stashScraper) scrapePerformersByName(name string) ([]*models.ScrapedPer
return ret, nil return ret, nil
} }
// need a separate type for scraped stash performers - does not include remote_site_id or image
type scrapedTagStash struct {
Name string `graphql:"name" json:"name"`
}
type scrapedPerformerStash struct {
Name *string `graphql:"name" json:"name"`
Gender *string `graphql:"gender" json:"gender"`
URL *string `graphql:"url" json:"url"`
Twitter *string `graphql:"twitter" json:"twitter"`
Instagram *string `graphql:"instagram" json:"instagram"`
Birthdate *string `graphql:"birthdate" json:"birthdate"`
Ethnicity *string `graphql:"ethnicity" json:"ethnicity"`
Country *string `graphql:"country" json:"country"`
EyeColor *string `graphql:"eye_color" json:"eye_color"`
Height *string `graphql:"height" json:"height"`
Measurements *string `graphql:"measurements" json:"measurements"`
FakeTits *string `graphql:"fake_tits" json:"fake_tits"`
CareerLength *string `graphql:"career_length" json:"career_length"`
Tattoos *string `graphql:"tattoos" json:"tattoos"`
Piercings *string `graphql:"piercings" json:"piercings"`
Aliases *string `graphql:"aliases" json:"aliases"`
Tags []*scrapedTagStash `graphql:"tags" json:"tags"`
Details *string `graphql:"details" json:"details"`
DeathDate *string `graphql:"death_date" json:"death_date"`
HairColor *string `graphql:"hair_color" json:"hair_color"`
Weight *string `graphql:"weight" json:"weight"`
}
func (s *stashScraper) scrapePerformerByFragment(scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) { func (s *stashScraper) scrapePerformerByFragment(scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
client := s.getStashClient() client := s.getStashClient()
var q struct { var q struct {
FindPerformer *models.ScrapedPerformerStash `graphql:"findPerformer(id: $f)"` FindPerformer *scrapedPerformerStash `graphql:"findPerformer(id: $f)"`
} }
performerID := *scrapedPerformer.URL performerID := *scrapedPerformer.URL
@ -100,13 +130,6 @@ func (s *stashScraper) scrapePerformerByFragment(scrapedPerformer models.Scraped
return nil, err return nil, err
} }
if q.FindPerformer != nil {
// the ids of the tags must be nilled
for _, t := range q.FindPerformer.Tags {
t.ID = nil
}
}
// need to copy back to a scraped performer // need to copy back to a scraped performer
ret := models.ScrapedPerformer{} ret := models.ScrapedPerformer{}
err = copier.Copy(&ret, q.FindPerformer) err = copier.Copy(&ret, q.FindPerformer)
@ -123,25 +146,27 @@ func (s *stashScraper) scrapePerformerByFragment(scrapedPerformer models.Scraped
return &ret, nil return &ret, nil
} }
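For illustration only (not from this commit): the scraped*Stash types above exist so the remote response can be decoded into a narrow intermediate struct and then copied field by field into the unified model. A minimal sketch of that copy step with toy struct types, assuming the copier package used here is github.com/jinzhu/copier:

```go
package main

import (
	"fmt"

	"github.com/jinzhu/copier"
)

// Narrow struct shaped like what the remote stash endpoint returns.
type remotePerformer struct {
	Name   *string
	Gender *string
}

// Wider unified type; fields the remote side does not send stay nil.
type scrapedPerformer struct {
	Name         *string
	Gender       *string
	RemoteSiteID *string
}

func main() {
	name, gender := "Example Performer", "FEMALE"
	src := remotePerformer{Name: &name, Gender: &gender}

	// copier matches exported fields by name, so only Name and Gender are
	// copied into the unified struct; RemoteSiteID remains nil.
	var dst scrapedPerformer
	if err := copier.Copy(&dst, &src); err != nil {
		panic(err)
	}
	fmt.Println(*dst.Name, *dst.Gender, dst.RemoteSiteID == nil)
}
```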
func (s *stashScraper) scrapeSceneByFragment(scene models.SceneUpdateInput) (*models.ScrapedScene, error) { type scrapedStudioStash struct {
Name string `graphql:"name" json:"name"`
URL *string `graphql:"url" json:"url"`
}
type scrapedSceneStash struct {
ID string `graphql:"id" json:"id"`
Title *string `graphql:"title" json:"title"`
Details *string `graphql:"details" json:"details"`
URL *string `graphql:"url" json:"url"`
Date *string `graphql:"date" json:"date"`
File *models.SceneFileType `graphql:"file" json:"file"`
Studio *scrapedStudioStash `graphql:"studio" json:"studio"`
Tags []*scrapedTagStash `graphql:"tags" json:"tags"`
Performers []*scrapedPerformerStash `graphql:"performers" json:"performers"`
}
func (s *stashScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedScene, error) {
// query by MD5 // query by MD5
// assumes that the scene exists in the database
id, err := strconv.Atoi(scene.ID)
if err != nil {
return nil, err
}
var storedScene *models.Scene
if err := s.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
var err error
storedScene, err = r.Scene().Find(id)
return err
}); err != nil {
return nil, err
}
var q struct { var q struct {
FindScene *models.ScrapedSceneStash `graphql:"findSceneByHash(input: $c)"` FindScene *scrapedSceneStash `graphql:"findSceneByHash(input: $c)"`
} }
type SceneHashInput struct { type SceneHashInput struct {
@ -150,8 +175,8 @@ func (s *stashScraper) scrapeSceneByFragment(scene models.SceneUpdateInput) (*mo
} }
input := SceneHashInput{ input := SceneHashInput{
Checksum: &storedScene.Checksum.String, Checksum: &scene.Checksum.String,
Oshash: &storedScene.OSHash.String, Oshash: &scene.OSHash.String,
} }
vars := map[string]interface{}{ vars := map[string]interface{}{
@ -159,34 +184,18 @@ func (s *stashScraper) scrapeSceneByFragment(scene models.SceneUpdateInput) (*mo
} }
client := s.getStashClient() client := s.getStashClient()
err = client.Query(context.Background(), &q, vars) if err := client.Query(context.Background(), &q, vars); err != nil {
if err != nil {
return nil, err return nil, err
} }
if q.FindScene != nil {
// the ids of the studio, performers and tags must be nilled
if q.FindScene.Studio != nil {
q.FindScene.Studio.ID = nil
}
for _, p := range q.FindScene.Performers {
p.ID = nil
}
for _, t := range q.FindScene.Tags {
t.ID = nil
}
}
// need to copy back to a scraped scene // need to copy back to a scraped scene
ret := models.ScrapedScene{} ret := models.ScrapedScene{}
err = copier.Copy(&ret, q.FindScene) if err := copier.Copy(&ret, q.FindScene); err != nil {
if err != nil {
return nil, err return nil, err
} }
// get the scene image directly // get the scene image directly
var err error
ret.Image, err = getStashSceneImage(s.config.StashServer.URL, q.FindScene.ID, s.globalConfig) ret.Image, err = getStashSceneImage(s.config.StashServer.URL, q.FindScene.ID, s.globalConfig)
if err != nil { if err != nil {
return nil, err return nil, err
@ -195,27 +204,25 @@ func (s *stashScraper) scrapeSceneByFragment(scene models.SceneUpdateInput) (*mo
return &ret, nil return &ret, nil
} }
func (s *stashScraper) scrapeGalleryByFragment(scene models.GalleryUpdateInput) (*models.ScrapedGallery, error) { func (s *stashScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) {
id, err := strconv.Atoi(scene.ID) return nil, errors.New("scrapeSceneByFragment not supported for stash scraper")
if err != nil { }
return nil, err
}
// query by MD5 type scrapedGalleryStash struct {
// assumes that the gallery exists in the database ID string `graphql:"id" json:"id"`
var storedGallery *models.Gallery Title *string `graphql:"title" json:"title"`
if err := s.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { Details *string `graphql:"details" json:"details"`
qb := r.Gallery() URL *string `graphql:"url" json:"url"`
Date *string `graphql:"date" json:"date"`
var err error File *models.SceneFileType `graphql:"file" json:"file"`
storedGallery, err = qb.Find(id) Studio *scrapedStudioStash `graphql:"studio" json:"studio"`
return err Tags []*scrapedTagStash `graphql:"tags" json:"tags"`
}); err != nil { Performers []*scrapedPerformerStash `graphql:"performers" json:"performers"`
return nil, err }
}
func (s *stashScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error) {
var q struct { var q struct {
FindGallery *models.ScrapedGalleryStash `graphql:"findGalleryByHash(input: $c)"` FindGallery *scrapedGalleryStash `graphql:"findGalleryByHash(input: $c)"`
} }
type GalleryHashInput struct { type GalleryHashInput struct {
@ -223,7 +230,7 @@ func (s *stashScraper) scrapeGalleryByFragment(scene models.GalleryUpdateInput)
} }
input := GalleryHashInput{ input := GalleryHashInput{
Checksum: &storedGallery.Checksum, Checksum: &gallery.Checksum,
} }
vars := map[string]interface{}{ vars := map[string]interface{}{
@ -231,36 +238,23 @@ func (s *stashScraper) scrapeGalleryByFragment(scene models.GalleryUpdateInput)
} }
client := s.getStashClient() client := s.getStashClient()
err = client.Query(context.Background(), &q, vars) if err := client.Query(context.Background(), &q, vars); err != nil {
if err != nil {
return nil, err return nil, err
} }
if q.FindGallery != nil {
// the ids of the studio, performers and tags must be nilled
if q.FindGallery.Studio != nil {
q.FindGallery.Studio.ID = nil
}
for _, p := range q.FindGallery.Performers {
p.ID = nil
}
for _, t := range q.FindGallery.Tags {
t.ID = nil
}
}
// need to copy back to a scraped gallery // need to copy back to a scraped gallery
ret := models.ScrapedGallery{} ret := models.ScrapedGallery{}
err = copier.Copy(&ret, q.FindGallery) if err := copier.Copy(&ret, q.FindGallery); err != nil {
if err != nil {
return nil, err return nil, err
} }
return &ret, nil return &ret, nil
} }
func (s *stashScraper) scrapeGalleryByFragment(scene models.ScrapedGalleryInput) (*models.ScrapedGallery, error) {
return nil, errors.New("scrapeGalleryByFragment not supported for stash scraper")
}
func (s *stashScraper) scrapePerformerByURL(url string) (*models.ScrapedPerformer, error) { func (s *stashScraper) scrapePerformerByURL(url string) (*models.ScrapedPerformer, error) {
return nil, errors.New("scrapePerformerByURL not supported for stash scraper") return nil, errors.New("scrapePerformerByURL not supported for stash scraper")
} }
@ -277,17 +271,11 @@ func (s *stashScraper) scrapeMovieByURL(url string) (*models.ScrapedMovie, error
return nil, errors.New("scrapeMovieByURL not supported for stash scraper") return nil, errors.New("scrapeMovieByURL not supported for stash scraper")
} }
func sceneFromUpdateFragment(scene models.SceneUpdateInput, txnManager models.TransactionManager) (*models.Scene, error) { func getScene(sceneID int, txnManager models.TransactionManager) (*models.Scene, error) {
id, err := strconv.Atoi(scene.ID)
if err != nil {
return nil, err
}
// TODO - should we modify it with the input?
var ret *models.Scene var ret *models.Scene
if err := txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { if err := txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
var err error var err error
ret, err = r.Scene().Find(id) ret, err = r.Scene().Find(sceneID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
@ -295,18 +283,66 @@ func sceneFromUpdateFragment(scene models.SceneUpdateInput, txnManager models.Tr
return ret, nil return ret, nil
} }
func galleryFromUpdateFragment(gallery models.GalleryUpdateInput, txnManager models.TransactionManager) (ret *models.Gallery, err error) { func sceneToUpdateInput(scene *models.Scene) models.SceneUpdateInput {
id, err := strconv.Atoi(gallery.ID) toStringPtr := func(s sql.NullString) *string {
if err != nil { if s.Valid {
return nil, err return &s.String
}
return nil
} }
dateToStringPtr := func(s models.SQLiteDate) *string {
if s.Valid {
return &s.String
}
return nil
}
return models.SceneUpdateInput{
ID: strconv.Itoa(scene.ID),
Title: toStringPtr(scene.Title),
Details: toStringPtr(scene.Details),
URL: toStringPtr(scene.URL),
Date: dateToStringPtr(scene.Date),
}
}
func getGallery(galleryID int, txnManager models.TransactionManager) (*models.Gallery, error) {
var ret *models.Gallery
if err := txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error { if err := txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
ret, err = r.Gallery().Find(id) var err error
ret, err = r.Gallery().Find(galleryID)
return err return err
}); err != nil { }); err != nil {
return nil, err return nil, err
} }
return ret, nil return ret, nil
} }
func galleryToUpdateInput(gallery *models.Gallery) models.GalleryUpdateInput {
toStringPtr := func(s sql.NullString) *string {
if s.Valid {
return &s.String
}
return nil
}
dateToStringPtr := func(s models.SQLiteDate) *string {
if s.Valid {
return &s.String
}
return nil
}
return models.GalleryUpdateInput{
ID: strconv.Itoa(gallery.ID),
Title: toStringPtr(gallery.Title),
Details: toStringPtr(gallery.Details),
URL: toStringPtr(gallery.URL),
Date: dateToStringPtr(gallery.Date),
}
}
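For illustration only (not from this commit): sceneToUpdateInput and galleryToUpdateInput both rely on converting nullable database fields into optional pointers, so that unset values stay nil in the update input instead of becoming empty strings. A small self-contained sketch of that pattern with hypothetical types:

```go
package main

import (
	"database/sql"
	"fmt"
	"strconv"
)

// Toy stand-ins for the stored record and the update input.
type storedGallery struct {
	ID    int
	Title sql.NullString
	URL   sql.NullString
}

type galleryUpdateInput struct {
	ID    string
	Title *string
	URL   *string
}

// nullStringPtr mirrors the toStringPtr helper in the diff: only valid
// values become pointers, everything else stays nil so "unset" can be told
// apart from "empty".
func nullStringPtr(s sql.NullString) *string {
	if s.Valid {
		return &s.String
	}
	return nil
}

func main() {
	g := storedGallery{ID: 3, Title: sql.NullString{String: "Beach trip", Valid: true}}
	input := galleryUpdateInput{
		ID:    strconv.Itoa(g.ID),
		Title: nullStringPtr(g.Title),
		URL:   nullStringPtr(g.URL), // invalid NullString stays nil
	}
	fmt.Println(input.ID, *input.Title, input.URL == nil)
}
```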
View file
@ -66,8 +66,79 @@ func (c Client) QueryStashBoxScene(queryStr string) ([]*models.ScrapedScene, err
} }
// FindStashBoxScenesByFingerprints queries stash-box for scenes using every // FindStashBoxScenesByFingerprints queries stash-box for scenes using every
// scene's MD5/OSHASH checksum, or PHash // scene's MD5/OSHASH checksum, or PHash, and returns results in the same order
func (c Client) FindStashBoxScenesByFingerprints(sceneIDs []string) ([]*models.ScrapedScene, error) { // as the input slice.
func (c Client) FindStashBoxScenesByFingerprints(sceneIDs []string) ([][]*models.ScrapedScene, error) {
ids, err := utils.StringSliceToIntSlice(sceneIDs)
if err != nil {
return nil, err
}
var fingerprints []string
// map fingerprints to their scene index
fpToScene := make(map[string][]int)
if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
qb := r.Scene()
for index, sceneID := range ids {
scene, err := qb.Find(sceneID)
if err != nil {
return err
}
if scene == nil {
return fmt.Errorf("scene with id %d not found", sceneID)
}
if scene.Checksum.Valid {
fingerprints = append(fingerprints, scene.Checksum.String)
fpToScene[scene.Checksum.String] = append(fpToScene[scene.Checksum.String], index)
}
if scene.OSHash.Valid {
fingerprints = append(fingerprints, scene.OSHash.String)
fpToScene[scene.OSHash.String] = append(fpToScene[scene.OSHash.String], index)
}
if scene.Phash.Valid {
phashStr := utils.PhashToString(scene.Phash.Int64)
fingerprints = append(fingerprints, phashStr)
fpToScene[phashStr] = append(fpToScene[phashStr], index)
}
}
return nil
}); err != nil {
return nil, err
}
allScenes, err := c.findStashBoxScenesByFingerprints(fingerprints)
if err != nil {
return nil, err
}
// set the matched scenes back in their original order
ret := make([][]*models.ScrapedScene, len(sceneIDs))
for _, s := range allScenes {
var addedTo []int
for _, fp := range s.Fingerprints {
sceneIndexes := fpToScene[fp.Hash]
for _, index := range sceneIndexes {
if !utils.IntInclude(addedTo, index) {
addedTo = append(addedTo, index)
ret[index] = append(ret[index], s)
}
}
}
}
return ret, nil
}
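For illustration only (not from this commit): the core of FindStashBoxScenesByFingerprints is the fingerprint-to-index map, which lets results come back in input order even though each scene can contribute several hashes and one remote match can satisfy several scenes. A trimmed-down, runnable sketch of that fan-out using hypothetical data:

```go
package main

import "fmt"

// match stands in for a scraped scene returned by stash-box, carrying the
// hashes it was matched on.
type match struct {
	Title  string
	Hashes []string
}

func contains(s []int, v int) bool {
	for _, x := range s {
		if x == v {
			return true
		}
	}
	return false
}

func main() {
	// hypothetical local scenes, in query order, each with its own hashes
	sceneHashes := [][]string{
		{"md5-aaa", "oshash-aaa"},
		{"md5-bbb"},
	}

	// map each fingerprint hash back to the indexes of the scenes that own it
	fpToScene := make(map[string][]int)
	for idx, hashes := range sceneHashes {
		for _, h := range hashes {
			fpToScene[h] = append(fpToScene[h], idx)
		}
	}

	// hypothetical remote results, returned in arbitrary order
	remote := []match{
		{Title: "second scene", Hashes: []string{"md5-bbb"}},
		{Title: "first scene", Hashes: []string{"oshash-aaa", "md5-aaa"}},
	}

	// slot every match back into the position of each scene that produced
	// one of its hashes, avoiding duplicates when two hashes hit one scene
	ret := make([][]match, len(sceneHashes))
	for _, m := range remote {
		var addedTo []int
		for _, h := range m.Hashes {
			for _, idx := range fpToScene[h] {
				if !contains(addedTo, idx) {
					addedTo = append(addedTo, idx)
					ret[idx] = append(ret[idx], m)
				}
			}
		}
	}

	fmt.Println(ret[0][0].Title, "/", ret[1][0].Title)
}
```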
// FindStashBoxScenesByFingerprintsFlat queries stash-box for scenes using every
// scene's MD5/OSHASH checksum, or PHash, and returns the results as a flat slice.
func (c Client) FindStashBoxScenesByFingerprintsFlat(sceneIDs []string) ([]*models.ScrapedScene, error) {
ids, err := utils.StringSliceToIntSlice(sceneIDs) ids, err := utils.StringSliceToIntSlice(sceneIDs)
if err != nil { if err != nil {
return nil, err return nil, err
@ -97,7 +168,8 @@ func (c Client) FindStashBoxScenesByFingerprints(sceneIDs []string) ([]*models.S
} }
if scene.Phash.Valid { if scene.Phash.Valid {
fingerprints = append(fingerprints, utils.PhashToString(scene.Phash.Int64)) phashStr := utils.PhashToString(scene.Phash.Int64)
fingerprints = append(fingerprints, phashStr)
} }
} }
@ -237,10 +309,18 @@ func (c Client) QueryStashBoxPerformer(queryStr string) ([]*models.StashBoxPerfo
Results: performers, Results: performers,
}, },
} }
// set the deprecated image field
for _, p := range res[0].Results {
if len(p.Images) > 0 {
p.Image = &p.Images[0]
}
}
return res, err return res, err
} }
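For illustration only (not from this commit): the deprecated single-image field is kept in sync by copying the first entry of the new images list, so older consumers that still read the singular field keep working. A tiny sketch of that pattern with a toy type:

```go
package main

import "fmt"

type performer struct {
	Images []string
	Image  *string // deprecated single-image field kept for older clients
}

func main() {
	p := &performer{Images: []string{"https://example.test/a.jpg", "https://example.test/b.jpg"}}

	// Mirror the new images list into the deprecated field.
	if len(p.Images) > 0 {
		p.Image = &p.Images[0]
	}
	fmt.Println(*p.Image)
}
```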
func (c Client) queryStashBoxPerformer(queryStr string) ([]*models.ScrapedScenePerformer, error) { func (c Client) queryStashBoxPerformer(queryStr string) ([]*models.ScrapedPerformer, error) {
performers, err := c.client.SearchPerformer(context.TODO(), queryStr) performers, err := c.client.SearchPerformer(context.TODO(), queryStr)
if err != nil { if err != nil {
return nil, err return nil, err
@ -248,7 +328,7 @@ func (c Client) queryStashBoxPerformer(queryStr string) ([]*models.ScrapedSceneP
performerFragments := performers.SearchPerformer performerFragments := performers.SearchPerformer
var ret []*models.ScrapedScenePerformer var ret []*models.ScrapedPerformer
for _, fragment := range performerFragments { for _, fragment := range performerFragments {
performer := performerFragmentToScrapedScenePerformer(*fragment) performer := performerFragmentToScrapedScenePerformer(*fragment)
ret = append(ret, performer) ret = append(ret, performer)
@ -292,6 +372,50 @@ func (c Client) FindStashBoxPerformersByNames(performerIDs []string) ([]*models.
return c.findStashBoxPerformersByNames(performers) return c.findStashBoxPerformersByNames(performers)
} }
func (c Client) FindStashBoxPerformersByPerformerNames(performerIDs []string) ([][]*models.ScrapedPerformer, error) {
ids, err := utils.StringSliceToIntSlice(performerIDs)
if err != nil {
return nil, err
}
var performers []*models.Performer
if err := c.txnManager.WithReadTxn(context.TODO(), func(r models.ReaderRepository) error {
qb := r.Performer()
for _, performerID := range ids {
performer, err := qb.Find(performerID)
if err != nil {
return err
}
if performer == nil {
return fmt.Errorf("performer with id %d not found", performerID)
}
if performer.Name.Valid {
performers = append(performers, performer)
}
}
return nil
}); err != nil {
return nil, err
}
results, err := c.findStashBoxPerformersByNames(performers)
if err != nil {
return nil, err
}
var ret [][]*models.ScrapedPerformer
for _, r := range results {
ret = append(ret, r.Results)
}
return ret, nil
}
func (c Client) findStashBoxPerformersByNames(performers []*models.Performer) ([]*models.StashBoxPerformerQueryResult, error) { func (c Client) findStashBoxPerformersByNames(performers []*models.Performer) ([]*models.StashBoxPerformerQueryResult, error) {
var ret []*models.StashBoxPerformerQueryResult var ret []*models.StashBoxPerformerQueryResult
for _, performer := range performers { for _, performer := range performers {
@ -413,14 +537,14 @@ func fetchImage(url string) (*string, error) {
return &img, nil return &img, nil
} }
func performerFragmentToScrapedScenePerformer(p graphql.PerformerFragment) *models.ScrapedScenePerformer { func performerFragmentToScrapedScenePerformer(p graphql.PerformerFragment) *models.ScrapedPerformer {
id := p.ID id := p.ID
images := []string{} images := []string{}
for _, image := range p.Images { for _, image := range p.Images {
images = append(images, image.URL) images = append(images, image.URL)
} }
sp := &models.ScrapedScenePerformer{ sp := &models.ScrapedPerformer{
Name: p.Name, Name: &p.Name,
Country: p.Country, Country: p.Country,
Measurements: formatMeasurements(p.Measurements), Measurements: formatMeasurements(p.Measurements),
CareerLength: formatCareerLength(p.CareerStartYear, p.CareerEndYear), CareerLength: formatCareerLength(p.CareerStartYear, p.CareerEndYear),
@ -430,10 +554,13 @@ func performerFragmentToScrapedScenePerformer(p graphql.PerformerFragment) *mode
RemoteSiteID: &id, RemoteSiteID: &id,
Images: images, Images: images,
// TODO - tags not currently supported // TODO - tags not currently supported
// TODO - Image - should be returned as a set of URLs. Will need a
// graphql schema change to accommodate this. Leave off for now. // graphql schema change to accommodate this. Leave off for now.
} }
if len(sp.Images) > 0 {
sp.Image = &sp.Images[0]
}
if p.Height != nil && *p.Height > 0 { if p.Height != nil && *p.Height > 0 {
hs := strconv.Itoa(*p.Height) hs := strconv.Itoa(*p.Height)
sp.Height = &hs sp.Height = &hs
@ -511,13 +638,13 @@ func sceneFragmentToScrapedScene(txnManager models.TransactionManager, s *graphq
if s.Studio != nil { if s.Studio != nil {
studioID := s.Studio.ID studioID := s.Studio.ID
ss.Studio = &models.ScrapedSceneStudio{ ss.Studio = &models.ScrapedStudio{
Name: s.Studio.Name, Name: s.Studio.Name,
URL: findURL(s.Studio.Urls, "HOME"), URL: findURL(s.Studio.Urls, "HOME"),
RemoteSiteID: &studioID, RemoteSiteID: &studioID,
} }
err := scraper.MatchScrapedSceneStudio(r.Studio(), ss.Studio) err := scraper.MatchScrapedStudio(r.Studio(), ss.Studio)
if err != nil { if err != nil {
return err return err
} }
@ -526,7 +653,7 @@ func sceneFragmentToScrapedScene(txnManager models.TransactionManager, s *graphq
for _, p := range s.Performers { for _, p := range s.Performers {
sp := performerFragmentToScrapedScenePerformer(p.Performer) sp := performerFragmentToScrapedScenePerformer(p.Performer)
err := scraper.MatchScrapedScenePerformer(pqb, sp) err := scraper.MatchScrapedPerformer(pqb, sp)
if err != nil { if err != nil {
return err return err
} }
@ -535,11 +662,11 @@ func sceneFragmentToScrapedScene(txnManager models.TransactionManager, s *graphq
} }
for _, t := range s.Tags { for _, t := range s.Tags {
st := &models.ScrapedSceneTag{ st := &models.ScrapedTag{
Name: t.Name, Name: t.Name,
} }
err := scraper.MatchScrapedSceneTag(tqb, st) err := scraper.MatchScrapedTag(tqb, st)
if err != nil { if err != nil {
return err return err
} }
@ -555,7 +682,7 @@ func sceneFragmentToScrapedScene(txnManager models.TransactionManager, s *graphq
return ss, nil return ss, nil
} }
func (c Client) FindStashBoxPerformerByID(id string) (*models.ScrapedScenePerformer, error) { func (c Client) FindStashBoxPerformerByID(id string) (*models.ScrapedPerformer, error) {
performer, err := c.client.FindPerformerByID(context.TODO(), id) performer, err := c.client.FindPerformerByID(context.TODO(), id)
if err != nil { if err != nil {
return nil, err return nil, err
@ -565,13 +692,13 @@ func (c Client) FindStashBoxPerformerByID(id string) (*models.ScrapedScenePerfor
return ret, nil return ret, nil
} }
func (c Client) FindStashBoxPerformerByName(name string) (*models.ScrapedScenePerformer, error) { func (c Client) FindStashBoxPerformerByName(name string) (*models.ScrapedPerformer, error) {
performers, err := c.client.SearchPerformer(context.TODO(), name) performers, err := c.client.SearchPerformer(context.TODO(), name)
if err != nil { if err != nil {
return nil, err return nil, err
} }
var ret *models.ScrapedScenePerformer var ret *models.ScrapedPerformer
for _, performer := range performers.SearchPerformer { for _, performer := range performers.SearchPerformer {
if strings.EqualFold(performer.Name, name) { if strings.EqualFold(performer.Name, name) {
ret = performerFragmentToScrapedScenePerformer(*performer) ret = performerFragmentToScrapedScenePerformer(*performer)
View file
@ -124,18 +124,9 @@ func (s *xpathScraper) scrapePerformerByFragment(scrapedPerformer models.Scraped
return nil, errors.New("scrapePerformerByFragment not supported for xpath scraper") return nil, errors.New("scrapePerformerByFragment not supported for xpath scraper")
} }
func (s *xpathScraper) scrapeSceneByFragment(scene models.SceneUpdateInput) (*models.ScrapedScene, error) { func (s *xpathScraper) scrapeSceneByScene(scene *models.Scene) (*models.ScrapedScene, error) {
storedScene, err := sceneFromUpdateFragment(scene, s.txnManager)
if err != nil {
return nil, err
}
if storedScene == nil {
return nil, errors.New("no scene found")
}
// construct the URL // construct the URL
queryURL := queryURLParametersFromScene(storedScene) queryURL := queryURLParametersFromScene(scene)
if s.scraper.QueryURLReplacements != nil { if s.scraper.QueryURLReplacements != nil {
queryURL.applyReplacements(s.scraper.QueryURLReplacements) queryURL.applyReplacements(s.scraper.QueryURLReplacements)
} }
@ -157,18 +148,13 @@ func (s *xpathScraper) scrapeSceneByFragment(scene models.SceneUpdateInput) (*mo
return scraper.scrapeScene(q) return scraper.scrapeScene(q)
} }
func (s *xpathScraper) scrapeGalleryByFragment(gallery models.GalleryUpdateInput) (*models.ScrapedGallery, error) { func (s *xpathScraper) scrapeSceneByFragment(scene models.ScrapedSceneInput) (*models.ScrapedScene, error) {
storedGallery, err := galleryFromUpdateFragment(gallery, s.txnManager) return nil, errors.New("scrapeSceneByFragment not supported for xpath scraper")
if err != nil { }
return nil, err
}
if storedGallery == nil {
return nil, errors.New("no scene found")
}
func (s *xpathScraper) scrapeGalleryByGallery(gallery *models.Gallery) (*models.ScrapedGallery, error) {
// construct the URL // construct the URL
queryURL := queryURLParametersFromGallery(storedGallery) queryURL := queryURLParametersFromGallery(gallery)
if s.scraper.QueryURLReplacements != nil { if s.scraper.QueryURLReplacements != nil {
queryURL.applyReplacements(s.scraper.QueryURLReplacements) queryURL.applyReplacements(s.scraper.QueryURLReplacements)
} }
@ -190,6 +176,10 @@ func (s *xpathScraper) scrapeGalleryByFragment(gallery models.GalleryUpdateInput
return scraper.scrapeGallery(q) return scraper.scrapeGallery(q)
} }
func (s *xpathScraper) scrapeGalleryByFragment(gallery models.ScrapedGalleryInput) (*models.ScrapedGallery, error) {
return nil, errors.New("scrapeGalleryByFragment not supported for xpath scraper")
}
func (s *xpathScraper) loadURL(url string) (*html.Node, error) { func (s *xpathScraper) loadURL(url string) (*html.Node, error) {
r, err := loadURL(url, s.config, s.globalConfig) r, err := loadURL(url, s.config, s.globalConfig)
if err != nil { if err != nil {
View file
@ -593,7 +593,7 @@ func makeSceneXPathConfig() mappedScraper {
return scraper return scraper
} }
func verifyTags(t *testing.T, expectedTagNames []string, actualTags []*models.ScrapedSceneTag) { func verifyTags(t *testing.T, expectedTagNames []string, actualTags []*models.ScrapedTag) {
t.Helper() t.Helper()
i := 0 i := 0
@ -614,7 +614,7 @@ func verifyTags(t *testing.T, expectedTagNames []string, actualTags []*models.Sc
} }
} }
func verifyMovies(t *testing.T, expectedMovieNames []string, actualMovies []*models.ScrapedSceneMovie) { func verifyMovies(t *testing.T, expectedMovieNames []string, actualMovies []*models.ScrapedMovie) {
t.Helper() t.Helper()
i := 0 i := 0
@ -625,7 +625,7 @@ func verifyMovies(t *testing.T, expectedMovieNames []string, actualMovies []*mod
expectedMovie = expectedMovieNames[i] expectedMovie = expectedMovieNames[i]
} }
if i < len(actualMovies) { if i < len(actualMovies) {
actualMovie = actualMovies[i].Name actualMovie = *actualMovies[i].Name
} }
if expectedMovie != actualMovie { if expectedMovie != actualMovie {
@ -635,7 +635,7 @@ func verifyMovies(t *testing.T, expectedMovieNames []string, actualMovies []*mod
} }
} }
func verifyPerformers(t *testing.T, expectedNames []string, expectedURLs []string, actualPerformers []*models.ScrapedScenePerformer) { func verifyPerformers(t *testing.T, expectedNames []string, expectedURLs []string, actualPerformers []*models.ScrapedPerformer) {
t.Helper() t.Helper()
i := 0 i := 0
@ -651,7 +651,7 @@ func verifyPerformers(t *testing.T, expectedNames []string, expectedURLs []strin
expectedURL = expectedURLs[i] expectedURL = expectedURLs[i]
} }
if i < len(actualPerformers) { if i < len(actualPerformers) {
actualName = actualPerformers[i].Name actualName = *actualPerformers[i].Name
if actualPerformers[i].URL != nil { if actualPerformers[i].URL != nil {
actualURL = *actualPerformers[i].URL actualURL = *actualPerformers[i].URL
} }
View file
@ -229,19 +229,18 @@ export const GalleryEditPanel: React.FC<
} }
async function onScrapeClicked(scraper: GQL.Scraper) { async function onScrapeClicked(scraper: GQL.Scraper) {
if (!gallery) return;
setIsLoading(true); setIsLoading(true);
try { try {
const galleryInput = getGalleryInput( const result = await queryScrapeGallery(scraper.id, gallery.id);
formik.values if (!result.data || !result.data.scrapeSingleGallery?.length) {
) as GQL.GalleryUpdateInput;
const result = await queryScrapeGallery(scraper.id, galleryInput);
if (!result.data || !result.data.scrapeGallery) {
Toast.success({ Toast.success({
content: "No galleries found", content: "No galleries found",
}); });
return; return;
} }
setScrapedGallery(result.data.scrapeGallery); setScrapedGallery(result.data.scrapeSingleGallery[0]);
} catch (e) { } catch (e) {
Toast.error(e); Toast.error(e);
} finally { } finally {
View file
@ -45,8 +45,8 @@ function renderScrapedStudioRow(
title: string, title: string,
result: ScrapeResult<string>, result: ScrapeResult<string>,
onChange: (value: ScrapeResult<string>) => void, onChange: (value: ScrapeResult<string>) => void,
newStudio?: GQL.ScrapedSceneStudio, newStudio?: GQL.ScrapedStudio,
onCreateNew?: (value: GQL.ScrapedSceneStudio) => void onCreateNew?: (value: GQL.ScrapedStudio) => void
) { ) {
return ( return (
<ScrapeDialogRow <ScrapeDialogRow
@ -92,9 +92,14 @@ function renderScrapedPerformersRow(
title: string, title: string,
result: ScrapeResult<string[]>, result: ScrapeResult<string[]>,
onChange: (value: ScrapeResult<string[]>) => void, onChange: (value: ScrapeResult<string[]>) => void,
newPerformers: GQL.ScrapedScenePerformer[], newPerformers: GQL.ScrapedPerformer[],
onCreateNew?: (value: GQL.ScrapedScenePerformer) => void onCreateNew?: (value: GQL.ScrapedPerformer) => void
) { ) {
const performersCopy = newPerformers.map((p) => {
const name: string = p.name ?? "";
return { ...p, name };
});
return ( return (
<ScrapeDialogRow <ScrapeDialogRow
title={title} title={title}
@ -106,7 +111,7 @@ function renderScrapedPerformersRow(
) )
} }
onChange={onChange} onChange={onChange}
newValues={newPerformers} newValues={performersCopy}
onCreateNew={onCreateNew} onCreateNew={onCreateNew}
/> />
); );
@ -139,8 +144,8 @@ function renderScrapedTagsRow(
title: string, title: string,
result: ScrapeResult<string[]>, result: ScrapeResult<string[]>,
onChange: (value: ScrapeResult<string[]>) => void, onChange: (value: ScrapeResult<string[]>) => void,
newTags: GQL.ScrapedSceneTag[], newTags: GQL.ScrapedTag[],
onCreateNew?: (value: GQL.ScrapedSceneTag) => void onCreateNew?: (value: GQL.ScrapedTag) => void
) { ) {
return ( return (
<ScrapeDialogRow <ScrapeDialogRow
@ -189,9 +194,7 @@ export const GalleryScrapeDialog: React.FC<IGalleryScrapeDialogProps> = (
props.scraped.studio?.stored_id props.scraped.studio?.stored_id
) )
); );
const [newStudio, setNewStudio] = useState< const [newStudio, setNewStudio] = useState<GQL.ScrapedStudio | undefined>(
GQL.ScrapedSceneStudio | undefined
>(
props.scraped.studio && !props.scraped.studio.stored_id props.scraped.studio && !props.scraped.studio.stored_id
? props.scraped.studio ? props.scraped.studio
: undefined : undefined
@ -241,9 +244,9 @@ export const GalleryScrapeDialog: React.FC<IGalleryScrapeDialogProps> = (
mapStoredIdObjects(props.scraped.performers ?? undefined) mapStoredIdObjects(props.scraped.performers ?? undefined)
) )
); );
const [newPerformers, setNewPerformers] = useState< const [newPerformers, setNewPerformers] = useState<GQL.ScrapedPerformer[]>(
GQL.ScrapedScenePerformer[] props.scraped.performers?.filter((t) => !t.stored_id) ?? []
>(props.scraped.performers?.filter((t) => !t.stored_id) ?? []); );
const [tags, setTags] = useState<ScrapeResult<string[]>>( const [tags, setTags] = useState<ScrapeResult<string[]>>(
new ScrapeResult<string[]>( new ScrapeResult<string[]>(
@ -251,7 +254,7 @@ export const GalleryScrapeDialog: React.FC<IGalleryScrapeDialogProps> = (
mapStoredIdObjects(props.scraped.tags ?? undefined) mapStoredIdObjects(props.scraped.tags ?? undefined)
) )
); );
const [newTags, setNewTags] = useState<GQL.ScrapedSceneTag[]>( const [newTags, setNewTags] = useState<GQL.ScrapedTag[]>(
props.scraped.tags?.filter((t) => !t.stored_id) ?? [] props.scraped.tags?.filter((t) => !t.stored_id) ?? []
); );
@ -275,7 +278,7 @@ export const GalleryScrapeDialog: React.FC<IGalleryScrapeDialogProps> = (
return <></>; return <></>;
} }
async function createNewStudio(toCreate: GQL.ScrapedSceneStudio) { async function createNewStudio(toCreate: GQL.ScrapedStudio) {
try { try {
const result = await createStudio({ const result = await createStudio({
variables: { variables: {
@ -308,7 +311,7 @@ export const GalleryScrapeDialog: React.FC<IGalleryScrapeDialogProps> = (
} }
} }
async function createNewPerformer(toCreate: GQL.ScrapedScenePerformer) { async function createNewPerformer(toCreate: GQL.ScrapedPerformer) {
const input = makePerformerCreateInput(toCreate); const input = makePerformerCreateInput(toCreate);
try { try {
@ -349,7 +352,7 @@ export const GalleryScrapeDialog: React.FC<IGalleryScrapeDialogProps> = (
} }
} }
async function createNewTag(toCreate: GQL.ScrapedSceneTag) { async function createNewTag(toCreate: GQL.ScrapedTag) {
const tagInput: GQL.TagCreateInput = { name: toCreate.name ?? "" }; const tagInput: GQL.TagCreateInput = { name: toCreate.name ?? "" };
try { try {
const result = await createTag({ const result = await createTag({
View file
@ -203,8 +203,8 @@ export const MovieEditPanel: React.FC<IMovieEditPanel> = ({
formik.setFieldValue("date", state.date ?? undefined); formik.setFieldValue("date", state.date ?? undefined);
} }
if (state.studio && state.studio.id) { if (state.studio && state.studio.stored_id) {
formik.setFieldValue("studio_id", state.studio.id ?? undefined); formik.setFieldValue("studio_id", state.studio.stored_id ?? undefined);
} }
if (state.director) { if (state.director) {
View file
@ -87,7 +87,10 @@ export const MovieScrapeDialog: React.FC<IMovieScrapeDialogProps> = (
new ScrapeResult<string>(props.movie.synopsis, props.scraped.synopsis) new ScrapeResult<string>(props.movie.synopsis, props.scraped.synopsis)
); );
const [studio, setStudio] = useState<ScrapeResult<string>>( const [studio, setStudio] = useState<ScrapeResult<string>>(
new ScrapeResult<string>(props.movie.studio_id, props.scraped.studio?.id) new ScrapeResult<string>(
props.movie.studio_id,
props.scraped.studio?.stored_id
)
); );
const [url, setURL] = useState<ScrapeResult<string>>( const [url, setURL] = useState<ScrapeResult<string>>(
new ScrapeResult<string>(props.movie.url, props.scraped.url) new ScrapeResult<string>(props.movie.url, props.scraped.url)
@ -123,7 +126,7 @@ export const MovieScrapeDialog: React.FC<IMovieScrapeDialogProps> = (
const durationString = duration.getNewValue(); const durationString = duration.getNewValue();
return { return {
name: name.getNewValue(), name: name.getNewValue() ?? "",
aliases: aliases.getNewValue(), aliases: aliases.getNewValue(),
duration: durationString, duration: durationString,
date: date.getNewValue(), date: date.getNewValue(),
@ -131,7 +134,7 @@ export const MovieScrapeDialog: React.FC<IMovieScrapeDialogProps> = (
synopsis: synopsis.getNewValue(), synopsis: synopsis.getNewValue(),
studio: newStudio studio: newStudio
? { ? {
id: newStudio, stored_id: newStudio,
name: "", name: "",
} }
: undefined, : undefined,
View file
@ -72,7 +72,7 @@ export const PerformerEditPanel: React.FC<IPerformerDetails> = ({
// Editing state // Editing state
const [scraper, setScraper] = useState<GQL.Scraper | IStashBox | undefined>(); const [scraper, setScraper] = useState<GQL.Scraper | IStashBox | undefined>();
const [newTags, setNewTags] = useState<GQL.ScrapedSceneTag[]>(); const [newTags, setNewTags] = useState<GQL.ScrapedTag[]>();
const [isScraperModalOpen, setIsScraperModalOpen] = useState<boolean>(false); const [isScraperModalOpen, setIsScraperModalOpen] = useState<boolean>(false);
const [isDeleteAlertOpen, setIsDeleteAlertOpen] = useState<boolean>(false); const [isDeleteAlertOpen, setIsDeleteAlertOpen] = useState<boolean>(false);
@ -224,7 +224,7 @@ export const PerformerEditPanel: React.FC<IPerformerDetails> = ({
return ret; return ret;
} }
async function createNewTag(toCreate: GQL.ScrapedSceneTag) { async function createNewTag(toCreate: GQL.ScrapedTag) {
const tagInput: GQL.TagCreateInput = { name: toCreate.name ?? "" }; const tagInput: GQL.TagCreateInput = { name: toCreate.name ?? "" };
try { try {
const result = await createTag({ const result = await createTag({
@ -334,9 +334,10 @@ export const PerformerEditPanel: React.FC<IPerformerDetails> = ({
// otherwise follow existing behaviour (`undefined`) // otherwise follow existing behaviour (`undefined`)
if ( if (
(!isNew || [null, undefined].includes(formik.values.image)) && (!isNew || [null, undefined].includes(formik.values.image)) &&
state.image !== undefined state.images &&
state.images.length > 0
) { ) {
const imageStr = state.image; const imageStr = state.images[0];
formik.setFieldValue("image", imageStr ?? undefined); formik.setFieldValue("image", imageStr ?? undefined);
} }
if (state.details) { if (state.details) {
@ -524,20 +525,23 @@ export const PerformerEditPanel: React.FC<IPerformerDetails> = ({
const { const {
__typename, __typename,
image: _image, images: _image,
tags: _tags, tags: _tags,
...ret ...ret
} = selectedPerformer; } = selectedPerformer;
const result = await queryScrapePerformer(selectedScraper.id, ret); const result = await queryScrapePerformer(selectedScraper.id, ret);
if (!result?.data?.scrapePerformer) return; if (!result?.data?.scrapeSinglePerformer?.length) return;
// assume one result
// if this is a new performer, just dump the data // if this is a new performer, just dump the data
if (isNew) { if (isNew) {
updatePerformerEditStateFromScraper(result.data.scrapePerformer); updatePerformerEditStateFromScraper(
result.data.scrapeSinglePerformer[0]
);
setScraper(undefined); setScraper(undefined);
} else { } else {
setScrapedPerformer(result.data.scrapePerformer); setScrapedPerformer(result.data.scrapeSinglePerformer[0]);
} }
} catch (e) { } catch (e) {
Toast.error(e); Toast.error(e);
@ -569,12 +573,12 @@ export const PerformerEditPanel: React.FC<IPerformerDetails> = ({
} }
} }
async function onScrapeStashBox(performerResult: GQL.ScrapedScenePerformer) { async function onScrapeStashBox(performerResult: GQL.ScrapedPerformer) {
setIsScraperModalOpen(false); setIsScraperModalOpen(false);
const result: Partial<GQL.ScrapedPerformerDataFragment> = { const result: GQL.ScrapedPerformerDataFragment = {
...performerResult, ...performerResult,
image: performerResult.images?.[0] ?? undefined, images: performerResult.images ?? undefined,
country: getCountryByISO(performerResult.country), country: getCountryByISO(performerResult.country),
__typename: "ScrapedPerformer", __typename: "ScrapedPerformer",
}; };
View file
@ -97,8 +97,8 @@ function renderScrapedTagsRow(
title: string, title: string,
result: ScrapeResult<string[]>, result: ScrapeResult<string[]>,
onChange: (value: ScrapeResult<string[]>) => void, onChange: (value: ScrapeResult<string[]>) => void,
newTags: GQL.ScrapedSceneTag[], newTags: GQL.ScrapedTag[],
onCreateNew?: (value: GQL.ScrapedSceneTag) => void onCreateNew?: (value: GQL.ScrapedTag) => void
) { ) {
return ( return (
<ScrapeDialogRow <ScrapeDialogRow
@ -299,12 +299,17 @@ export const PerformerScrapeDialog: React.FC<IPerformerScrapeDialogProps> = (
) )
); );
const [newTags, setNewTags] = useState<GQL.ScrapedSceneTag[]>( const [newTags, setNewTags] = useState<GQL.ScrapedTag[]>(
props.scraped.tags?.filter((t) => !t.stored_id) ?? [] props.scraped.tags?.filter((t) => !t.stored_id) ?? []
); );
const [image, setImage] = useState<ScrapeResult<string>>( const [image, setImage] = useState<ScrapeResult<string>>(
new ScrapeResult<string>(props.performer.image, props.scraped.image) new ScrapeResult<string>(
props.performer.image,
props.scraped.images && props.scraped.images.length > 0
? props.scraped.images[0]
: undefined
)
); );
const allFields = [ const allFields = [
@ -338,7 +343,7 @@ export const PerformerScrapeDialog: React.FC<IPerformerScrapeDialogProps> = (
return <></>; return <></>;
} }
async function createNewTag(toCreate: GQL.ScrapedSceneTag) { async function createNewTag(toCreate: GQL.ScrapedTag) {
const tagInput: GQL.TagCreateInput = { name: toCreate.name ?? "" }; const tagInput: GQL.TagCreateInput = { name: toCreate.name ?? "" };
try { try {
const result = await createTag({ const result = await createTag({
@ -375,8 +380,9 @@ export const PerformerScrapeDialog: React.FC<IPerformerScrapeDialogProps> = (
} }
function makeNewScrapedItem(): GQL.ScrapedPerformer { function makeNewScrapedItem(): GQL.ScrapedPerformer {
const newImage = image.getNewValue();
return { return {
name: name.getNewValue(), name: name.getNewValue() ?? "",
aliases: aliases.getNewValue(), aliases: aliases.getNewValue(),
birthdate: birthdate.getNewValue(), birthdate: birthdate.getNewValue(),
ethnicity: ethnicity.getNewValue(), ethnicity: ethnicity.getNewValue(),
@ -398,7 +404,7 @@ export const PerformerScrapeDialog: React.FC<IPerformerScrapeDialogProps> = (
name: "", name: "",
}; };
}), }),
image: image.getNewValue(), images: newImage ? [newImage] : undefined,
details: details.getNewValue(), details: details.getNewValue(),
death_date: deathDate.getNewValue(), death_date: deathDate.getNewValue(),
hair_color: hairColor.getNewValue(), hair_color: hairColor.getNewValue(),
View file
@ -30,7 +30,7 @@ const PerformerScrapeModal: React.FC<IProps> = ({
const [query, setQuery] = useState<string>(name ?? ""); const [query, setQuery] = useState<string>(name ?? "");
const { data, loading } = useScrapePerformerList(scraper.id, query); const { data, loading } = useScrapePerformerList(scraper.id, query);
const performers = data?.scrapePerformerList ?? []; const performers = data?.scrapeSinglePerformer ?? [];
const onInputChange = debounce((input: string) => { const onInputChange = debounce((input: string) => {
setQuery(input); setQuery(input);
View file
@ -16,7 +16,7 @@ export interface IStashBox extends GQL.StashBox {
interface IProps { interface IProps {
instance: IStashBox; instance: IStashBox;
onHide: () => void; onHide: () => void;
onSelectPerformer: (performer: GQL.ScrapedScenePerformer) => void; onSelectPerformer: (performer: GQL.ScrapedPerformer) => void;
name?: string; name?: string;
} }
const PerformerStashBoxModal: React.FC<IProps> = ({ const PerformerStashBoxModal: React.FC<IProps> = ({
@ -28,17 +28,19 @@ const PerformerStashBoxModal: React.FC<IProps> = ({
const intl = useIntl(); const intl = useIntl();
const inputRef = useRef<HTMLInputElement>(null); const inputRef = useRef<HTMLInputElement>(null);
const [query, setQuery] = useState<string>(name ?? ""); const [query, setQuery] = useState<string>(name ?? "");
const { data, loading } = GQL.useQueryStashBoxPerformerQuery({ const { data, loading } = GQL.useScrapeSinglePerformerQuery({
variables: { variables: {
input: { source: {
stash_box_index: instance.index, stash_box_index: instance.index,
q: query, },
input: {
query,
}, },
}, },
skip: query === "", skip: query === "",
}); });
const performers = data?.queryStashBoxPerformer?.[0].results ?? []; const performers = data?.scrapeSinglePerformer ?? [];
const onInputChange = debounce((input: string) => { const onInputChange = debounce((input: string) => {
setQuery(input); setQuery(input);
View file
@ -277,12 +277,12 @@ export const SceneEditPanel: React.FC<IProps> = ({
setIsLoading(true); setIsLoading(true);
try { try {
const result = await queryStashBoxScene(stashBoxIndex, scene.id); const result = await queryStashBoxScene(stashBoxIndex, scene.id);
if (!result.data || !result.data.queryStashBoxScene) { if (!result.data || !result.data.scrapeSingleScene) {
return; return;
} }
if (result.data.queryStashBoxScene.length > 0) { if (result.data.scrapeSingleScene.length > 0) {
setScrapedScene(result.data.queryStashBoxScene[0]); setScrapedScene(result.data.scrapeSingleScene[0]);
} else { } else {
Toast.success({ Toast.success({
content: "No scenes found", content: "No scenes found",
@ -298,17 +298,15 @@ export const SceneEditPanel: React.FC<IProps> = ({
async function onScrapeClicked(scraper: GQL.Scraper) { async function onScrapeClicked(scraper: GQL.Scraper) {
setIsLoading(true); setIsLoading(true);
try { try {
const result = await queryScrapeScene( const result = await queryScrapeScene(scraper.id, scene.id);
scraper.id, if (!result.data || !result.data.scrapeSingleScene?.length) {
getSceneInput(formik.values)
);
if (!result.data || !result.data.scrapeScene) {
Toast.success({ Toast.success({
content: "No scenes found", content: "No scenes found",
}); });
return; return;
} }
setScrapedScene(result.data.scrapeScene); // assume one returned scene
setScrapedScene(result.data.scrapeSingleScene[0]);
} catch (e) { } catch (e) {
Toast.error(e); Toast.error(e);
} finally { } finally {
View file
@ -48,8 +48,8 @@ function renderScrapedStudioRow(
title: string, title: string,
result: ScrapeResult<string>, result: ScrapeResult<string>,
onChange: (value: ScrapeResult<string>) => void, onChange: (value: ScrapeResult<string>) => void,
newStudio?: GQL.ScrapedSceneStudio, newStudio?: GQL.ScrapedStudio,
onCreateNew?: (value: GQL.ScrapedSceneStudio) => void onCreateNew?: (value: GQL.ScrapedStudio) => void
) { ) {
return ( return (
<ScrapeDialogRow <ScrapeDialogRow
@ -95,9 +95,14 @@ function renderScrapedPerformersRow(
title: string, title: string,
result: ScrapeResult<string[]>, result: ScrapeResult<string[]>,
onChange: (value: ScrapeResult<string[]>) => void, onChange: (value: ScrapeResult<string[]>) => void,
newPerformers: GQL.ScrapedScenePerformer[], newPerformers: GQL.ScrapedPerformer[],
onCreateNew?: (value: GQL.ScrapedScenePerformer) => void onCreateNew?: (value: GQL.ScrapedPerformer) => void
) { ) {
const performersCopy = newPerformers.map((p) => {
const name: string = p.name ?? "";
return { ...p, name };
});
return ( return (
<ScrapeDialogRow <ScrapeDialogRow
title={title} title={title}
@ -109,7 +114,7 @@ function renderScrapedPerformersRow(
) )
} }
onChange={onChange} onChange={onChange}
newValues={newPerformers} newValues={performersCopy}
onCreateNew={onCreateNew} onCreateNew={onCreateNew}
/> />
); );
@ -142,9 +147,14 @@ function renderScrapedMoviesRow(
title: string, title: string,
result: ScrapeResult<string[]>, result: ScrapeResult<string[]>,
onChange: (value: ScrapeResult<string[]>) => void, onChange: (value: ScrapeResult<string[]>) => void,
newMovies: GQL.ScrapedSceneMovie[], newMovies: GQL.ScrapedMovie[],
onCreateNew?: (value: GQL.ScrapedSceneMovie) => void onCreateNew?: (value: GQL.ScrapedMovie) => void
) { ) {
const moviesCopy = newMovies.map((p) => {
const name: string = p.name ?? "";
return { ...p, name };
});
return ( return (
<ScrapeDialogRow <ScrapeDialogRow
title={title} title={title}
@ -156,7 +166,7 @@ function renderScrapedMoviesRow(
) )
} }
onChange={onChange} onChange={onChange}
newValues={newMovies} newValues={moviesCopy}
onCreateNew={onCreateNew} onCreateNew={onCreateNew}
/> />
); );
@ -189,8 +199,8 @@ function renderScrapedTagsRow(
title: string, title: string,
result: ScrapeResult<string[]>, result: ScrapeResult<string[]>,
onChange: (value: ScrapeResult<string[]>) => void, onChange: (value: ScrapeResult<string[]>) => void,
newTags: GQL.ScrapedSceneTag[], newTags: GQL.ScrapedTag[],
onCreateNew?: (value: GQL.ScrapedSceneTag) => void onCreateNew?: (value: GQL.ScrapedTag) => void
) { ) {
return ( return (
<ScrapeDialogRow <ScrapeDialogRow
@ -238,9 +248,7 @@ export const SceneScrapeDialog: React.FC<ISceneScrapeDialogProps> = (
props.scraped.studio?.stored_id props.scraped.studio?.stored_id
) )
); );
const [newStudio, setNewStudio] = useState< const [newStudio, setNewStudio] = useState<GQL.ScrapedStudio | undefined>(
GQL.ScrapedSceneStudio | undefined
>(
props.scraped.studio && !props.scraped.studio.stored_id props.scraped.studio && !props.scraped.studio.stored_id
? props.scraped.studio ? props.scraped.studio
: undefined : undefined
@ -290,9 +298,9 @@ export const SceneScrapeDialog: React.FC<ISceneScrapeDialogProps> = (
mapStoredIdObjects(props.scraped.performers ?? undefined) mapStoredIdObjects(props.scraped.performers ?? undefined)
) )
); );
const [newPerformers, setNewPerformers] = useState< const [newPerformers, setNewPerformers] = useState<GQL.ScrapedPerformer[]>(
GQL.ScrapedScenePerformer[] props.scraped.performers?.filter((t) => !t.stored_id) ?? []
>(props.scraped.performers?.filter((t) => !t.stored_id) ?? []); );
const [movies, setMovies] = useState<ScrapeResult<string[]>>( const [movies, setMovies] = useState<ScrapeResult<string[]>>(
new ScrapeResult<string[]>( new ScrapeResult<string[]>(
@ -300,7 +308,7 @@ export const SceneScrapeDialog: React.FC<ISceneScrapeDialogProps> = (
mapStoredIdObjects(props.scraped.movies ?? undefined) mapStoredIdObjects(props.scraped.movies ?? undefined)
) )
); );
const [newMovies, setNewMovies] = useState<GQL.ScrapedSceneMovie[]>( const [newMovies, setNewMovies] = useState<GQL.ScrapedMovie[]>(
props.scraped.movies?.filter((t) => !t.stored_id) ?? [] props.scraped.movies?.filter((t) => !t.stored_id) ?? []
); );
@ -310,7 +318,7 @@ export const SceneScrapeDialog: React.FC<ISceneScrapeDialogProps> = (
mapStoredIdObjects(props.scraped.tags ?? undefined) mapStoredIdObjects(props.scraped.tags ?? undefined)
) )
); );
const [newTags, setNewTags] = useState<GQL.ScrapedSceneTag[]>( const [newTags, setNewTags] = useState<GQL.ScrapedTag[]>(
props.scraped.tags?.filter((t) => !t.stored_id) ?? [] props.scraped.tags?.filter((t) => !t.stored_id) ?? []
); );
@ -339,7 +347,7 @@ export const SceneScrapeDialog: React.FC<ISceneScrapeDialogProps> = (
return <></>; return <></>;
} }
async function createNewStudio(toCreate: GQL.ScrapedSceneStudio) { async function createNewStudio(toCreate: GQL.ScrapedStudio) {
try { try {
const result = await createStudio({ const result = await createStudio({
variables: { variables: {
@ -366,7 +374,7 @@ export const SceneScrapeDialog: React.FC<ISceneScrapeDialogProps> = (
} }
} }
async function createNewPerformer(toCreate: GQL.ScrapedScenePerformer) { async function createNewPerformer(toCreate: GQL.ScrapedPerformer) {
const input = makePerformerCreateInput(toCreate); const input = makePerformerCreateInput(toCreate);
try { try {
@ -401,7 +409,7 @@ export const SceneScrapeDialog: React.FC<ISceneScrapeDialogProps> = (
} }
} }
async function createNewMovie(toCreate: GQL.ScrapedSceneMovie) { async function createNewMovie(toCreate: GQL.ScrapedMovie) {
let movieInput: GQL.MovieCreateInput = { name: "" }; let movieInput: GQL.MovieCreateInput = { name: "" };
try { try {
movieInput = Object.assign(movieInput, toCreate); movieInput = Object.assign(movieInput, toCreate);
@ -450,7 +458,7 @@ export const SceneScrapeDialog: React.FC<ISceneScrapeDialogProps> = (
} }
} }
async function createNewTag(toCreate: GQL.ScrapedSceneTag) { async function createNewTag(toCreate: GQL.ScrapedTag) {
const tagInput: GQL.TagCreateInput = { name: toCreate.name ?? "" }; const tagInput: GQL.TagCreateInput = { name: toCreate.name ?? "" };
try { try {
const result = await createTag({ const result = await createTag({

View file

@ -105,7 +105,7 @@ interface IStashSearchResultProps {
setTags: boolean; setTags: boolean;
endpoint: string; endpoint: string;
queueFingerprintSubmission: (sceneId: string, endpoint: string) => void; queueFingerprintSubmission: (sceneId: string, endpoint: string) => void;
createNewTag: (toCreate: GQL.ScrapedSceneTag) => void; createNewTag: (toCreate: GQL.ScrapedTag) => void;
excludedFields: Record<string, boolean>; excludedFields: Record<string, boolean>;
setExcludedFields: (v: Record<string, boolean>) => void; setExcludedFields: (v: Record<string, boolean>) => void;
View file

@ -8,7 +8,6 @@ import { stashBoxSceneBatchQuery, useTagCreate } from "src/core/StashService";
import { SceneQueue } from "src/models/sceneQueue"; import { SceneQueue } from "src/models/sceneQueue";
import { useToast } from "src/hooks"; import { useToast } from "src/hooks";
import { uniqBy } from "lodash";
import { ITaggerConfig } from "./constants"; import { ITaggerConfig } from "./constants";
import { selectScenes, IStashBoxScene } from "./utils"; import { selectScenes, IStashBoxScene } from "./utils";
import { TaggerScene } from "./TaggerScene"; import { TaggerScene } from "./TaggerScene";
@ -25,7 +24,7 @@ interface ITaggerListProps {
queue?: SceneQueue; queue?: SceneQueue;
selectedEndpoint: { endpoint: string; index: number }; selectedEndpoint: { endpoint: string; index: number };
config: ITaggerConfig; config: ITaggerConfig;
queryScene: (searchVal: string) => Promise<GQL.QueryStashBoxSceneQuery>; queryScene: (searchVal: string) => Promise<GQL.ScrapeSingleSceneQuery>;
fingerprintQueue: IFingerprintQueue; fingerprintQueue: IFingerprintQueue;
} }
@ -42,27 +41,8 @@ function fingerprintSearchResults(
return ret; return ret;
} }
// perform matching here scenes.forEach((s) => {
scenes.forEach((scene) => { ret[s.id] = fingerprints[s.id];
// ignore where scene entry is not in results
if (
(scene.checksum && fingerprints[scene.checksum] !== undefined) ||
(scene.oshash && fingerprints[scene.oshash] !== undefined) ||
(scene.phash && fingerprints[scene.phash] !== undefined)
) {
const fingerprintMatches = uniqBy(
[
...(fingerprints[scene.checksum ?? ""] ?? []),
...(fingerprints[scene.oshash ?? ""] ?? []),
...(fingerprints[scene.phash ?? ""] ?? []),
].flat(),
(f) => f.stash_id
);
ret[scene.id] = fingerprintMatches;
} else {
delete ret[scene.id];
}
}); });
return ret; return ret;
@ -119,7 +99,7 @@ export const TaggerList: React.FC<ITaggerListProps> = ({
queryScene(searchVal) queryScene(searchVal)
.then((queryData) => { .then((queryData) => {
const s = selectScenes(queryData.queryStashBoxScene); const s = selectScenes(queryData.scrapeSingleScene);
setSearchResults({ setSearchResults({
...searchResults, ...searchResults,
[sceneID]: s, [sceneID]: s,
@ -179,26 +159,10 @@ export const TaggerList: React.FC<ITaggerListProps> = ({
// clear search errors // clear search errors
setSearchErrors({}); setSearchErrors({});
selectScenes(results.data?.queryStashBoxScene).forEach((scene) => { sceneIDs.forEach((sceneID, index) => {
scene.fingerprints?.forEach((f) => { newFingerprints[sceneID] = selectScenes(
newFingerprints[f.hash] = newFingerprints[f.hash] results.data.scrapeMultiScenes[index]
? [...newFingerprints[f.hash], scene] );
: [scene];
});
});
// Null any ids that are still undefined since it means they weren't found
filteredScenes.forEach((scene) => {
if (scene.oshash) {
newFingerprints[scene.oshash] = newFingerprints[scene.oshash] ?? null;
}
if (scene.checksum) {
newFingerprints[scene.checksum] =
newFingerprints[scene.checksum] ?? null;
}
if (scene.phash) {
newFingerprints[scene.phash] = newFingerprints[scene.phash] ?? null;
}
}); });
const newSearchResults = fingerprintSearchResults(scenes, newFingerprints); const newSearchResults = fingerprintSearchResults(scenes, newFingerprints);
@ -210,7 +174,7 @@ export const TaggerList: React.FC<ITaggerListProps> = ({
setFingerprintError(""); setFingerprintError("");
}; };
async function createNewTag(toCreate: GQL.ScrapedSceneTag) { async function createNewTag(toCreate: GQL.ScrapedTag) {
const tagInput: GQL.TagCreateInput = { name: toCreate.name ?? "" }; const tagInput: GQL.TagCreateInput = { name: toCreate.name ?? "" };
try { try {
const result = await createTag({ const result = await createTag({
@@ -259,20 +223,12 @@ export const TaggerList: React.FC<ITaggerListProps> = ({

   const canFingerprintSearch = () =>
     scenes.some(
-      (s) =>
-        s.stash_ids.length === 0 &&
-        (!s.oshash || fingerprints[s.oshash] === undefined) &&
-        (!s.checksum || fingerprints[s.checksum] === undefined) &&
-        (!s.phash || fingerprints[s.phash] === undefined)
+      (s) => s.stash_ids.length === 0 && fingerprints[s.id] === undefined
     );

   const getFingerprintCount = () => {
     return scenes.filter(
-      (s) =>
-        s.stash_ids.length === 0 &&
-        ((s.checksum && fingerprints[s.checksum]) ||
-          (s.oshash && fingerprints[s.oshash]) ||
-          (s.phash && fingerprints[s.phash]))
+      (s) => s.stash_ids.length === 0 && fingerprints[s.id]?.length > 0
     ).length;
   };
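With this hunk, fingerprint search results are keyed by the stash scene ID instead of by each of the scene's checksum, oshash, and phash values, so both checks above reduce to a single map lookup. A minimal sketch of the shape this implies; the type alias is an assumption for illustration, not code from the diff:

    // Assumed shape: each stash scene ID maps to the stash-box scenes matched for it.
    type FingerprintMap = Record<string, IStashBoxScene[] | undefined>;

    // A scene still needs a fingerprint search if it has no stash_ids and no entry yet.
    const needsSearch = (s: GQL.SlimSceneDataFragment, fp: FingerprintMap) =>
      s.stash_ids.length === 0 && fp[s.id] === undefined;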

@@ -95,7 +95,7 @@ export interface ITaggerScene {
   tagScene: (scene: Partial<GQL.SlimSceneDataFragment>) => void;
   endpoint: string;
   queueFingerprintSubmission: (sceneId: string, endpoint: string) => void;
-  createNewTag: (toCreate: GQL.ScrapedSceneTag) => void;
+  createNewTag: (toCreate: GQL.ScrapedTag) => void;
 }

 export const TaggerScene: React.FC<ITaggerScene> = ({

@@ -97,9 +97,7 @@ const PerformerTaggerList: React.FC<IPerformerTaggerListProps> = ({
   const doBoxSearch = (performerID: string, searchVal: string) => {
     stashBoxPerformerQuery(searchVal, selectedEndpoint.index)
       .then((queryData) => {
-        const s = selectPerformers(
-          queryData.data?.queryStashBoxPerformer?.[0].results ?? []
-        );
+        const s = selectPerformers(queryData.data?.scrapeSinglePerformer ?? []);
         setSearchResults({
           ...searchResults,
           [performerID]: s,
@@ -137,7 +135,7 @@ const PerformerTaggerList: React.FC<IPerformerTaggerListProps> = ({
     stashBoxPerformerQuery(stashID, endpointIndex)
       .then((queryData) => {
         const data = selectPerformers(
-          queryData.data?.queryStashBoxPerformer?.[0].results ?? []
+          queryData.data?.scrapeSinglePerformer ?? []
         );
         if (data.length > 0) {
           setModalPerformer({

@@ -201,7 +201,7 @@ export interface IStashBoxScene {
   fingerprints: IStashBoxFingerprint[];
 }

-const selectStudio = (studio: GQL.ScrapedSceneStudio): IStashBoxStudio => ({
+const selectStudio = (studio: GQL.ScrapedStudio): IStashBoxStudio => ({
   id: studio?.stored_id ?? undefined,
   stash_id: studio.remote_site_id!,
   name: studio.name,
@@ -212,14 +212,14 @@ const selectFingerprints = (
   scene: GQL.ScrapedScene | null
 ): IStashBoxFingerprint[] => scene?.fingerprints ?? [];

-const selectTags = (tags: GQL.ScrapedSceneTag[]): IStashBoxTag[] =>
+const selectTags = (tags: GQL.ScrapedTag[]): IStashBoxTag[] =>
   tags.map((t) => ({
     id: t.stored_id ?? undefined,
     name: t.name ?? "",
   }));

 export const selectPerformers = (
-  performers: GQL.ScrapedScenePerformer[]
+  performers: GQL.ScrapedPerformer[]
 ): IStashBoxPerformer[] =>
   performers.map((p) => ({
     id: p.stored_id ?? undefined,

@@ -257,17 +257,17 @@ export const useSceneMarkerDestroy = () =>
 export const useListPerformerScrapers = () =>
   GQL.useListPerformerScrapersQuery();

 export const useScrapePerformerList = (scraperId: string, q: string) =>
-  GQL.useScrapePerformerListQuery({
-    variables: { scraper_id: scraperId, query: q },
+  GQL.useScrapeSinglePerformerQuery({
+    variables: {
+      source: {
+        scraper_id: scraperId,
+      },
+      input: {
+        query: q,
+      },
+    },
     skip: q === "",
   });
-export const useScrapePerformer = (
-  scraperId: string,
-  scrapedPerformer: GQL.ScrapedPerformerInput
-) =>
-  GQL.useScrapePerformerQuery({
-    variables: { scraper_id: scraperId, scraped_performer: scrapedPerformer },
-  });

 export const useListSceneScrapers = () => GQL.useListSceneScrapersQuery();
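useScrapePerformerList now wraps the unified single-performer scrape query, passing the scraper as the source and the free-text query as the input. A hedged usage sketch; the scraper id is a placeholder, and the result field follows the scrapeSinglePerformer field used elsewhere in this diff:

    // Inside a component; "example-scraper" is a placeholder id, not a real scraper.
    const { data, loading } = useScrapePerformerList("example-scraper", name);
    const candidates = data?.scrapeSinglePerformer ?? [];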
@@ -814,11 +814,15 @@ export const queryScrapePerformer = (
   scraperId: string,
   scrapedPerformer: GQL.ScrapedPerformerInput
 ) =>
-  client.query<GQL.ScrapePerformerQuery>({
-    query: GQL.ScrapePerformerDocument,
+  client.query<GQL.ScrapeSinglePerformerQuery>({
+    query: GQL.ScrapeSinglePerformerDocument,
     variables: {
-      scraper_id: scraperId,
-      scraped_performer: scrapedPerformer,
+      source: {
+        scraper_id: scraperId,
+      },
+      input: {
+        performer_input: scrapedPerformer,
+      },
     },
     fetchPolicy: "network-only",
   });
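queryScrapePerformer keeps its signature but now issues the single-performer scrape query with a source/input pair. A minimal call-site sketch, assuming a ScrapedPerformerInput that only carries a name; the scraper id and name are placeholders:

    const result = await queryScrapePerformer("example-scraper", { name: "Jane Doe" });
    const performers = result.data.scrapeSinglePerformer ?? [];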
@@ -859,26 +863,29 @@ export const queryScrapeMovieURL = (url: string) =>
     fetchPolicy: "network-only",
   });

-export const queryScrapeScene = (
-  scraperId: string,
-  scene: GQL.SceneUpdateInput
-) =>
-  client.query<GQL.ScrapeSceneQuery>({
-    query: GQL.ScrapeSceneDocument,
+export const queryScrapeScene = (scraperId: string, sceneId: string) =>
+  client.query<GQL.ScrapeSingleSceneQuery>({
+    query: GQL.ScrapeSingleSceneDocument,
     variables: {
-      scraper_id: scraperId,
-      scene,
+      source: {
+        scraper_id: scraperId,
+      },
+      input: {
+        scene_id: sceneId,
+      },
     },
     fetchPolicy: "network-only",
   });

 export const queryStashBoxScene = (stashBoxIndex: number, sceneID: string) =>
-  client.query<GQL.QueryStashBoxSceneQuery>({
-    query: GQL.QueryStashBoxSceneDocument,
+  client.query<GQL.ScrapeSingleSceneQuery>({
+    query: GQL.ScrapeSingleSceneDocument,
     variables: {
-      input: {
-        stash_box_index: stashBoxIndex,
-        scene_ids: [sceneID],
+      source: {
+        stash_box_index: stashBoxIndex,
+      },
+      input: {
+        scene_id: sceneID,
      },
     },
   });
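Scraper-based and stash-box-based single-scene lookups now issue the same ScrapeSingleScene query and differ only in the source they pass. A rough sketch of the two call shapes; the scraper id and stash-box index are placeholders, and both results are assumed to carry a list of ScrapedScene entries:

    const viaScraper = await queryScrapeScene("example-scraper", scene.id);
    const viaStashBox = await queryStashBoxScene(0, scene.id);
    // Either result is read the same way:
    const scraped = viaStashBox.data.scrapeSingleScene ?? [];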
@@ -887,25 +894,28 @@ export const queryStashBoxPerformer = (
   stashBoxIndex: number,
   performerID: string
 ) =>
-  client.query<GQL.QueryStashBoxPerformerQuery>({
-    query: GQL.QueryStashBoxPerformerDocument,
+  client.query<GQL.ScrapeSinglePerformerQuery>({
+    query: GQL.ScrapeSinglePerformerDocument,
     variables: {
-      input: {
-        stash_box_index: stashBoxIndex,
-        performer_ids: [performerID],
+      source: {
+        stash_box_index: stashBoxIndex,
+      },
+      input: {
+        performer_id: performerID,
       },
     },
   });

-export const queryScrapeGallery = (
-  scraperId: string,
-  gallery: GQL.GalleryUpdateInput
-) =>
-  client.query<GQL.ScrapeGalleryQuery>({
-    query: GQL.ScrapeGalleryDocument,
+export const queryScrapeGallery = (scraperId: string, galleryId: string) =>
+  client.query<GQL.ScrapeSingleGalleryQuery>({
+    query: GQL.ScrapeSingleGalleryDocument,
     variables: {
-      scraper_id: scraperId,
-      gallery,
+      source: {
+        scraper_id: scraperId,
+      },
+      input: {
+        gallery_id: galleryId,
+      },
     },
     fetchPolicy: "network-only",
   });
@@ -1017,11 +1027,9 @@ export const queryParseSceneFilenames = (
     fetchPolicy: "network-only",
   });

-export const makePerformerCreateInput = (
-  toCreate: GQL.ScrapedScenePerformer
-) => {
+export const makePerformerCreateInput = (toCreate: GQL.ScrapedPerformer) => {
   const input: GQL.PerformerCreateInput = {
-    name: toCreate.name,
+    name: toCreate.name ?? "",
     url: toCreate.url,
     gender: stringToGender(toCreate.gender),
     birthdate: toCreate.birthdate,
@@ -1051,37 +1059,47 @@ export const makePerformerCreateInput = (
 };

 export const stashBoxSceneQuery = (searchVal: string, stashBoxIndex: number) =>
-  client?.query<
-    GQL.QueryStashBoxSceneQuery,
-    GQL.QueryStashBoxSceneQueryVariables
-  >({
-    query: GQL.QueryStashBoxSceneDocument,
-    variables: { input: { q: searchVal, stash_box_index: stashBoxIndex } },
+  client.query<GQL.ScrapeSingleSceneQuery>({
+    query: GQL.ScrapeSingleSceneDocument,
+    variables: {
+      source: {
+        stash_box_index: stashBoxIndex,
+      },
+      input: {
+        query: searchVal,
+      },
+    },
   });

 export const stashBoxPerformerQuery = (
   searchVal: string,
   stashBoxIndex: number
 ) =>
-  client?.query<
-    GQL.QueryStashBoxPerformerQuery,
-    GQL.QueryStashBoxPerformerQueryVariables
-  >({
-    query: GQL.QueryStashBoxPerformerDocument,
-    variables: { input: { q: searchVal, stash_box_index: stashBoxIndex } },
+  client.query<GQL.ScrapeSinglePerformerQuery>({
+    query: GQL.ScrapeSinglePerformerDocument,
+    variables: {
+      source: {
+        stash_box_index: stashBoxIndex,
+      },
+      input: {
+        query: searchVal,
+      },
+    },
   });

 export const stashBoxSceneBatchQuery = (
   sceneIds: string[],
   stashBoxIndex: number
 ) =>
-  client?.query<
-    GQL.QueryStashBoxSceneQuery,
-    GQL.QueryStashBoxSceneQueryVariables
-  >({
-    query: GQL.QueryStashBoxSceneDocument,
+  client.query<GQL.ScrapeMultiScenesQuery>({
+    query: GQL.ScrapeMultiScenesDocument,
     variables: {
-      input: { scene_ids: sceneIds, stash_box_index: stashBoxIndex },
+      source: {
+        stash_box_index: stashBoxIndex,
+      },
+      input: {
+        scene_ids: sceneIds,
+      },
     },
   });
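The batch helper is assumed to return one scrapeMultiScenes entry per requested scene id, in request order, which is how the TaggerList hunk earlier in this diff indexes the results. A consumption sketch under that assumption:

    const results = await stashBoxSceneBatchQuery(sceneIDs, stashBoxIndex);
    const matchesById: Record<string, IStashBoxScene[]> = {};
    sceneIDs.forEach((sceneID, index) => {
      // selectScenes maps the raw ScrapedScene list into the UI's IStashBoxScene shape.
      matchesById[sceneID] = selectScenes(results.data.scrapeMultiScenes[index]);
    });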
@@ -1089,12 +1107,14 @@ export const stashBoxPerformerBatchQuery = (
   performerIds: string[],
   stashBoxIndex: number
 ) =>
-  client?.query<
-    GQL.QueryStashBoxPerformerQuery,
-    GQL.QueryStashBoxPerformerQueryVariables
-  >({
-    query: GQL.QueryStashBoxPerformerDocument,
+  client.query<GQL.ScrapeMultiPerformersQuery>({
+    query: GQL.ScrapeMultiPerformersDocument,
     variables: {
-      input: { performer_ids: performerIds, stash_box_index: stashBoxIndex },
+      source: {
+        stash_box_index: stashBoxIndex,
+      },
+      input: {
+        performer_ids: performerIds,
+      },
     },
   });