Mirror of https://github.com/stashapp/stash.git (synced 2025-12-06 16:34:02 +01:00)
Refactor stashbox package (#5699)
* Move stashbox package under pkg
* Remove StashBox from method names
* Add fingerprint conversion methods to Fingerprint
  Refactor Fingerprints methods
* Make FindSceneByFingerprints accept fingerprints not scene ids
* Refactor SubmitSceneDraft to not require readers
* Have SubmitFingerprints accept scenes
  Remove SceneReader dependency
* Move ScrapedScene to models package
* Move ScrapedImage into models package
* Move ScrapedGallery into models package
* Move Scene relationship matching out of stashbox package
  This is now expected to be done in the client code
* Remove TagFinder dependency from stashbox.Client
* Make stashbox scene find full hierarchy of studios
* Move studio resolution into separate method
* Move studio matching out of stashbox package
  This is now client code responsibility
* Move performer matching out of FindPerformerByID and FindPerformerByName
* Refactor performer querying logic and remove unused stashbox models
  Renames FindStashBoxPerformersByPerformerNames to QueryPerformers and accepts names instead of performer ids
* Refactor SubmitPerformerDraft to not load relationships
  This will be the responsibility of the calling code
* Remove repository references
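A rough caller-side sketch of what the refactor means for code that uses the stashbox client. This is not taken verbatim from the diff: the wiring (box, repo, excludePatterns, fingerprints, the scrapeAndMatch function itself) is assumed for illustration; only the call shapes (stashbox.NewClient without a repository, FindSceneByFingerprints taking fingerprints, match.SceneRelationships.MatchRelationships) come from this commit.

// Hypothetical sketch; box, repo, excludePatterns and fingerprints stand in
// for values the real callers already hold.
func scrapeAndMatch(ctx context.Context, box models.StashBox, repo models.Repository,
    excludePatterns []string, fingerprints models.Fingerprints) ([]*models.ScrapedScene, error) {
    // NewClient no longer takes a repository argument.
    client := stashbox.NewClient(box, excludePatterns)

    // FindSceneByFingerprints now accepts fingerprints rather than a scene ID.
    scraped, err := client.FindSceneByFingerprints(ctx, fingerprints)
    if err != nil {
        return nil, err
    }

    // Relationship matching moved out of the stashbox package into client code.
    matcher := match.SceneRelationships{
        PerformerFinder: repo.Performer,
        TagFinder:       repo.Tag,
        StudioFinder:    repo.Studio,
    }
    for _, s := range scraped {
        if err := matcher.MatchRelationships(ctx, s, box.Endpoint); err != nil {
            return nil, err
        }
    }
    return scraped, nil
}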
This commit is contained in:
parent 5d3d02e1e7
commit db7d45792e
43 changed files with 1292 additions and 1163 deletions
@@ -17,7 +17,7 @@ autobind:
   - github.com/stashapp/stash/pkg/scraper
   - github.com/stashapp/stash/internal/identify
   - github.com/stashapp/stash/internal/dlna
-  - github.com/stashapp/stash/pkg/scraper/stashbox
+  - github.com/stashapp/stash/pkg/stashbox

 models:
   # Scalars
@@ -13,7 +13,6 @@ import (
     "github.com/stashapp/stash/pkg/models"
     "github.com/stashapp/stash/pkg/plugin/hook"
     "github.com/stashapp/stash/pkg/scraper"
-    "github.com/stashapp/stash/pkg/scraper/stashbox"
 )

 var (
@@ -138,10 +137,6 @@ func (r *Resolver) withReadTxn(ctx context.Context, fn func(ctx context.Context)
     return r.repository.WithReadTxn(ctx, fn)
 }

-func (r *Resolver) stashboxRepository() stashbox.Repository {
-    return stashbox.NewRepository(r.repository)
-}
-
 func (r *queryResolver) MarkerWall(ctx context.Context, q *string) (ret []*models.SceneMarker, err error) {
     if err := r.withReadTxn(ctx, func(ctx context.Context) error {
         ret, err = r.repository.SceneMarker.Wall(ctx, q)
@@ -7,6 +7,10 @@ import (

     "github.com/stashapp/stash/internal/manager"
     "github.com/stashapp/stash/pkg/logger"
+    "github.com/stashapp/stash/pkg/models"
+    "github.com/stashapp/stash/pkg/scene"
+    "github.com/stashapp/stash/pkg/sliceutil/stringslice"
+    "github.com/stashapp/stash/pkg/stashbox"
 )

 func (r *mutationResolver) SubmitStashBoxFingerprints(ctx context.Context, input StashBoxFingerprintSubmissionInput) (bool, error) {
@@ -15,8 +19,23 @@ func (r *mutationResolver) SubmitStashBoxFingerprints(ctx context.Context, input
         return false, err
     }

+    ids, err := stringslice.StringSliceToIntSlice(input.SceneIds)
+    if err != nil {
+        return false, err
+    }
+
     client := r.newStashBoxClient(*b)
-    return client.SubmitStashBoxFingerprints(ctx, input.SceneIds)
+
+    var scenes []*models.Scene
+
+    if err := r.withReadTxn(ctx, func(ctx context.Context) error {
+        scenes, err = r.sceneService.FindMany(ctx, ids, scene.LoadStashIDs, scene.LoadFiles)
+        return err
+    }); err != nil {
+        return false, err
+    }
+
+    return client.SubmitFingerprints(ctx, scenes)
 }

 func (r *mutationResolver) StashBoxBatchPerformerTag(ctx context.Context, input manager.StashBoxBatchTagInput) (string, error) {
@@ -69,17 +88,76 @@ func (r *mutationResolver) SubmitStashBoxSceneDraft(ctx context.Context, input S
         logger.Errorf("Error getting scene cover: %v", err)
     }

-    if err := scene.LoadURLs(ctx, r.repository.Scene); err != nil {
-        return fmt.Errorf("loading scene URLs: %w", err)
+    draft, err := r.makeSceneDraft(ctx, scene, cover)
+    if err != nil {
+        return err
     }

-    res, err = client.SubmitSceneDraft(ctx, scene, cover)
+    res, err = client.SubmitSceneDraft(ctx, *draft)
     return err
 })

 return res, err
 }

+func (r *mutationResolver) makeSceneDraft(ctx context.Context, s *models.Scene, cover []byte) (*stashbox.SceneDraft, error) {
+    if err := s.LoadURLs(ctx, r.repository.Scene); err != nil {
+        return nil, fmt.Errorf("loading scene URLs: %w", err)
+    }
+
+    if err := s.LoadStashIDs(ctx, r.repository.Scene); err != nil {
+        return nil, err
+    }
+
+    draft := &stashbox.SceneDraft{
+        Scene: s,
+    }
+
+    pqb := r.repository.Performer
+    sqb := r.repository.Studio
+
+    if s.StudioID != nil {
+        var err error
+        draft.Studio, err = sqb.Find(ctx, *s.StudioID)
+        if err != nil {
+            return nil, err
+        }
+        if draft.Studio == nil {
+            return nil, fmt.Errorf("studio with id %d not found", *s.StudioID)
+        }
+
+        if err := draft.Studio.LoadStashIDs(ctx, r.repository.Studio); err != nil {
+            return nil, err
+        }
+    }
+
+    // submit all file fingerprints
+    if err := s.LoadFiles(ctx, r.repository.Scene); err != nil {
+        return nil, err
+    }
+
+    scenePerformers, err := pqb.FindBySceneID(ctx, s.ID)
+    if err != nil {
+        return nil, err
+    }
+
+    for _, p := range scenePerformers {
+        if err := p.LoadStashIDs(ctx, pqb); err != nil {
+            return nil, err
+        }
+    }
+    draft.Performers = scenePerformers
+
+    draft.Tags, err = r.repository.Tag.FindBySceneID(ctx, s.ID)
+    if err != nil {
+        return nil, err
+    }
+
+    draft.Cover = cover
+
+    return draft, nil
+}
+
 func (r *mutationResolver) SubmitStashBoxPerformerDraft(ctx context.Context, input StashBoxDraftSubmissionInput) (*string, error) {
     b, err := resolveStashBox(input.StashBoxIndex, input.StashBoxEndpoint)
     if err != nil {
@@ -105,7 +183,22 @@ func (r *mutationResolver) SubmitStashBoxPerformerDraft(ctx context.Context, inp
         return fmt.Errorf("performer with id %d not found", id)
     }

-    res, err = client.SubmitPerformerDraft(ctx, performer)
+    pqb := r.repository.Performer
+    if err := performer.LoadAliases(ctx, pqb); err != nil {
+        return err
+    }
+
+    if err := performer.LoadURLs(ctx, pqb); err != nil {
+        return err
+    }
+
+    if err := performer.LoadStashIDs(ctx, pqb); err != nil {
+        return err
+    }
+
+    img, _ := pqb.GetImage(ctx, performer.ID)
+
+    res, err = client.SubmitPerformerDraft(ctx, performer, img)
     return err
 })
@@ -6,9 +6,10 @@ import (
     "fmt"
     "strconv"

+    "github.com/stashapp/stash/pkg/match"
     "github.com/stashapp/stash/pkg/models"
     "github.com/stashapp/stash/pkg/scraper"
-    "github.com/stashapp/stash/pkg/scraper/stashbox"
+    "github.com/stashapp/stash/pkg/sliceutil"
     "github.com/stashapp/stash/pkg/sliceutil/stringslice"
 )

@@ -29,7 +30,7 @@ func (r *queryResolver) ScrapePerformerURL(ctx context.Context, url string) (*mo
     return marshalScrapedPerformer(content)
 }

-func (r *queryResolver) ScrapeSceneQuery(ctx context.Context, scraperID string, query string) ([]*scraper.ScrapedScene, error) {
+func (r *queryResolver) ScrapeSceneQuery(ctx context.Context, scraperID string, query string) ([]*models.ScrapedScene, error) {
     if query == "" {
         return nil, nil
     }
@@ -47,7 +48,7 @@ func (r *queryResolver) ScrapeSceneQuery(ctx context.Context, scraperID string,
     return ret, nil
 }

-func (r *queryResolver) ScrapeSceneURL(ctx context.Context, url string) (*scraper.ScrapedScene, error) {
+func (r *queryResolver) ScrapeSceneURL(ctx context.Context, url string) (*models.ScrapedScene, error) {
     content, err := r.scraperCache().ScrapeURL(ctx, url, scraper.ScrapeContentTypeScene)
     if err != nil {
         return nil, err
@@ -61,7 +62,7 @@ func (r *queryResolver) ScrapeSceneURL(ctx context.Context, url string) (*scrape
     return ret, nil
 }

-func (r *queryResolver) ScrapeGalleryURL(ctx context.Context, url string) (*scraper.ScrapedGallery, error) {
+func (r *queryResolver) ScrapeGalleryURL(ctx context.Context, url string) (*models.ScrapedGallery, error) {
     content, err := r.scraperCache().ScrapeURL(ctx, url, scraper.ScrapeContentTypeGallery)
     if err != nil {
         return nil, err
@@ -75,7 +76,7 @@ func (r *queryResolver) ScrapeGalleryURL(ctx context.Context, url string) (*scra
     return ret, nil
 }

-func (r *queryResolver) ScrapeImageURL(ctx context.Context, url string) (*scraper.ScrapedImage, error) {
+func (r *queryResolver) ScrapeImageURL(ctx context.Context, url string) (*models.ScrapedImage, error) {
     content, err := r.scraperCache().ScrapeURL(ctx, url, scraper.ScrapeContentTypeImage)
     if err != nil {
         return nil, err
@@ -129,8 +130,8 @@ func (r *queryResolver) ScrapeGroupURL(ctx context.Context, url string) (*models
     return group, nil
 }

-func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source scraper.Source, input ScrapeSingleSceneInput) ([]*scraper.ScrapedScene, error) {
-    var ret []*scraper.ScrapedScene
+func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source scraper.Source, input ScrapeSingleSceneInput) ([]*models.ScrapedScene, error) {
+    var ret []*models.ScrapedScene

     var sceneID int
     if input.SceneID != nil {
@@ -182,9 +183,14 @@ func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source scraper.So

         switch {
         case input.SceneID != nil:
-            ret, err = client.FindStashBoxSceneByFingerprints(ctx, sceneID)
+            var fps []models.Fingerprints
+            fps, err = r.getScenesFingerprints(ctx, []int{sceneID})
+            if err != nil {
+                return nil, err
+            }
+            ret, err = client.FindSceneByFingerprints(ctx, fps[0])
         case input.Query != nil:
-            ret, err = client.QueryStashBoxScene(ctx, *input.Query)
+            ret, err = client.QueryScene(ctx, *input.Query)
         default:
             return nil, fmt.Errorf("%w: scene_id or query must be set", ErrInput)
         }
@@ -192,6 +198,11 @@ func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source scraper.So
         if err != nil {
             return nil, err
         }
+
+        // TODO - this should happen after any scene is scraped
+        if err := r.matchScenesRelationships(ctx, ret, *source.StashBoxEndpoint); err != nil {
+            return nil, err
+        }
     default:
         return nil, fmt.Errorf("%w: scraper_id or stash_box_index must be set", ErrInput)
     }
@@ -199,7 +210,7 @@ func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source scraper.So
     return ret, nil
 }

-func (r *queryResolver) ScrapeMultiScenes(ctx context.Context, source scraper.Source, input ScrapeMultiScenesInput) ([][]*scraper.ScrapedScene, error) {
+func (r *queryResolver) ScrapeMultiScenes(ctx context.Context, source scraper.Source, input ScrapeMultiScenesInput) ([][]*models.ScrapedScene, error) {
     if source.ScraperID != nil {
         return nil, ErrNotImplemented
     } else if source.StashBoxIndex != nil || source.StashBoxEndpoint != nil {
@@ -215,12 +226,89 @@ func (r *queryResolver) ScrapeMultiScenes(ctx context.Context, source scraper.So
             return nil, err
         }

-        return client.FindStashBoxScenesByFingerprints(ctx, sceneIDs)
+        fps, err := r.getScenesFingerprints(ctx, sceneIDs)
+        if err != nil {
+            return nil, err
+        }
+
+        ret, err := client.FindScenesByFingerprints(ctx, fps)
+        if err != nil {
+            return nil, err
+        }
+
+        // match relationships - this mutates the existing scenes so we can
+        // just flatten the slice and pass it in
+        flat := sliceutil.Flatten(ret)
+
+        if err := r.matchScenesRelationships(ctx, flat, *source.StashBoxEndpoint); err != nil {
+            return nil, err
+        }
+
+        return ret, nil
     }

     return nil, errors.New("scraper_id or stash_box_index must be set")
 }

+func (r *queryResolver) getScenesFingerprints(ctx context.Context, ids []int) ([]models.Fingerprints, error) {
+    fingerprints := make([]models.Fingerprints, len(ids))
+
+    if err := r.withReadTxn(ctx, func(ctx context.Context) error {
+        qb := r.repository.Scene
+
+        for i, sceneID := range ids {
+            scene, err := qb.Find(ctx, sceneID)
+            if err != nil {
+                return err
+            }
+
+            if scene == nil {
+                return fmt.Errorf("scene with id %d not found", sceneID)
+            }
+
+            if err := scene.LoadFiles(ctx, qb); err != nil {
+                return err
+            }
+
+            var sceneFPs models.Fingerprints
+
+            for _, f := range scene.Files.List() {
+                sceneFPs = append(sceneFPs, f.Fingerprints...)
+            }
+
+            fingerprints[i] = sceneFPs
+        }
+
+        return nil
+    }); err != nil {
+        return nil, err
+    }
+
+    return fingerprints, nil
+}
+
+// matchSceneRelationships accepts scraped scenes and attempts to match its relationships to existing stash models.
+func (r *queryResolver) matchScenesRelationships(ctx context.Context, ss []*models.ScrapedScene, endpoint string) error {
+    if err := r.withReadTxn(ctx, func(ctx context.Context) error {
+        matcher := match.SceneRelationships{
+            PerformerFinder: r.repository.Performer,
+            TagFinder:       r.repository.Tag,
+            StudioFinder:    r.repository.Studio,
+        }
+
+        for _, s := range ss {
+            if err := matcher.MatchRelationships(ctx, s, endpoint); err != nil {
+                return err
+            }
+        }
+        return nil
+    }); err != nil {
+        return err
+    }
+
+    return nil
+}
+
 func (r *queryResolver) ScrapeSingleStudio(ctx context.Context, source scraper.Source, input ScrapeSingleStudioInput) ([]*models.ScrapedStudio, error) {
     if source.StashBoxIndex != nil || source.StashBoxEndpoint != nil {
         b, err := resolveStashBox(source.StashBoxIndex, source.StashBoxEndpoint)
@@ -231,7 +319,7 @@ func (r *queryResolver) ScrapeSingleStudio(ctx context.Context, source scraper.S
         client := r.newStashBoxClient(*b)

         var ret []*models.ScrapedStudio
-        out, err := client.FindStashBoxStudio(ctx, *input.Query)
+        out, err := client.FindStudio(ctx, *input.Query)

         if err != nil {
             return nil, err
@@ -240,6 +328,17 @@ func (r *queryResolver) ScrapeSingleStudio(ctx context.Context, source scraper.S
         }

         if len(ret) > 0 {
+            if err := r.withReadTxn(ctx, func(ctx context.Context) error {
+                for _, studio := range ret {
+                    if err := match.ScrapedStudioHierarchy(ctx, r.repository.Studio, studio, *source.StashBoxEndpoint); err != nil {
+                        return err
+                    }
+                }
+
+                return nil
+            }); err != nil {
+                return nil, err
+            }
             return ret, nil
         }

@@ -285,22 +384,28 @@ func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source scrape

         client := r.newStashBoxClient(*b)

-        var res []*stashbox.StashBoxPerformerQueryResult
+        var query string
         switch {
         case input.PerformerID != nil:
-            res, err = client.FindStashBoxPerformersByNames(ctx, []string{*input.PerformerID})
-        case input.Query != nil:
-            res, err = client.QueryStashBoxPerformer(ctx, *input.Query)
-        default:
-            return nil, ErrNotImplemented
-        }
-
+            names, err := r.findPerformerNames(ctx, []string{*input.PerformerID})
             if err != nil {
                 return nil, err
             }

-        if len(res) > 0 {
-            ret = res[0].Results
+            query = names[0]
+        case input.Query != nil:
+            query = *input.Query
+        default:
+            return nil, ErrNotImplemented
+        }
+
+        if query == "" {
+            return nil, nil
+        }
+        ret, err = client.QueryPerformer(ctx, query)
+
+        if err != nil {
+            return nil, err
         }
     default:
         return nil, errors.New("scraper_id or stash_box_index must be set")
@@ -313,6 +418,11 @@ func (r *queryResolver) ScrapeMultiPerformers(ctx context.Context, source scrape
     if source.ScraperID != nil {
         return nil, ErrNotImplemented
     } else if source.StashBoxIndex != nil || source.StashBoxEndpoint != nil {
+        names, err := r.findPerformerNames(ctx, input.PerformerIds)
+        if err != nil {
+            return nil, err
+        }
+
         b, err := resolveStashBox(source.StashBoxIndex, source.StashBoxEndpoint)
         if err != nil {
             return nil, err
@@ -320,14 +430,40 @@ func (r *queryResolver) ScrapeMultiPerformers(ctx context.Context, source scrape

         client := r.newStashBoxClient(*b)

-        return client.FindStashBoxPerformersByPerformerNames(ctx, input.PerformerIds)
+        return client.QueryPerformers(ctx, names)
     }

     return nil, errors.New("scraper_id or stash_box_index must be set")
 }

-func (r *queryResolver) ScrapeSingleGallery(ctx context.Context, source scraper.Source, input ScrapeSingleGalleryInput) ([]*scraper.ScrapedGallery, error) {
-    var ret []*scraper.ScrapedGallery
+func (r *queryResolver) findPerformerNames(ctx context.Context, performerIDs []string) ([]string, error) {
+    ids, err := stringslice.StringSliceToIntSlice(performerIDs)
+    if err != nil {
+        return nil, err
+    }
+
+    names := make([]string, len(ids))
+
+    if err := r.withReadTxn(ctx, func(ctx context.Context) error {
+        p, err := r.repository.Performer.FindMany(ctx, ids)
+        if err != nil {
+            return err
+        }
+
+        for i, pp := range p {
+            names[i] = pp.Name
+        }
+
+        return nil
+    }); err != nil {
+        return nil, err
+    }
+
+    return names, nil
+}
+
+func (r *queryResolver) ScrapeSingleGallery(ctx context.Context, source scraper.Source, input ScrapeSingleGalleryInput) ([]*models.ScrapedGallery, error) {
+    var ret []*models.ScrapedGallery

     if source.StashBoxIndex != nil || source.StashBoxEndpoint != nil {
         return nil, ErrNotSupported
@@ -369,7 +505,7 @@ func (r *queryResolver) ScrapeSingleGallery(ctx context.Context, source scraper.
     return ret, nil
 }

-func (r *queryResolver) ScrapeSingleImage(ctx context.Context, source scraper.Source, input ScrapeSingleImageInput) ([]*scraper.ScrapedImage, error) {
+func (r *queryResolver) ScrapeSingleImage(ctx context.Context, source scraper.Source, input ScrapeSingleImageInput) ([]*models.ScrapedImage, error) {
     if source.StashBoxIndex != nil {
         return nil, ErrNotSupported
     }
@@ -9,8 +9,8 @@ import (

 // marshalScrapedScenes converts ScrapedContent into ScrapedScene. If conversion fails, an
 // error is returned to the caller.
-func marshalScrapedScenes(content []scraper.ScrapedContent) ([]*scraper.ScrapedScene, error) {
-    var ret []*scraper.ScrapedScene
+func marshalScrapedScenes(content []scraper.ScrapedContent) ([]*models.ScrapedScene, error) {
+    var ret []*models.ScrapedScene
     for _, c := range content {
         if c == nil {
             // graphql schema requires scenes to be non-nil
@@ -18,9 +18,9 @@ func marshalScrapedScenes(content []scraper.ScrapedContent) ([]*scraper.ScrapedS
         }

         switch s := c.(type) {
-        case *scraper.ScrapedScene:
+        case *models.ScrapedScene:
             ret = append(ret, s)
-        case scraper.ScrapedScene:
+        case models.ScrapedScene:
             ret = append(ret, &s)
         default:
             return nil, fmt.Errorf("%w: cannot turn ScrapedContent into ScrapedScene", models.ErrConversion)
@@ -55,8 +55,8 @@ func marshalScrapedPerformers(content []scraper.ScrapedContent) ([]*models.Scrap

 // marshalScrapedGalleries converts ScrapedContent into ScrapedGallery. If
 // conversion fails, an error is returned.
-func marshalScrapedGalleries(content []scraper.ScrapedContent) ([]*scraper.ScrapedGallery, error) {
-    var ret []*scraper.ScrapedGallery
+func marshalScrapedGalleries(content []scraper.ScrapedContent) ([]*models.ScrapedGallery, error) {
+    var ret []*models.ScrapedGallery
     for _, c := range content {
         if c == nil {
             // graphql schema requires galleries to be non-nil
@@ -64,9 +64,9 @@ func marshalScrapedGalleries(content []scraper.ScrapedContent) ([]*scraper.Scrap
         }

         switch g := c.(type) {
-        case *scraper.ScrapedGallery:
+        case *models.ScrapedGallery:
             ret = append(ret, g)
-        case scraper.ScrapedGallery:
+        case models.ScrapedGallery:
             ret = append(ret, &g)
         default:
             return nil, fmt.Errorf("%w: cannot turn ScrapedContent into ScrapedGallery", models.ErrConversion)
@@ -76,8 +76,8 @@ func marshalScrapedGalleries(content []scraper.ScrapedContent) ([]*scraper.Scrap
     return ret, nil
 }

-func marshalScrapedImages(content []scraper.ScrapedContent) ([]*scraper.ScrapedImage, error) {
-    var ret []*scraper.ScrapedImage
+func marshalScrapedImages(content []scraper.ScrapedContent) ([]*models.ScrapedImage, error) {
+    var ret []*models.ScrapedImage
     for _, c := range content {
         if c == nil {
             // graphql schema requires images to be non-nil
@@ -85,9 +85,9 @@ func marshalScrapedImages(content []scraper.ScrapedContent) ([]*scraper.ScrapedI
         }

         switch g := c.(type) {
-        case *scraper.ScrapedImage:
+        case *models.ScrapedImage:
             ret = append(ret, g)
-        case scraper.ScrapedImage:
+        case models.ScrapedImage:
             ret = append(ret, &g)
         default:
             return nil, fmt.Errorf("%w: cannot turn ScrapedContent into ScrapedImage", models.ErrConversion)
@@ -131,7 +131,7 @@ func marshalScrapedPerformer(content scraper.ScrapedContent) (*models.ScrapedPer
 }

 // marshalScrapedScene will marshal a single scraped scene
-func marshalScrapedScene(content scraper.ScrapedContent) (*scraper.ScrapedScene, error) {
+func marshalScrapedScene(content scraper.ScrapedContent) (*models.ScrapedScene, error) {
     s, err := marshalScrapedScenes([]scraper.ScrapedContent{content})
     if err != nil {
         return nil, err
@@ -141,7 +141,7 @@ func marshalScrapedScene(content scraper.ScrapedContent) (*scraper.ScrapedScene,
 }

 // marshalScrapedGallery will marshal a single scraped gallery
-func marshalScrapedGallery(content scraper.ScrapedContent) (*scraper.ScrapedGallery, error) {
+func marshalScrapedGallery(content scraper.ScrapedContent) (*models.ScrapedGallery, error) {
     g, err := marshalScrapedGalleries([]scraper.ScrapedContent{content})
     if err != nil {
         return nil, err
@@ -151,7 +151,7 @@ func marshalScrapedGallery(content scraper.ScrapedContent) (*scraper.ScrapedGall
 }

 // marshalScrapedImage will marshal a single scraped image
-func marshalScrapedImage(content scraper.ScrapedContent) (*scraper.ScrapedImage, error) {
+func marshalScrapedImage(content scraper.ScrapedContent) (*models.ScrapedImage, error) {
     g, err := marshalScrapedImages([]scraper.ScrapedContent{content})
     if err != nil {
         return nil, err
@@ -7,11 +7,11 @@ import (
     "github.com/stashapp/stash/internal/manager"
     "github.com/stashapp/stash/internal/manager/config"
     "github.com/stashapp/stash/pkg/models"
-    "github.com/stashapp/stash/pkg/scraper/stashbox"
+    "github.com/stashapp/stash/pkg/stashbox"
 )

 func (r *Resolver) newStashBoxClient(box models.StashBox) *stashbox.Client {
-    return stashbox.NewClient(box, r.stashboxRepository(), manager.GetInstance().Config.GetScraperExcludeTagPatterns())
+    return stashbox.NewClient(box, manager.GetInstance().Config.GetScraperExcludeTagPatterns())
 }

 func resolveStashBoxFn(indexField, endpointField string) func(index *int, endpoint *string) (*models.StashBox, error) {
@@ -13,7 +13,6 @@ import (
     "github.com/stashapp/stash/pkg/logger"
     "github.com/stashapp/stash/pkg/models"
     "github.com/stashapp/stash/pkg/scene"
-    "github.com/stashapp/stash/pkg/scraper"
     "github.com/stashapp/stash/pkg/sliceutil"
     "github.com/stashapp/stash/pkg/txn"
     "github.com/stashapp/stash/pkg/utils"
@@ -32,7 +31,7 @@ func (e *MultipleMatchesFoundError) Error() string {
 }

 type SceneScraper interface {
-    ScrapeScenes(ctx context.Context, sceneID int) ([]*scraper.ScrapedScene, error)
+    ScrapeScenes(ctx context.Context, sceneID int) ([]*models.ScrapedScene, error)
 }

 type SceneUpdatePostHookExecutor interface {
@@ -96,7 +95,7 @@ func (t *SceneIdentifier) Identify(ctx context.Context, scene *models.Scene) err
 }

 type scrapeResult struct {
-    result *scraper.ScrapedScene
+    result *models.ScrapedScene
     source ScraperSource
 }

@@ -374,7 +373,7 @@ func getFieldOptions(options []MetadataOptions) map[string]*FieldOptions {
     return ret
 }

-func getScenePartial(scene *models.Scene, scraped *scraper.ScrapedScene, fieldOptions map[string]*FieldOptions, setOrganized bool) models.ScenePartial {
+func getScenePartial(scene *models.Scene, scraped *models.ScrapedScene, fieldOptions map[string]*FieldOptions, setOrganized bool) models.ScenePartial {
     partial := models.ScenePartial{}

     if scraped.Title != nil && (scene.Title != *scraped.Title) {
@@ -10,7 +10,6 @@ import (

     "github.com/stashapp/stash/pkg/models"
     "github.com/stashapp/stash/pkg/models/mocks"
-    "github.com/stashapp/stash/pkg/scraper"
     "github.com/stretchr/testify/assert"
     "github.com/stretchr/testify/mock"
 )
@@ -19,10 +18,10 @@ var testCtx = context.Background()

 type mockSceneScraper struct {
     errIDs  []int
-    results map[int][]*scraper.ScrapedScene
+    results map[int][]*models.ScrapedScene
 }

-func (s mockSceneScraper) ScrapeScenes(ctx context.Context, sceneID int) ([]*scraper.ScrapedScene, error) {
+func (s mockSceneScraper) ScrapeScenes(ctx context.Context, sceneID int) ([]*models.ScrapedScene, error) {
     if slices.Contains(s.errIDs, sceneID) {
         return nil, errors.New("scrape scene error")
     }
@@ -70,7 +69,7 @@ func TestSceneIdentifier_Identify(t *testing.T) {
         {
             Scraper: mockSceneScraper{
                 errIDs: []int{errID1},
-                results: map[int][]*scraper.ScrapedScene{
+                results: map[int][]*models.ScrapedScene{
                     found1ID: {{
                         Title: &scrapedTitle,
                     }},
@@ -80,7 +79,7 @@ func TestSceneIdentifier_Identify(t *testing.T) {
         {
             Scraper: mockSceneScraper{
                 errIDs: []int{errID2},
-                results: map[int][]*scraper.ScrapedScene{
+                results: map[int][]*models.ScrapedScene{
                     found2ID: {{
                         Title: &scrapedTitle,
                     }},
@@ -250,7 +249,7 @@ func TestSceneIdentifier_modifyScene(t *testing.T) {
                 StashIDs: models.NewRelatedStashIDs([]models.StashID{}),
             },
             &scrapeResult{
-                result: &scraper.ScrapedScene{},
+                result: &models.ScrapedScene{},
                 source: ScraperSource{
                     Options: defaultOptions,
                 },
@@ -386,14 +385,14 @@ func Test_getScenePartial(t *testing.T) {
         Mode: models.RelationshipUpdateModeSet,
     }

-    scrapedScene := &scraper.ScrapedScene{
+    scrapedScene := &models.ScrapedScene{
         Title:   &scrapedTitle,
         Date:    &scrapedDate,
         Details: &scrapedDetails,
         URLs:    []string{scrapedURL},
     }

-    scrapedUnchangedScene := &scraper.ScrapedScene{
+    scrapedUnchangedScene := &models.ScrapedScene{
         Title:   &originalTitle,
         Date:    &originalDate,
         Details: &originalDetails,
@@ -423,7 +422,7 @@ func Test_getScenePartial(t *testing.T) {

     type args struct {
         scene        *models.Scene
-        scraped      *scraper.ScrapedScene
+        scraped      *models.ScrapedScene
         fieldOptions map[string]*FieldOptions
         setOrganized bool
     }
@@ -9,7 +9,6 @@ import (

     "github.com/stashapp/stash/pkg/models"
     "github.com/stashapp/stash/pkg/models/mocks"
-    "github.com/stashapp/stash/pkg/scraper"
     "github.com/stashapp/stash/pkg/utils"
     "github.com/stretchr/testify/mock"
 )
@@ -125,7 +124,7 @@ func Test_sceneRelationships_studio(t *testing.T) {
             source: ScraperSource{
                 RemoteSite: "endpoint",
             },
-            result: &scraper.ScrapedScene{
+            result: &models.ScrapedScene{
                 Studio: tt.result,
             },
         }
@@ -315,7 +314,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
         tr.scene = tt.scene
         tr.fieldOptions["performers"] = tt.fieldOptions
         tr.result = &scrapeResult{
-            result: &scraper.ScrapedScene{
+            result: &models.ScrapedScene{
                 Performers: tt.scraped,
             },
         }
@@ -507,7 +506,7 @@ func Test_sceneRelationships_tags(t *testing.T) {
         tr.scene = tt.scene
         tr.fieldOptions["tags"] = tt.fieldOptions
         tr.result = &scrapeResult{
-            result: &scraper.ScrapedScene{
+            result: &models.ScrapedScene{
                 Tags: tt.scraped,
             },
         }
@@ -727,7 +726,7 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
             source: ScraperSource{
                 RemoteSite: tt.endpoint,
             },
-            result: &scraper.ScrapedScene{
+            result: &models.ScrapedScene{
                 RemoteSiteID: tt.remoteSiteID,
             },
         }
@@ -827,7 +826,7 @@ func Test_sceneRelationships_cover(t *testing.T) {
             ID: tt.sceneID,
         }
         tr.result = &scrapeResult{
-            result: &scraper.ScrapedScene{
+            result: &models.ScrapedScene{
                 Image: tt.image,
             },
         }
@@ -14,6 +14,9 @@ type SceneService interface {
     AssignFile(ctx context.Context, sceneID int, fileID models.FileID) error
     Merge(ctx context.Context, sourceIDs []int, destinationID int, fileDeleter *scene.FileDeleter, options scene.MergeOptions) error
     Destroy(ctx context.Context, scene *models.Scene, fileDeleter *scene.FileDeleter, deleteGenerated, deleteFile bool) error
+
+    FindMany(ctx context.Context, ids []int, load ...scene.LoadRelationshipOption) ([]*models.Scene, error)
+    sceneFingerprintGetter
 }

 type ImageService interface {
@@ -9,11 +9,13 @@ import (
     "github.com/stashapp/stash/internal/identify"
     "github.com/stashapp/stash/pkg/job"
     "github.com/stashapp/stash/pkg/logger"
+    "github.com/stashapp/stash/pkg/match"
    "github.com/stashapp/stash/pkg/models"
     "github.com/stashapp/stash/pkg/scene"
     "github.com/stashapp/stash/pkg/scraper"
-    "github.com/stashapp/stash/pkg/scraper/stashbox"
     "github.com/stashapp/stash/pkg/sliceutil/stringslice"
+    "github.com/stashapp/stash/pkg/stashbox"
+    "github.com/stashapp/stash/pkg/txn"
 )

 var ErrInput = errors.New("invalid request input")
@@ -169,12 +171,20 @@ func (j *IdentifyJob) getSources() ([]identify.ScraperSource, error) {

     var src identify.ScraperSource
     if stashBox != nil {
-        stashboxRepository := stashbox.NewRepository(instance.Repository)
+        matcher := match.SceneRelationships{
+            PerformerFinder: instance.Repository.Performer,
+            TagFinder:       instance.Repository.Tag,
+            StudioFinder:    instance.Repository.Studio,
+        }
+
         src = identify.ScraperSource{
             Name: "stash-box: " + stashBox.Endpoint,
             Scraper: stashboxSource{
-                stashbox.NewClient(*stashBox, stashboxRepository, instance.Config.GetScraperExcludeTagPatterns()),
-                stashBox.Endpoint,
+                Client:                 stashbox.NewClient(*stashBox, instance.Config.GetScraperExcludeTagPatterns()),
+                endpoint:               stashBox.Endpoint,
+                txnManager:             instance.Repository.TxnManager,
+                sceneFingerprintGetter: instance.SceneService,
+                matcher:                matcher,
             },
             RemoteSite: stashBox.Endpoint,
         }
@@ -247,14 +257,42 @@ func resolveStashBox(sb []*models.StashBox, source scraper.Source) (*models.Stas
 type stashboxSource struct {
     *stashbox.Client
     endpoint string
+
+    txnManager             models.TxnManager
+    sceneFingerprintGetter sceneFingerprintGetter
+    matcher                match.SceneRelationships
 }

-func (s stashboxSource) ScrapeScenes(ctx context.Context, sceneID int) ([]*scraper.ScrapedScene, error) {
-    results, err := s.FindStashBoxSceneByFingerprints(ctx, sceneID)
+type sceneFingerprintGetter interface {
+    GetScenesFingerprints(ctx context.Context, ids []int) ([]models.Fingerprints, error)
+}
+
+func (s stashboxSource) ScrapeScenes(ctx context.Context, sceneID int) ([]*models.ScrapedScene, error) {
+    var fps []models.Fingerprints
+    if err := txn.WithReadTxn(ctx, s.txnManager, func(ctx context.Context) error {
+        var err error
+        fps, err = s.sceneFingerprintGetter.GetScenesFingerprints(ctx, []int{sceneID})
+        return err
+    }); err != nil {
+        return nil, fmt.Errorf("error getting scene fingerprints: %w", err)
+    }
+
+    results, err := s.FindSceneByFingerprints(ctx, fps[0])
     if err != nil {
         return nil, fmt.Errorf("error querying stash-box using scene ID %d: %w", sceneID, err)
     }

+    if err := txn.WithReadTxn(ctx, s.txnManager, func(ctx context.Context) error {
+        for _, ret := range results {
+            if err := s.matcher.MatchRelationships(ctx, ret, s.endpoint); err != nil {
+                return err
+            }
+        }
+        return nil
+    }); err != nil {
+        return nil, fmt.Errorf("error matching scene relationships: %w", err)
+    }
+
     if len(results) > 0 {
         return results, nil
     }
@@ -271,7 +309,7 @@ type scraperSource struct {
     scraperID string
 }

-func (s scraperSource) ScrapeScenes(ctx context.Context, sceneID int) ([]*scraper.ScrapedScene, error) {
+func (s scraperSource) ScrapeScenes(ctx context.Context, sceneID int) ([]*models.ScrapedScene, error) {
     content, err := s.cache.ScrapeID(ctx, s.scraperID, sceneID, scraper.ScrapeContentTypeScene)
     if err != nil {
         return nil, err
@@ -282,8 +320,8 @@ func (s scraperSource) ScrapeScenes(ctx context.Context, sceneID int) ([]*scrape
         return nil, nil
     }

-    if scene, ok := content.(scraper.ScrapedScene); ok {
-        return []*scraper.ScrapedScene{&scene}, nil
+    if scene, ok := content.(models.ScrapedScene); ok {
+        return []*models.ScrapedScene{&scene}, nil
     }

     return nil, errors.New("could not convert content to scene")
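The sceneFingerprintGetter interface introduced above is satisfied by the scene service. For a test or another caller, a minimal stand-in might look like the hypothetical stub below; only the GetScenesFingerprints signature comes from this diff, the stub itself is illustrative.

// stubFingerprintGetter is a hypothetical test double for sceneFingerprintGetter.
// It returns canned fingerprints per scene ID instead of loading scene files.
type stubFingerprintGetter struct {
    fps map[int]models.Fingerprints
}

func (s stubFingerprintGetter) GetScenesFingerprints(ctx context.Context, ids []int) ([]models.Fingerprints, error) {
    out := make([]models.Fingerprints, len(ids))
    for i, id := range ids {
        out[i] = s.fps[id]
    }
    return out, nil
}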
@@ -6,10 +6,11 @@ import (
     "strconv"

     "github.com/stashapp/stash/pkg/logger"
+    "github.com/stashapp/stash/pkg/match"
     "github.com/stashapp/stash/pkg/models"
     "github.com/stashapp/stash/pkg/performer"
-    "github.com/stashapp/stash/pkg/scraper/stashbox"
     "github.com/stashapp/stash/pkg/sliceutil"
+    "github.com/stashapp/stash/pkg/stashbox"
     "github.com/stashapp/stash/pkg/studio"
 )

@@ -95,8 +96,7 @@ func (t *StashBoxBatchTagTask) findStashBoxPerformer(ctx context.Context) (*mode
     r := instance.Repository

-    stashboxRepository := stashbox.NewRepository(r)
-    client := stashbox.NewClient(*t.box, stashboxRepository, instance.Config.GetScraperExcludeTagPatterns())
+    client := stashbox.NewClient(*t.box, instance.Config.GetScraperExcludeTagPatterns())

     if t.refresh {
         var remoteID string
@@ -119,7 +119,7 @@ func (t *StashBoxBatchTagTask) findStashBoxPerformer(ctx context.Context) (*mode
             return nil, err
         }
         if remoteID != "" {
-            performer, err = client.FindStashBoxPerformerByID(ctx, remoteID)
+            performer, err = client.FindPerformerByID(ctx, remoteID)

             if performer != nil && performer.RemoteMergedIntoId != nil {
                 mergedPerformer, err := t.handleMergedPerformer(ctx, performer, client)
@@ -140,14 +140,22 @@ func (t *StashBoxBatchTagTask) findStashBoxPerformer(ctx context.Context) (*mode
         } else {
             name = t.performer.Name
         }
-        performer, err = client.FindStashBoxPerformerByName(ctx, name)
+        performer, err = client.FindPerformerByName(ctx, name)
+    }
+
+    if performer != nil {
+        if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
+            return match.ScrapedPerformer(ctx, r.Performer, performer, t.box.Endpoint)
+        }); err != nil {
+            return nil, err
+        }
     }

     return performer, err
 }

 func (t *StashBoxBatchTagTask) handleMergedPerformer(ctx context.Context, performer *models.ScrapedPerformer, client *stashbox.Client) (mergedPerformer *models.ScrapedPerformer, err error) {
-    mergedPerformer, err = client.FindStashBoxPerformerByID(ctx, *performer.RemoteMergedIntoId)
+    mergedPerformer, err = client.FindPerformerByID(ctx, *performer.RemoteMergedIntoId)
     if err != nil {
         return nil, fmt.Errorf("loading merged performer %s from stashbox", *performer.RemoteMergedIntoId)
     }
@@ -287,8 +295,7 @@ func (t *StashBoxBatchTagTask) findStashBoxStudio(ctx context.Context) (*models.
     r := instance.Repository

-    stashboxRepository := stashbox.NewRepository(r)
-    client := stashbox.NewClient(*t.box, stashboxRepository, instance.Config.GetScraperExcludeTagPatterns())
+    client := stashbox.NewClient(*t.box, instance.Config.GetScraperExcludeTagPatterns())

     if t.refresh {
         var remoteID string
@@ -309,7 +316,7 @@ func (t *StashBoxBatchTagTask) findStashBoxStudio(ctx context.Context) (*models.
             return nil, err
         }
         if remoteID != "" {
-            studio, err = client.FindStashBoxStudio(ctx, remoteID)
+            studio, err = client.FindStudio(ctx, remoteID)
         }
     } else {
         var name string
@@ -318,7 +325,19 @@ func (t *StashBoxBatchTagTask) findStashBoxStudio(ctx context.Context) (*models.
         } else {
             name = t.studio.Name
         }
-        studio, err = client.FindStashBoxStudio(ctx, name)
+        studio, err = client.FindStudio(ctx, name)
+    }
+
+    if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
+        if studio != nil {
+            if err := match.ScrapedStudioHierarchy(ctx, r.Studio, studio, t.box.Endpoint); err != nil {
+                return err
+            }
+        }
+
+        return nil
+    }); err != nil {
+        return nil, err
     }

     return studio, err
@ -20,18 +20,52 @@ type GroupNamesFinder interface {
|
||||||
FindByNames(ctx context.Context, names []string, nocase bool) ([]*models.Group, error)
|
FindByNames(ctx context.Context, names []string, nocase bool) ([]*models.Group, error)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type SceneRelationships struct {
|
||||||
|
PerformerFinder PerformerFinder
|
||||||
|
TagFinder models.TagQueryer
|
||||||
|
StudioFinder StudioFinder
|
||||||
|
}
|
||||||
|
|
||||||
|
// MatchRelationships accepts a scraped scene and attempts to match its relationships to existing stash models.
|
||||||
|
func (r SceneRelationships) MatchRelationships(ctx context.Context, s *models.ScrapedScene, endpoint string) error {
|
||||||
|
thisStudio := s.Studio
|
||||||
|
for thisStudio != nil {
|
||||||
|
if err := ScrapedStudio(ctx, r.StudioFinder, s.Studio, endpoint); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
thisStudio = thisStudio.Parent
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, p := range s.Performers {
|
||||||
|
err := ScrapedPerformer(ctx, r.PerformerFinder, p, endpoint)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, t := range s.Tags {
|
||||||
|
err := ScrapedTag(ctx, r.TagFinder, t)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
// ScrapedPerformer matches the provided performer with the
|
// ScrapedPerformer matches the provided performer with the
|
||||||
// performers in the database and sets the ID field if one is found.
|
// performers in the database and sets the ID field if one is found.
|
||||||
func ScrapedPerformer(ctx context.Context, qb PerformerFinder, p *models.ScrapedPerformer, stashBoxEndpoint *string) error {
|
func ScrapedPerformer(ctx context.Context, qb PerformerFinder, p *models.ScrapedPerformer, stashBoxEndpoint string) error {
|
||||||
if p.StoredID != nil || p.Name == nil {
|
if p.StoredID != nil || p.Name == nil {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check if a performer with the StashID already exists
|
// Check if a performer with the StashID already exists
|
||||||
if stashBoxEndpoint != nil && p.RemoteSiteID != nil {
|
if stashBoxEndpoint != "" && p.RemoteSiteID != nil {
|
||||||
performers, err := qb.FindByStashID(ctx, models.StashID{
|
performers, err := qb.FindByStashID(ctx, models.StashID{
|
||||||
StashID: *p.RemoteSiteID,
|
StashID: *p.RemoteSiteID,
|
||||||
Endpoint: *stashBoxEndpoint,
|
Endpoint: stashBoxEndpoint,
|
||||||
})
|
})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
|
|
@@ -73,16 +107,16 @@ type StudioFinder interface {

// ScrapedStudio matches the provided studio with the studios
// in the database and sets the ID field if one is found.
-func ScrapedStudio(ctx context.Context, qb StudioFinder, s *models.ScrapedStudio, stashBoxEndpoint *string) error {
+func ScrapedStudio(ctx context.Context, qb StudioFinder, s *models.ScrapedStudio, stashBoxEndpoint string) error {
	if s.StoredID != nil {
		return nil
	}

	// Check if a studio with the StashID already exists
-	if stashBoxEndpoint != nil && s.RemoteSiteID != nil {
+	if stashBoxEndpoint != "" && s.RemoteSiteID != nil {
		studios, err := qb.FindByStashID(ctx, models.StashID{
			StashID:  *s.RemoteSiteID,
-			Endpoint: *stashBoxEndpoint,
+			Endpoint: stashBoxEndpoint,
		})
		if err != nil {
			return err
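The endpoint parameter is now a plain string rather than *string: stash-box callers pass the box endpoint so matching by stash ID is attempted first, while generic scrapers pass an empty string and match by name only. A hedged sketch of the two call styles (the finder variables are assumed; the "" and box.Endpoint arguments mirror the call sites changed later in this diff):

	// stash-box scrape: match against stash IDs on the configured endpoint first
	if err := match.ScrapedPerformer(ctx, performerFinder, scrapedPerformer, box.Endpoint); err != nil {
		return err
	}

	// generic scraper: no stash-box endpoint, so pass "" and match by name only
	if err := match.ScrapedStudio(ctx, studioFinder, scrapedStudio, ""); err != nil {
		return err
	}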
@@ -118,6 +152,19 @@ func ScrapedStudio(ctx context.Context, qb StudioFinder, s *models.ScrapedStudio
	return nil
}

+// ScrapedStudioHierarchy executes ScrapedStudio for the provided studio and its parents recursively.
+func ScrapedStudioHierarchy(ctx context.Context, qb StudioFinder, s *models.ScrapedStudio, stashBoxEndpoint string) error {
+	if err := ScrapedStudio(ctx, qb, s, stashBoxEndpoint); err != nil {
+		return err
+	}
+
+	if s.Parent == nil {
+		return nil
+	}
+
+	return ScrapedStudioHierarchy(ctx, qb, s.Parent, stashBoxEndpoint)
+}
+
// ScrapedGroup matches the provided movie with the movies
// in the database and returns the ID field if one is found.
func ScrapedGroup(ctx context.Context, qb GroupNamesFinder, storedID *string, name *string) (matchedID *string, err error) {
@@ -26,6 +26,20 @@ func (f *Fingerprint) Value() string {
	}
}

+// String returns the string representation of the Fingerprint.
+// It will return an empty string if the Fingerprint is not a string.
+func (f Fingerprint) String() string {
+	s, _ := f.Fingerprint.(string)
+	return s
+}
+
+// Int64 returns the int64 representation of the Fingerprint.
+// It will return 0 if the Fingerprint is not an int64.
+func (f Fingerprint) Int64() int64 {
+	v, _ := f.Fingerprint.(int64)
+	return v
+}
+
type Fingerprints []Fingerprint

func (f Fingerprints) Remove(type_ string) Fingerprints {
@@ -102,33 +116,27 @@ func (f Fingerprints) For(type_ string) *Fingerprint {
}

func (f Fingerprints) Get(type_ string) interface{} {
-	for _, fp := range f {
-		if fp.Type == type_ {
-			return fp.Fingerprint
-		}
-	}
-
-	return nil
+	fp := f.For(type_)
+	if fp == nil {
+		return nil
+	}
+
+	return fp.Fingerprint
}

func (f Fingerprints) GetString(type_ string) string {
-	fp := f.Get(type_)
-	if fp != nil {
-		s, _ := fp.(string)
-		return s
-	}
-
-	return ""
+	fp := f.For(type_)
+	if fp == nil {
+		return ""
+	}
+
+	return fp.String()
}

func (f Fingerprints) GetInt64(type_ string) int64 {
-	fp := f.Get(type_)
-	if fp != nil {
-		v, _ := fp.(int64)
-		return v
-	}
-
-	return 0
+	fp := f.For(type_)
+	if fp == nil {
+		return 0
+	}
+
+	return fp.Int64()
}

// AppendUnique appends a fingerprint to the list if a Fingerprint of the same type does not already exist in the list. If one does, then it is updated with o's Fingerprint value.
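With the new accessors, a file's fingerprint list can be read by type without type assertions at the call site. A short hedged sketch (the fingerprint type constants and utils.PhashToString are the ones referenced by code elsewhere in this diff; the variable f is assumed to be a scene file):

	md5 := f.Fingerprints.GetString(models.FingerprintTypeMD5)
	oshash := f.Fingerprints.GetString(models.FingerprintTypeOshash)
	phash := f.Fingerprints.GetInt64(models.FingerprintTypePhash)

	if phash != 0 {
		// e.g. convert to the string form expected by stash-box
		_ = utils.PhashToString(phash)
	}
	_, _ = md5, oshash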
@@ -492,3 +492,88 @@ func (g ScrapedGroup) ScrapedMovie() ScrapedMovie {

	return ret
}
+
+type ScrapedScene struct {
+	Title    *string  `json:"title"`
+	Code     *string  `json:"code"`
+	Details  *string  `json:"details"`
+	Director *string  `json:"director"`
+	URL      *string  `json:"url"`
+	URLs     []string `json:"urls"`
+	Date     *string  `json:"date"`
+	// This should be a base64 encoded data URL
+	Image        *string                `json:"image"`
+	File         *SceneFileType         `json:"file"`
+	Studio       *ScrapedStudio         `json:"studio"`
+	Tags         []*ScrapedTag          `json:"tags"`
+	Performers   []*ScrapedPerformer    `json:"performers"`
+	Groups       []*ScrapedGroup        `json:"groups"`
+	Movies       []*ScrapedMovie        `json:"movies"`
+	RemoteSiteID *string                `json:"remote_site_id"`
+	Duration     *int                   `json:"duration"`
+	Fingerprints []*StashBoxFingerprint `json:"fingerprints"`
+}
+
+func (ScrapedScene) IsScrapedContent() {}
+
+type ScrapedSceneInput struct {
+	Title        *string  `json:"title"`
+	Code         *string  `json:"code"`
+	Details      *string  `json:"details"`
+	Director     *string  `json:"director"`
+	URL          *string  `json:"url"`
+	URLs         []string `json:"urls"`
+	Date         *string  `json:"date"`
+	RemoteSiteID *string  `json:"remote_site_id"`
+}
+
+type ScrapedImage struct {
+	Title        *string             `json:"title"`
+	Code         *string             `json:"code"`
+	Details      *string             `json:"details"`
+	Photographer *string             `json:"photographer"`
+	URLs         []string            `json:"urls"`
+	Date         *string             `json:"date"`
+	Studio       *ScrapedStudio      `json:"studio"`
+	Tags         []*ScrapedTag       `json:"tags"`
+	Performers   []*ScrapedPerformer `json:"performers"`
+}
+
+func (ScrapedImage) IsScrapedContent() {}
+
+type ScrapedImageInput struct {
+	Title   *string  `json:"title"`
+	Code    *string  `json:"code"`
+	Details *string  `json:"details"`
+	URLs    []string `json:"urls"`
+	Date    *string  `json:"date"`
+}
+
+type ScrapedGallery struct {
+	Title        *string             `json:"title"`
+	Code         *string             `json:"code"`
+	Details      *string             `json:"details"`
+	Photographer *string             `json:"photographer"`
+	URLs         []string            `json:"urls"`
+	Date         *string             `json:"date"`
+	Studio       *ScrapedStudio      `json:"studio"`
+	Tags         []*ScrapedTag       `json:"tags"`
+	Performers   []*ScrapedPerformer `json:"performers"`
+
+	// deprecated
+	URL *string `json:"url"`
+}
+
+func (ScrapedGallery) IsScrapedContent() {}
+
+type ScrapedGalleryInput struct {
+	Title        *string  `json:"title"`
+	Code         *string  `json:"code"`
+	Details      *string  `json:"details"`
+	Photographer *string  `json:"photographer"`
+	URLs         []string `json:"urls"`
+	Date         *string  `json:"date"`
+
+	// deprecated
+	URL *string `json:"url"`
+}
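These scraped-content types now live in the models package so the scraper and stashbox packages can share them. A hedged sketch of constructing one by hand, as a scraper or stash-box client might before passing it to the matching code above (all values are illustrative):

	title := "Example title"
	performerName := "Example performer"
	remoteID := "scene-id-on-the-remote-site" // assumed value
	ss := &models.ScrapedScene{
		Title:        &title,
		URLs:         []string{"https://example.com/scene"},
		RemoteSiteID: &remoteID,
		Performers:   []*models.ScrapedPerformer{{Name: &performerName}},
	}
	_ = ss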
pkg/scene/find.go (new file, 66 lines)
@@ -0,0 +1,66 @@
+package scene
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/stashapp/stash/pkg/models"
+)
+
+type LoadRelationshipOption func(context.Context, *models.Scene, models.SceneReader) error
+
+func LoadURLs(ctx context.Context, scene *models.Scene, r models.SceneReader) error {
+	if err := scene.LoadURLs(ctx, r); err != nil {
+		return fmt.Errorf("loading scene URLs: %w", err)
+	}
+
+	return nil
+}
+
+func LoadStashIDs(ctx context.Context, scene *models.Scene, r models.SceneReader) error {
+	if err := scene.LoadStashIDs(ctx, r); err != nil {
+		return fmt.Errorf("failed to load stash IDs for scene %d: %w", scene.ID, err)
+	}
+
+	return nil
+}
+
+func LoadFiles(ctx context.Context, scene *models.Scene, r models.SceneReader) error {
+	if err := scene.LoadFiles(ctx, r); err != nil {
+		return fmt.Errorf("failed to load files for scene %d: %w", scene.ID, err)
+	}
+
+	return nil
+}
+
+// FindMany retrieves multiple scenes by their IDs.
+// This method will load the specified relationships for each scene.
+func (s *Service) FindMany(ctx context.Context, ids []int, load ...LoadRelationshipOption) ([]*models.Scene, error) {
+	var scenes []*models.Scene
+	qb := s.Repository
+
+	var err error
+	scenes, err = qb.FindMany(ctx, ids)
+	if err != nil {
+		return nil, err
+	}
+
+	// TODO - we should bulk load these relationships
+	for _, scene := range scenes {
+		if err := s.LoadRelationships(ctx, scene, load...); err != nil {
+			return nil, err
+		}
+	}
+
+	return scenes, nil
+}
+
+func (s *Service) LoadRelationships(ctx context.Context, scene *models.Scene, load ...LoadRelationshipOption) error {
+	for _, l := range load {
+		if err := l(ctx, scene, s.Repository); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
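FindMany takes optional relationship loaders, so callers only pay for the relationships they actually need. A hedged usage sketch (sceneService is assumed to be an instance of the scene Service, and in practice this would run inside a read transaction):

	scenes, err := sceneService.FindMany(ctx, []int{1, 2, 3},
		scene.LoadStashIDs, // needed before submitting fingerprints or drafts
		scene.LoadFiles,    // needed to read file fingerprints
	)
	if err != nil {
		return err
	}
	for _, s := range scenes {
		_ = s.Files.List() // already loaded; no further queries needed
	}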
pkg/scene/fingerprints.go (new file, 40 lines)
@@ -0,0 +1,40 @@
+package scene
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/stashapp/stash/pkg/models"
+)
+
+// GetFingerprints returns the fingerprints for the given scene ids.
+func (s *Service) GetScenesFingerprints(ctx context.Context, ids []int) ([]models.Fingerprints, error) {
+	fingerprints := make([]models.Fingerprints, len(ids))
+
+	qb := s.Repository
+
+	for i, sceneID := range ids {
+		scene, err := qb.Find(ctx, sceneID)
+		if err != nil {
+			return nil, err
+		}
+
+		if scene == nil {
+			return nil, fmt.Errorf("scene with id %d not found", sceneID)
+		}
+
+		if err := scene.LoadFiles(ctx, qb); err != nil {
+			return nil, err
+		}
+
+		var sceneFPs models.Fingerprints
+
+		for _, f := range scene.Files.List() {
+			sceneFPs = append(sceneFPs, f.Fingerprints...)
+		}
+
+		fingerprints[i] = sceneFPs
+	}
+
+	return fingerprints, nil
+}
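This takes over the fingerprint gathering that previously lived inside the stash-box client (see the deleted FindStashBoxScenesByFingerprints further down): callers now collect models.Fingerprints themselves and convert them to stash-box query inputs. A hedged sketch of that conversion (graphql.FingerprintQueryInput and the algorithm constant are the ones used by the deleted code later in this diff; the surrounding wiring is assumed):

	fps, err := sceneService.GetScenesFingerprints(ctx, sceneIDs)
	if err != nil {
		return err
	}
	var inputs []*graphql.FingerprintQueryInput
	for _, sceneFPs := range fps {
		if md5 := sceneFPs.GetString(models.FingerprintTypeMD5); md5 != "" {
			inputs = append(inputs, &graphql.FingerprintQueryInput{
				Hash:      md5,
				Algorithm: graphql.FingerprintAlgorithmMd5,
			})
		}
	}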
@@ -29,9 +29,9 @@ type scraperActionImpl interface {
	scrapeByName(ctx context.Context, name string, ty ScrapeContentType) ([]ScrapedContent, error)
	scrapeByFragment(ctx context.Context, input Input) (ScrapedContent, error)

-	scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*ScrapedScene, error)
+	scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error)
-	scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*ScrapedGallery, error)
+	scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error)
-	scrapeImageByImage(ctx context.Context, image *models.Image) (*ScrapedImage, error)
+	scrapeImageByImage(ctx context.Context, image *models.Image) (*models.ScrapedImage, error)
}

func (c config) getScraper(scraper scraperTypeConfig, client *http.Client, globalConfig GlobalConfig) scraperActionImpl {
@@ -89,8 +89,8 @@ func autotagMatchTags(ctx context.Context, path string, tagReader models.TagAuto
	return ret, nil
}

-func (s autotagScraper) viaScene(ctx context.Context, _client *http.Client, scene *models.Scene) (*ScrapedScene, error) {
+func (s autotagScraper) viaScene(ctx context.Context, _client *http.Client, scene *models.Scene) (*models.ScrapedScene, error) {
-	var ret *ScrapedScene
+	var ret *models.ScrapedScene
	const trimExt = false

	// populate performers, studio and tags based on scene path

@@ -115,7 +115,7 @@ func (s autotagScraper) viaScene(ctx context.Context, _client *http.Client, scen
	}

	if len(performers) > 0 || studio != nil || len(tags) > 0 {
-		ret = &ScrapedScene{
+		ret = &models.ScrapedScene{
			Performers: performers,
			Studio:     studio,
			Tags:       tags,

@@ -130,7 +130,7 @@ func (s autotagScraper) viaScene(ctx context.Context, _client *http.Client, scen
	return ret, nil
}

-func (s autotagScraper) viaGallery(ctx context.Context, _client *http.Client, gallery *models.Gallery) (*ScrapedGallery, error) {
+func (s autotagScraper) viaGallery(ctx context.Context, _client *http.Client, gallery *models.Gallery) (*models.ScrapedGallery, error) {
	path := gallery.Path
	if path == "" {
		// not valid for non-path-based galleries

@@ -140,7 +140,7 @@ func (s autotagScraper) viaGallery(ctx context.Context, _client *http.Client, ga
	// only trim extension if gallery is file-based
	trimExt := gallery.PrimaryFileID != nil

-	var ret *ScrapedGallery
+	var ret *models.ScrapedGallery

	// populate performers, studio and tags based on scene path
	if err := txn.WithReadTxn(ctx, s.txnManager, func(ctx context.Context) error {

@@ -160,7 +160,7 @@ func (s autotagScraper) viaGallery(ctx context.Context, _client *http.Client, ga
	}

	if len(performers) > 0 || studio != nil || len(tags) > 0 {
-		ret = &ScrapedGallery{
+		ret = &models.ScrapedGallery{
			Performers: performers,
			Studio:     studio,
			Tags:       tags,
@@ -1,32 +0,0 @@
-package scraper
-
-import "github.com/stashapp/stash/pkg/models"
-
-type ScrapedGallery struct {
-	Title        *string                    `json:"title"`
-	Code         *string                    `json:"code"`
-	Details      *string                    `json:"details"`
-	Photographer *string                    `json:"photographer"`
-	URLs         []string                   `json:"urls"`
-	Date         *string                    `json:"date"`
-	Studio       *models.ScrapedStudio      `json:"studio"`
-	Tags         []*models.ScrapedTag       `json:"tags"`
-	Performers   []*models.ScrapedPerformer `json:"performers"`
-
-	// deprecated
-	URL *string `json:"url"`
-}
-
-func (ScrapedGallery) IsScrapedContent() {}
-
-type ScrapedGalleryInput struct {
-	Title        *string  `json:"title"`
-	Code         *string  `json:"code"`
-	Details      *string  `json:"details"`
-	Photographer *string  `json:"photographer"`
-	URLs         []string `json:"urls"`
-	Date         *string  `json:"date"`
-
-	// deprecated
-	URL *string `json:"url"`
-}
@@ -60,7 +60,7 @@ func (g group) viaFragment(ctx context.Context, client *http.Client, input Input
	return s.scrapeByFragment(ctx, input)
}

-func (g group) viaScene(ctx context.Context, client *http.Client, scene *models.Scene) (*ScrapedScene, error) {
+func (g group) viaScene(ctx context.Context, client *http.Client, scene *models.Scene) (*models.ScrapedScene, error) {
	if g.config.SceneByFragment == nil {
		return nil, ErrNotSupported
	}

@@ -69,7 +69,7 @@ func (g group) viaScene(ctx context.Context, client *http.Client, scene *models.
	return s.scrapeSceneByScene(ctx, scene)
}

-func (g group) viaGallery(ctx context.Context, client *http.Client, gallery *models.Gallery) (*ScrapedGallery, error) {
+func (g group) viaGallery(ctx context.Context, client *http.Client, gallery *models.Gallery) (*models.ScrapedGallery, error) {
	if g.config.GalleryByFragment == nil {
		return nil, ErrNotSupported
	}

@@ -78,7 +78,7 @@ func (g group) viaGallery(ctx context.Context, client *http.Client, gallery *mod
	return s.scrapeGalleryByGallery(ctx, gallery)
}

-func (g group) viaImage(ctx context.Context, client *http.Client, gallery *models.Image) (*ScrapedImage, error) {
+func (g group) viaImage(ctx context.Context, client *http.Client, gallery *models.Image) (*models.ScrapedImage, error) {
	if g.config.ImageByFragment == nil {
		return nil, ErrNotSupported
	}
@@ -11,28 +11,6 @@ import (
	"github.com/stashapp/stash/pkg/utils"
)

-type ScrapedImage struct {
-	Title        *string                    `json:"title"`
-	Code         *string                    `json:"code"`
-	Details      *string                    `json:"details"`
-	Photographer *string                    `json:"photographer"`
-	URLs         []string                   `json:"urls"`
-	Date         *string                    `json:"date"`
-	Studio       *models.ScrapedStudio      `json:"studio"`
-	Tags         []*models.ScrapedTag       `json:"tags"`
-	Performers   []*models.ScrapedPerformer `json:"performers"`
-}
-
-func (ScrapedImage) IsScrapedContent() {}
-
-type ScrapedImageInput struct {
-	Title   *string  `json:"title"`
-	Code    *string  `json:"code"`
-	Details *string  `json:"details"`
-	URLs    []string `json:"urls"`
-	Date    *string  `json:"date"`
-}
-
func setPerformerImage(ctx context.Context, client *http.Client, p *models.ScrapedPerformer, globalConfig GlobalConfig) error {
	// backwards compatibility: we fetch the image if it's a URL and set it to the first image
	// Image is deprecated, so only do this if Images is unset

@@ -59,7 +37,7 @@ func setPerformerImage(ctx context.Context, client *http.Client, p *models.Scrap
	return nil
}

-func setSceneImage(ctx context.Context, client *http.Client, s *ScrapedScene, globalConfig GlobalConfig) error {
+func setSceneImage(ctx context.Context, client *http.Client, s *models.ScrapedScene, globalConfig GlobalConfig) error {
	// don't try to get the image if it doesn't appear to be a URL
	if s.Image == nil || !strings.HasPrefix(*s.Image, "http") {
		// nothing to do
@@ -172,7 +172,7 @@ func (s *jsonScraper) scrapeByName(ctx context.Context, name string, ty ScrapeCo
	return nil, ErrNotSupported
}

-func (s *jsonScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*ScrapedScene, error) {
+func (s *jsonScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) {
	// construct the URL
	queryURL := queryURLParametersFromScene(scene)
	if s.scraper.QueryURLReplacements != nil {

@@ -231,7 +231,7 @@ func (s *jsonScraper) scrapeByFragment(ctx context.Context, input Input) (Scrape
	return scraper.scrapeScene(ctx, q)
}

-func (s *jsonScraper) scrapeImageByImage(ctx context.Context, image *models.Image) (*ScrapedImage, error) {
+func (s *jsonScraper) scrapeImageByImage(ctx context.Context, image *models.Image) (*models.ScrapedImage, error) {
	// construct the URL
	queryURL := queryURLParametersFromImage(image)
	if s.scraper.QueryURLReplacements != nil {

@@ -255,7 +255,7 @@ func (s *jsonScraper) scrapeImageByImage(ctx context.Context, image *models.Imag
	return scraper.scrapeImage(ctx, q)
}

-func (s *jsonScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*ScrapedGallery, error) {
+func (s *jsonScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error) {
	// construct the URL
	queryURL := queryURLParametersFromGallery(gallery)
	if s.scraper.QueryURLReplacements != nil {
@@ -997,8 +997,8 @@ func (s mappedScraper) scrapePerformers(ctx context.Context, q mappedQuery) ([]*
	return ret, nil
}

-// processSceneRelationships sets the relationships on the ScrapedScene. It returns true if any relationships were set.
+// processSceneRelationships sets the relationships on the models.ScrapedScene. It returns true if any relationships were set.
-func (s mappedScraper) processSceneRelationships(ctx context.Context, q mappedQuery, resultIndex int, ret *ScrapedScene) bool {
+func (s mappedScraper) processSceneRelationships(ctx context.Context, q mappedQuery, resultIndex int, ret *models.ScrapedScene) bool {
	sceneScraperConfig := s.Scene

	scenePerformersMap := sceneScraperConfig.Performers

@@ -1082,8 +1082,8 @@ func processRelationships[T any](ctx context.Context, s mappedScraper, relations
	return ret
}

-func (s mappedScraper) scrapeScenes(ctx context.Context, q mappedQuery) ([]*ScrapedScene, error) {
+func (s mappedScraper) scrapeScenes(ctx context.Context, q mappedQuery) ([]*models.ScrapedScene, error) {
-	var ret []*ScrapedScene
+	var ret []*models.ScrapedScene

	sceneScraperConfig := s.Scene
	sceneMap := sceneScraperConfig.mappedConfig

@@ -1097,7 +1097,7 @@ func (s mappedScraper) scrapeScenes(ctx context.Context, q mappedQuery) ([]*Scra
	for i, r := range results {
		logger.Debug(`Processing scene:`)

-		var thisScene ScrapedScene
+		var thisScene models.ScrapedScene
		r.apply(&thisScene)
		s.processSceneRelationships(ctx, q, i, &thisScene)
		ret = append(ret, &thisScene)

@@ -1106,7 +1106,7 @@ func (s mappedScraper) scrapeScenes(ctx context.Context, q mappedQuery) ([]*Scra
	return ret, nil
}

-func (s mappedScraper) scrapeScene(ctx context.Context, q mappedQuery) (*ScrapedScene, error) {
+func (s mappedScraper) scrapeScene(ctx context.Context, q mappedQuery) (*models.ScrapedScene, error) {
	sceneScraperConfig := s.Scene
	if sceneScraperConfig == nil {
		return nil, nil

@@ -1117,7 +1117,7 @@ func (s mappedScraper) scrapeScene(ctx context.Context, q mappedQuery) (*Scraped
	logger.Debug(`Processing scene:`)
	results := sceneMap.process(ctx, q, s.Common, urlsIsMulti)

-	var ret ScrapedScene
+	var ret models.ScrapedScene
	if len(results) > 0 {
		results[0].apply(&ret)
	}

@@ -1133,8 +1133,8 @@ func (s mappedScraper) scrapeScene(ctx context.Context, q mappedQuery) (*Scraped
	return nil, nil
}

-func (s mappedScraper) scrapeImage(ctx context.Context, q mappedQuery) (*ScrapedImage, error) {
+func (s mappedScraper) scrapeImage(ctx context.Context, q mappedQuery) (*models.ScrapedImage, error) {
-	var ret ScrapedImage
+	var ret models.ScrapedImage

	imageScraperConfig := s.Image
	if imageScraperConfig == nil {

@@ -1184,8 +1184,8 @@ func (s mappedScraper) scrapeImage(ctx context.Context, q mappedQuery) (*Scraped
	return &ret, nil
}

-func (s mappedScraper) scrapeGallery(ctx context.Context, q mappedQuery) (*ScrapedGallery, error) {
+func (s mappedScraper) scrapeGallery(ctx context.Context, q mappedQuery) (*models.ScrapedGallery, error) {
-	var ret ScrapedGallery
+	var ret models.ScrapedGallery

	galleryScraperConfig := s.Gallery
	if galleryScraperConfig == nil {
@@ -23,23 +23,23 @@ func (c Cache) postScrape(ctx context.Context, content ScrapedContent, excludeTa
		}
	case models.ScrapedPerformer:
		return c.postScrapePerformer(ctx, v, excludeTagRE)
-	case *ScrapedScene:
+	case *models.ScrapedScene:
		if v != nil {
			return c.postScrapeScene(ctx, *v, excludeTagRE)
		}
-	case ScrapedScene:
+	case models.ScrapedScene:
		return c.postScrapeScene(ctx, v, excludeTagRE)
-	case *ScrapedGallery:
+	case *models.ScrapedGallery:
		if v != nil {
			return c.postScrapeGallery(ctx, *v, excludeTagRE)
		}
-	case ScrapedGallery:
+	case models.ScrapedGallery:
		return c.postScrapeGallery(ctx, v, excludeTagRE)
-	case *ScrapedImage:
+	case *models.ScrapedImage:
		if v != nil {
			return c.postScrapeImage(ctx, *v, excludeTagRE)
		}
-	case ScrapedImage:
+	case models.ScrapedImage:
		return c.postScrapeImage(ctx, v, excludeTagRE)
	case *models.ScrapedMovie:
		if v != nil {

@@ -133,7 +133,7 @@ func (c Cache) postScrapeMovie(ctx context.Context, m models.ScrapedMovie, exclu
	m.Tags, ignoredTags = FilterTags(excludeTagRE, tags)

	if m.Studio != nil {
-		if err := match.ScrapedStudio(ctx, r.StudioFinder, m.Studio, nil); err != nil {
+		if err := match.ScrapedStudio(ctx, r.StudioFinder, m.Studio, ""); err != nil {
			return err
		}
	}

@@ -165,7 +165,7 @@ func (c Cache) postScrapeGroup(ctx context.Context, m models.ScrapedGroup, exclu
	m.Tags, ignoredTags = FilterTags(excludeTagRE, tags)

	if m.Studio != nil {
-		if err := match.ScrapedStudio(ctx, r.StudioFinder, m.Studio, nil); err != nil {
+		if err := match.ScrapedStudio(ctx, r.StudioFinder, m.Studio, ""); err != nil {
			return err
		}
	}

@@ -201,7 +201,7 @@ func (c Cache) postScrapeScenePerformer(ctx context.Context, p models.ScrapedPer
	return ignoredTags, nil
}

-func (c Cache) postScrapeScene(ctx context.Context, scene ScrapedScene, excludeTagRE []*regexp.Regexp) (_ ScrapedContent, ignoredTags []string, err error) {
+func (c Cache) postScrapeScene(ctx context.Context, scene models.ScrapedScene, excludeTagRE []*regexp.Regexp) (_ ScrapedContent, ignoredTags []string, err error) {
	// set the URL/URLs field
	if scene.URL == nil && len(scene.URLs) > 0 {
		scene.URL = &scene.URLs[0]

@@ -227,7 +227,7 @@ func (c Cache) postScrapeScene(ctx context.Context, scene ScrapedScene, excludeT
			return err
		}

-		if err := match.ScrapedPerformer(ctx, pqb, p, nil); err != nil {
+		if err := match.ScrapedPerformer(ctx, pqb, p, ""); err != nil {
			return err
		}

@@ -277,7 +277,7 @@ func (c Cache) postScrapeScene(ctx context.Context, scene ScrapedScene, excludeT
	scene.Tags, ignoredTags = FilterTags(excludeTagRE, tags)

	if scene.Studio != nil {
-		err := match.ScrapedStudio(ctx, sqb, scene.Studio, nil)
+		err := match.ScrapedStudio(ctx, sqb, scene.Studio, "")
		if err != nil {
			return err
		}

@@ -296,7 +296,7 @@ func (c Cache) postScrapeScene(ctx context.Context, scene ScrapedScene, excludeT
	return scene, ignoredTags, nil
}

-func (c Cache) postScrapeGallery(ctx context.Context, g ScrapedGallery, excludeTagRE []*regexp.Regexp) (_ ScrapedContent, ignoredTags []string, err error) {
+func (c Cache) postScrapeGallery(ctx context.Context, g models.ScrapedGallery, excludeTagRE []*regexp.Regexp) (_ ScrapedContent, ignoredTags []string, err error) {
	// set the URL/URLs field
	if g.URL == nil && len(g.URLs) > 0 {
		g.URL = &g.URLs[0]

@@ -312,7 +312,7 @@ func (c Cache) postScrapeGallery(ctx context.Context, g ScrapedGallery, excludeT
	sqb := r.StudioFinder

	for _, p := range g.Performers {
-		err := match.ScrapedPerformer(ctx, pqb, p, nil)
+		err := match.ScrapedPerformer(ctx, pqb, p, "")
		if err != nil {
			return err
		}

@@ -325,7 +325,7 @@ func (c Cache) postScrapeGallery(ctx context.Context, g ScrapedGallery, excludeT
	g.Tags, ignoredTags = FilterTags(excludeTagRE, tags)

	if g.Studio != nil {
-		err := match.ScrapedStudio(ctx, sqb, g.Studio, nil)
+		err := match.ScrapedStudio(ctx, sqb, g.Studio, "")
		if err != nil {
			return err
		}

@@ -339,7 +339,7 @@ func (c Cache) postScrapeGallery(ctx context.Context, g ScrapedGallery, excludeT
	return g, ignoredTags, nil
}

-func (c Cache) postScrapeImage(ctx context.Context, image ScrapedImage, excludeTagRE []*regexp.Regexp) (_ ScrapedContent, ignoredTags []string, err error) {
+func (c Cache) postScrapeImage(ctx context.Context, image models.ScrapedImage, excludeTagRE []*regexp.Regexp) (_ ScrapedContent, ignoredTags []string, err error) {
	r := c.repository
	if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
		pqb := r.PerformerFinder

@@ -347,7 +347,7 @@ func (c Cache) postScrapeImage(ctx context.Context, image ScrapedImage, excludeT
	sqb := r.StudioFinder

	for _, p := range image.Performers {
-		if err := match.ScrapedPerformer(ctx, pqb, p, nil); err != nil {
+		if err := match.ScrapedPerformer(ctx, pqb, p, ""); err != nil {
			return err
		}
	}

@@ -360,7 +360,7 @@ func (c Cache) postScrapeImage(ctx context.Context, image ScrapedImage, excludeT
	image.Tags, ignoredTags = FilterTags(excludeTagRE, tags)

	if image.Studio != nil {
-		err := match.ScrapedStudio(ctx, sqb, image.Studio, nil)
+		err := match.ScrapedStudio(ctx, sqb, image.Studio, "")
		if err != nil {
			return err
		}
@@ -26,7 +26,7 @@ func queryURLParametersFromScene(scene *models.Scene) queryURLParameters {
	return ret
}

-func queryURLParametersFromScrapedScene(scene ScrapedSceneInput) queryURLParameters {
+func queryURLParametersFromScrapedScene(scene models.ScrapedSceneInput) queryURLParameters {
	ret := make(queryURLParameters)

	setField := func(field string, value *string) {
@@ -1,39 +0,0 @@
-package scraper
-
-import (
-	"github.com/stashapp/stash/pkg/models"
-)
-
-type ScrapedScene struct {
-	Title    *string  `json:"title"`
-	Code     *string  `json:"code"`
-	Details  *string  `json:"details"`
-	Director *string  `json:"director"`
-	URL      *string  `json:"url"`
-	URLs     []string `json:"urls"`
-	Date     *string  `json:"date"`
-	// This should be a base64 encoded data URL
-	Image        *string                       `json:"image"`
-	File         *models.SceneFileType         `json:"file"`
-	Studio       *models.ScrapedStudio         `json:"studio"`
-	Tags         []*models.ScrapedTag          `json:"tags"`
-	Performers   []*models.ScrapedPerformer    `json:"performers"`
-	Groups       []*models.ScrapedGroup        `json:"groups"`
-	Movies       []*models.ScrapedMovie        `json:"movies"`
-	RemoteSiteID *string                       `json:"remote_site_id"`
-	Duration     *int                          `json:"duration"`
-	Fingerprints []*models.StashBoxFingerprint `json:"fingerprints"`
-}
-
-func (ScrapedScene) IsScrapedContent() {}
-
-type ScrapedSceneInput struct {
-	Title        *string  `json:"title"`
-	Code         *string  `json:"code"`
-	Details      *string  `json:"details"`
-	Director     *string  `json:"director"`
-	URL          *string  `json:"url"`
-	URLs         []string `json:"urls"`
-	Date         *string  `json:"date"`
-	RemoteSiteID *string  `json:"remote_site_id"`
-}
@@ -163,9 +163,9 @@ var (
// set to nil.
type Input struct {
	Performer *ScrapedPerformerInput
-	Scene     *ScrapedSceneInput
+	Scene     *models.ScrapedSceneInput
-	Gallery   *ScrapedGalleryInput
+	Gallery   *models.ScrapedGalleryInput
-	Image     *ScrapedImageInput
+	Image     *models.ScrapedImageInput
}

// populateURL populates the URL field of the input based on the
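Fragment scrapes therefore take the models input types now. A hedged sketch of building an Input for a scene fragment scrape (the title value and surrounding call are illustrative, not part of this commit):

	title := "Scene title to look up"
	input := scraper.Input{
		Scene: &models.ScrapedSceneInput{
			Title: &title,
		},
	}
	_ = input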
@@ -227,7 +227,7 @@ type fragmentScraper interface {
type sceneScraper interface {
	scraper

-	viaScene(ctx context.Context, client *http.Client, scene *models.Scene) (*ScrapedScene, error)
+	viaScene(ctx context.Context, client *http.Client, scene *models.Scene) (*models.ScrapedScene, error)
}

// imageScraper is a scraper which supports image scrapes with

@@ -235,7 +235,7 @@ type sceneScraper interface {
type imageScraper interface {
	scraper

-	viaImage(ctx context.Context, client *http.Client, image *models.Image) (*ScrapedImage, error)
+	viaImage(ctx context.Context, client *http.Client, image *models.Image) (*models.ScrapedImage, error)
}

// galleryScraper is a scraper which supports gallery scrapes with

@@ -243,5 +243,5 @@ type imageScraper interface {
type galleryScraper interface {
	scraper

-	viaGallery(ctx context.Context, client *http.Client, gallery *models.Gallery) (*ScrapedGallery, error)
+	viaGallery(ctx context.Context, client *http.Client, gallery *models.Gallery) (*models.ScrapedGallery, error)
}
@@ -328,7 +328,7 @@ func (s *scriptScraper) scrapeByName(ctx context.Context, name string, ty Scrape
			}
		}
	case ScrapeContentTypeScene:
-		var scenes []ScrapedScene
+		var scenes []models.ScrapedScene
		err = s.runScraperScript(ctx, input, &scenes)
		if err == nil {
			for _, s := range scenes {

@@ -377,11 +377,11 @@ func (s *scriptScraper) scrape(ctx context.Context, input string, ty ScrapeConte
		err := s.runScraperScript(ctx, input, &performer)
		return performer, err
	case ScrapeContentTypeGallery:
-		var gallery *ScrapedGallery
+		var gallery *models.ScrapedGallery
		err := s.runScraperScript(ctx, input, &gallery)
		return gallery, err
	case ScrapeContentTypeScene:
-		var scene *ScrapedScene
+		var scene *models.ScrapedScene
		err := s.runScraperScript(ctx, input, &scene)
		return scene, err
	case ScrapeContentTypeMovie, ScrapeContentTypeGroup:

@@ -389,7 +389,7 @@ func (s *scriptScraper) scrape(ctx context.Context, input string, ty ScrapeConte
		err := s.runScraperScript(ctx, input, &movie)
		return movie, err
	case ScrapeContentTypeImage:
-		var image *ScrapedImage
+		var image *models.ScrapedImage
		err := s.runScraperScript(ctx, input, &image)
		return image, err
	}

@@ -397,42 +397,42 @@ func (s *scriptScraper) scrape(ctx context.Context, input string, ty ScrapeConte
	return nil, ErrNotSupported
}

-func (s *scriptScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*ScrapedScene, error) {
+func (s *scriptScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) {
	inString, err := json.Marshal(sceneInputFromScene(scene))

	if err != nil {
		return nil, err
	}

-	var ret *ScrapedScene
+	var ret *models.ScrapedScene

	err = s.runScraperScript(ctx, string(inString), &ret)

	return ret, err
}

-func (s *scriptScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*ScrapedGallery, error) {
+func (s *scriptScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error) {
	inString, err := json.Marshal(galleryInputFromGallery(gallery))

	if err != nil {
		return nil, err
	}

-	var ret *ScrapedGallery
+	var ret *models.ScrapedGallery

	err = s.runScraperScript(ctx, string(inString), &ret)

	return ret, err
}

-func (s *scriptScraper) scrapeImageByImage(ctx context.Context, image *models.Image) (*ScrapedImage, error) {
+func (s *scriptScraper) scrapeImageByImage(ctx context.Context, image *models.Image) (*models.ScrapedImage, error) {
	inString, err := json.Marshal(imageToUpdateInput(image))

	if err != nil {
		return nil, err
	}

-	var ret *ScrapedImage
+	var ret *models.ScrapedImage

	err = s.runScraperScript(ctx, string(inString), &ret)
@@ -216,11 +216,11 @@ type scrapedStudioStash struct {

type stashFindSceneNamesResultType struct {
	Count  int                  `graphql:"count"`
-	Scenes []*scrapedSceneStash `graphql:"scenes"`
+	Scenes []*ScrapedSceneStash `graphql:"scenes"`
}

-func (s *stashScraper) scrapedStashSceneToScrapedScene(ctx context.Context, scene *scrapedSceneStash) (*ScrapedScene, error) {
+func (s *stashScraper) scrapedStashSceneToScrapedScene(ctx context.Context, scene *ScrapedSceneStash) (*models.ScrapedScene, error) {
-	ret := ScrapedScene{}
+	ret := models.ScrapedScene{}
	err := copier.Copy(&ret, scene)
	if err != nil {
		return nil, err

@@ -329,7 +329,7 @@ func (f stashVideoFile) SceneFileType() models.SceneFileType {
	return ret
}

-type scrapedSceneStash struct {
+type ScrapedSceneStash struct {
	ID      string  `graphql:"id" json:"id"`
	Title   *string `graphql:"title" json:"title"`
	Details *string `graphql:"details" json:"details"`

@@ -341,10 +341,10 @@ type scrapedSceneStash struct {
	Performers []*scrapedPerformerStash `graphql:"performers" json:"performers"`
}

-func (s *stashScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*ScrapedScene, error) {
+func (s *stashScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) {
	// query by MD5
	var q struct {
-		FindScene *scrapedSceneStash `graphql:"findSceneByHash(input: $c)"`
+		FindScene *ScrapedSceneStash `graphql:"findSceneByHash(input: $c)"`
	}

	type SceneHashInput struct {

@@ -401,7 +401,7 @@ type scrapedGalleryStash struct {
	Performers []*scrapedPerformerStash `graphql:"performers" json:"performers"`
}

-func (s *stashScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*ScrapedGallery, error) {
+func (s *stashScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error) {
	var q struct {
		FindGallery *scrapedGalleryStash `graphql:"findGalleryByHash(input: $c)"`
	}

@@ -425,7 +425,7 @@ func (s *stashScraper) scrapeGalleryByGallery(ctx context.Context, gallery *mode
	}

	// need to copy back to a scraped scene
-	ret := ScrapedGallery{}
+	ret := models.ScrapedGallery{}
	if err := copier.Copy(&ret, q.FindGallery); err != nil {
		return nil, err
	}

@@ -433,7 +433,7 @@ func (s *stashScraper) scrapeGalleryByGallery(ctx context.Context, gallery *mode
	return &ret, nil
}

-func (s *stashScraper) scrapeImageByImage(ctx context.Context, image *models.Image) (*ScrapedImage, error) {
+func (s *stashScraper) scrapeImageByImage(ctx context.Context, image *models.Image) (*models.ScrapedImage, error) {
	return nil, ErrNotSupported
}
@@ -1,102 +0,0 @@
-// Package stashbox provides a client interface to a stash-box server instance.
-package stashbox
-
-import (
-	"context"
-	"net/http"
-	"regexp"
-
-	"github.com/Yamashou/gqlgenc/clientv2"
-	"github.com/stashapp/stash/pkg/match"
-	"github.com/stashapp/stash/pkg/models"
-	"github.com/stashapp/stash/pkg/scraper"
-	"github.com/stashapp/stash/pkg/scraper/stashbox/graphql"
-	"github.com/stashapp/stash/pkg/txn"
-)
-
-type SceneReader interface {
-	models.SceneGetter
-	models.StashIDLoader
-	models.VideoFileLoader
-}
-
-type PerformerReader interface {
-	models.PerformerGetter
-	match.PerformerFinder
-	models.AliasLoader
-	models.StashIDLoader
-	models.URLLoader
-	FindBySceneID(ctx context.Context, sceneID int) ([]*models.Performer, error)
-	GetImage(ctx context.Context, performerID int) ([]byte, error)
-}
-
-type StudioReader interface {
-	models.StudioGetter
-	match.StudioFinder
-	models.StashIDLoader
-}
-
-type TagFinder interface {
-	models.TagQueryer
-	FindBySceneID(ctx context.Context, sceneID int) ([]*models.Tag, error)
-}
-
-type Repository struct {
-	TxnManager models.TxnManager
-
-	Scene     SceneReader
-	Performer PerformerReader
-	Tag       TagFinder
-	Studio    StudioReader
-}
-
-func NewRepository(repo models.Repository) Repository {
-	return Repository{
-		TxnManager: repo.TxnManager,
-		Scene:      repo.Scene,
-		Performer:  repo.Performer,
-		Tag:        repo.Tag,
-		Studio:     repo.Studio,
-	}
-}
-
-func (r *Repository) WithReadTxn(ctx context.Context, fn txn.TxnFunc) error {
-	return txn.WithReadTxn(ctx, r.TxnManager, fn)
-}
-
-// Client represents the client interface to a stash-box server instance.
-type Client struct {
-	client     *graphql.Client
-	repository Repository
-	box        models.StashBox
-
-	// tag patterns to be excluded
-	excludeTagRE []*regexp.Regexp
-}
-
-// NewClient returns a new instance of a stash-box client.
-func NewClient(box models.StashBox, repo Repository, excludeTagPatterns []string) *Client {
-	authHeader := func(ctx context.Context, req *http.Request, gqlInfo *clientv2.GQLRequestInfo, res interface{}, next clientv2.RequestInterceptorFunc) error {
-		req.Header.Set("ApiKey", box.APIKey)
-		return next(ctx, req, gqlInfo, res)
-	}
-
-	client := &graphql.Client{
-		Client: clientv2.NewClient(http.DefaultClient, box.Endpoint, nil, authHeader),
-	}
-
-	return &Client{
-		client:       client,
-		repository:   repo,
-		box:          box,
-		excludeTagRE: scraper.CompileExclusionRegexps(excludeTagPatterns),
-	}
-}
-
-func (c Client) getHTTPClient() *http.Client {
-	return c.client.Client.Client
-}
-
-func (c Client) GetUser(ctx context.Context) (*graphql.Me, error) {
-	return c.client.Me(ctx)
-}
@@ -1,13 +0,0 @@
-package stashbox
-
-import "github.com/stashapp/stash/pkg/models"
-
-type StashBoxStudioQueryResult struct {
-	Query   string                  `json:"query"`
-	Results []*models.ScrapedStudio `json:"results"`
-}
-
-type StashBoxPerformerQueryResult struct {
-	Query   string                     `json:"query"`
-	Results []*models.ScrapedPerformer `json:"results"`
-}
@@ -1,589 +0,0 @@
-package stashbox
-
-import (
-	"bytes"
-	"context"
-	"errors"
-	"fmt"
-	"io"
-	"net/http"
-	"strings"
-
-	"github.com/stashapp/stash/pkg/logger"
-	"github.com/stashapp/stash/pkg/match"
-	"github.com/stashapp/stash/pkg/models"
-	"github.com/stashapp/stash/pkg/scraper"
-	"github.com/stashapp/stash/pkg/scraper/stashbox/graphql"
-	"github.com/stashapp/stash/pkg/sliceutil"
-	"github.com/stashapp/stash/pkg/sliceutil/stringslice"
-	"github.com/stashapp/stash/pkg/utils"
-)
-
-// QueryStashBoxScene queries stash-box for scenes using a query string.
-func (c Client) QueryStashBoxScene(ctx context.Context, queryStr string) ([]*scraper.ScrapedScene, error) {
-	scenes, err := c.client.SearchScene(ctx, queryStr)
-	if err != nil {
-		return nil, err
-	}
-
-	sceneFragments := scenes.SearchScene
-
-	var ret []*scraper.ScrapedScene
-	var ignoredTags []string
-	for _, s := range sceneFragments {
-		ss, err := c.sceneFragmentToScrapedScene(ctx, s)
-		if err != nil {
-			return nil, err
-		}
-
-		var thisIgnoredTags []string
-		ss.Tags, thisIgnoredTags = scraper.FilterTags(c.excludeTagRE, ss.Tags)
-		ignoredTags = sliceutil.AppendUniques(ignoredTags, thisIgnoredTags)
-
-		ret = append(ret, ss)
-	}
-
-	scraper.LogIgnoredTags(ignoredTags)
-
-	return ret, nil
-}
-
-// FindStashBoxScenesByFingerprints queries stash-box for a scene using the
-// scene's MD5/OSHASH checksum, or PHash.
-func (c Client) FindStashBoxSceneByFingerprints(ctx context.Context, sceneID int) ([]*scraper.ScrapedScene, error) {
-	res, err := c.FindStashBoxScenesByFingerprints(ctx, []int{sceneID})
-	if len(res) > 0 {
-		return res[0], err
-	}
-	return nil, err
-}
-
-// FindStashBoxScenesByFingerprints queries stash-box for scenes using every
-// scene's MD5/OSHASH checksum, or PHash, and returns results in the same order
-// as the input slice.
-func (c Client) FindStashBoxScenesByFingerprints(ctx context.Context, ids []int) ([][]*scraper.ScrapedScene, error) {
-	var fingerprints [][]*graphql.FingerprintQueryInput
-
-	r := c.repository
-	if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
-		qb := r.Scene
-
-		for _, sceneID := range ids {
-			scene, err := qb.Find(ctx, sceneID)
-			if err != nil {
-				return err
-			}
-
-			if scene == nil {
-				return fmt.Errorf("scene with id %d not found", sceneID)
-			}
-
-			if err := scene.LoadFiles(ctx, r.Scene); err != nil {
-				return err
-			}
-
-			var sceneFPs []*graphql.FingerprintQueryInput
-
-			for _, f := range scene.Files.List() {
-				checksum := f.Fingerprints.GetString(models.FingerprintTypeMD5)
-				if checksum != "" {
-					sceneFPs = append(sceneFPs, &graphql.FingerprintQueryInput{
-						Hash:      checksum,
-						Algorithm: graphql.FingerprintAlgorithmMd5,
-					})
-				}
-
-				oshash := f.Fingerprints.GetString(models.FingerprintTypeOshash)
-				if oshash != "" {
-					sceneFPs = append(sceneFPs, &graphql.FingerprintQueryInput{
-						Hash:      oshash,
-						Algorithm: graphql.FingerprintAlgorithmOshash,
-					})
-				}
-
-				phash := f.Fingerprints.GetInt64(models.FingerprintTypePhash)
-				if phash != 0 {
-					phashStr := utils.PhashToString(phash)
-					sceneFPs = append(sceneFPs, &graphql.FingerprintQueryInput{
-						Hash:      phashStr,
-						Algorithm: graphql.FingerprintAlgorithmPhash,
-					})
-				}
-			}
-
-			fingerprints = append(fingerprints, sceneFPs)
-		}
-
-		return nil
-	}); err != nil {
-		return nil, err
-	}
-
-	return c.findStashBoxScenesByFingerprints(ctx, fingerprints)
-}
-
-func (c Client) findStashBoxScenesByFingerprints(ctx context.Context, scenes [][]*graphql.FingerprintQueryInput) ([][]*scraper.ScrapedScene, error) {
-	var results [][]*scraper.ScrapedScene
-
-	// filter out nils
-	var validScenes [][]*graphql.FingerprintQueryInput
-	for _, s := range scenes {
-		if len(s) > 0 {
-			validScenes = append(validScenes, s)
-		}
-	}
-
-	var ignoredTags []string
-
-	for i := 0; i < len(validScenes); i += 40 {
-		end := i + 40
-		if end > len(validScenes) {
-			end = len(validScenes)
-		}
-		scenes, err := c.client.FindScenesBySceneFingerprints(ctx, validScenes[i:end])
-
-		if err != nil {
-			return nil, err
-		}
-
-		for _, sceneFragments := range scenes.FindScenesBySceneFingerprints {
-			var sceneResults []*scraper.ScrapedScene
-			for _, scene := range sceneFragments {
-				ss, err := c.sceneFragmentToScrapedScene(ctx, scene)
-				if err != nil {
-					return nil, err
-				}
-
-				var thisIgnoredTags []string
-				ss.Tags, thisIgnoredTags = scraper.FilterTags(c.excludeTagRE, ss.Tags)
-				ignoredTags = sliceutil.AppendUniques(ignoredTags, thisIgnoredTags)
-
-				sceneResults = append(sceneResults, ss)
-			}
-			results = append(results, sceneResults)
-		}
-	}
-
-	scraper.LogIgnoredTags(ignoredTags)
-
-	// repopulate the results to be the same order as the input
-	ret := make([][]*scraper.ScrapedScene, len(scenes))
-	upTo := 0
-
-	for i, v := range scenes {
-		if len(v) > 0 {
-			ret[i] = results[upTo]
-			upTo++
-		}
-	}
-
-	return ret, nil
-}
-
-func (c Client) sceneFragmentToScrapedScene(ctx context.Context, s *graphql.SceneFragment) (*scraper.ScrapedScene, error) {
-	stashID := s.ID
-
-	ss := &scraper.ScrapedScene{
-		Title:        s.Title,
-		Code:         s.Code,
-		Date:         s.Date,
-		Details:      s.Details,
-		Director:     s.Director,
-		URL:          findURL(s.Urls, "STUDIO"),
-		Duration:     s.Duration,
-		RemoteSiteID: &stashID,
-		Fingerprints: getFingerprints(s),
-		// Image
-		// stash_id
-	}
-
-	for _, u := range s.Urls {
-		ss.URLs = append(ss.URLs, u.URL)
-	}
-
-	if len(ss.URLs) > 0 {
-		ss.URL = &ss.URLs[0]
-	}
-
-	if len(s.Images) > 0 {
-		// TODO - #454 code sorts images by aspect ratio according to a wanted
-		// orientation. I'm just grabbing the first for now
-		ss.Image = getFirstImage(ctx, c.getHTTPClient(), s.Images)
-	}
-
-	if ss.URL == nil && len(s.Urls) > 0 {
-		// The scene in Stash-box may not have a Studio URL but it does have another URL.
-		// For example it has a www.manyvids.com URL, which is auto set as type ManyVids.
-		// This should be re-visited once Stashapp can support more than one URL.
-		ss.URL = &s.Urls[0].URL
-	}
-
-	r := c.repository
-	if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
-		pqb := r.Performer
-		tqb := r.Tag
-
-		if s.Studio != nil {
-			ss.Studio = studioFragmentToScrapedStudio(*s.Studio)
-
-			err := match.ScrapedStudio(ctx, r.Studio, ss.Studio, &c.box.Endpoint)
-			if err != nil {
-				return err
-			}
-
-			var parentStudio *graphql.FindStudio
-			if s.Studio.Parent != nil {
-				parentStudio, err = c.client.FindStudio(ctx, &s.Studio.Parent.ID, nil)
-				if err != nil {
-					return err
-				}
if parentStudio.FindStudio != nil {
|
|
||||||
ss.Studio.Parent = studioFragmentToScrapedStudio(*parentStudio.FindStudio)
|
|
||||||
|
|
||||||
err = match.ScrapedStudio(ctx, r.Studio, ss.Studio.Parent, &c.box.Endpoint)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, p := range s.Performers {
|
|
||||||
sp := performerFragmentToScrapedPerformer(*p.Performer)
|
|
||||||
|
|
||||||
err := match.ScrapedPerformer(ctx, pqb, sp, &c.box.Endpoint)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
ss.Performers = append(ss.Performers, sp)
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, t := range s.Tags {
|
|
||||||
st := &models.ScrapedTag{
|
|
||||||
Name: t.Name,
|
|
||||||
}
|
|
||||||
|
|
||||||
err := match.ScrapedTag(ctx, tqb, st)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
ss.Tags = append(ss.Tags, st)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return ss, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func getFirstImage(ctx context.Context, client *http.Client, images []*graphql.ImageFragment) *string {
|
|
||||||
ret, err := fetchImage(ctx, client, images[0].URL)
|
|
||||||
if err != nil && !errors.Is(err, context.Canceled) {
|
|
||||||
logger.Warnf("Error fetching image %s: %s", images[0].URL, err.Error())
|
|
||||||
}
|
|
||||||
|
|
||||||
return ret
|
|
||||||
}
|
|
||||||
|
|
||||||
func getFingerprints(scene *graphql.SceneFragment) []*models.StashBoxFingerprint {
|
|
||||||
fingerprints := []*models.StashBoxFingerprint{}
|
|
||||||
for _, fp := range scene.Fingerprints {
|
|
||||||
fingerprint := models.StashBoxFingerprint{
|
|
||||||
Algorithm: fp.Algorithm.String(),
|
|
||||||
Hash: fp.Hash,
|
|
||||||
Duration: fp.Duration,
|
|
||||||
}
|
|
||||||
fingerprints = append(fingerprints, &fingerprint)
|
|
||||||
}
|
|
||||||
return fingerprints
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, cover []byte) (*string, error) {
|
|
||||||
draft := graphql.SceneDraftInput{}
|
|
||||||
var image io.Reader
|
|
||||||
r := c.repository
|
|
||||||
pqb := r.Performer
|
|
||||||
sqb := r.Studio
|
|
||||||
endpoint := c.box.Endpoint
|
|
||||||
|
|
||||||
if scene.Title != "" {
|
|
||||||
draft.Title = &scene.Title
|
|
||||||
}
|
|
||||||
if scene.Code != "" {
|
|
||||||
draft.Code = &scene.Code
|
|
||||||
}
|
|
||||||
if scene.Details != "" {
|
|
||||||
draft.Details = &scene.Details
|
|
||||||
}
|
|
||||||
if scene.Director != "" {
|
|
||||||
draft.Director = &scene.Director
|
|
||||||
}
|
|
||||||
// TODO - draft does not accept multiple URLs. Use single URL for now.
|
|
||||||
if len(scene.URLs.List()) > 0 {
|
|
||||||
url := strings.TrimSpace(scene.URLs.List()[0])
|
|
||||||
draft.URL = &url
|
|
||||||
}
|
|
||||||
if scene.Date != nil {
|
|
||||||
v := scene.Date.String()
|
|
||||||
draft.Date = &v
|
|
||||||
}
|
|
||||||
|
|
||||||
if scene.StudioID != nil {
|
|
||||||
studio, err := sqb.Find(ctx, *scene.StudioID)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
if studio == nil {
|
|
||||||
return nil, fmt.Errorf("studio with id %d not found", *scene.StudioID)
|
|
||||||
}
|
|
||||||
|
|
||||||
studioDraft := graphql.DraftEntityInput{
|
|
||||||
Name: studio.Name,
|
|
||||||
}
|
|
||||||
|
|
||||||
stashIDs, err := sqb.GetStashIDs(ctx, studio.ID)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
for _, stashID := range stashIDs {
|
|
||||||
c := stashID
|
|
||||||
if stashID.Endpoint == endpoint {
|
|
||||||
studioDraft.ID = &c.StashID
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
draft.Studio = &studioDraft
|
|
||||||
}
|
|
||||||
|
|
||||||
fingerprints := []*graphql.FingerprintInput{}
|
|
||||||
|
|
||||||
// submit all file fingerprints
|
|
||||||
if err := scene.LoadFiles(ctx, r.Scene); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, f := range scene.Files.List() {
|
|
||||||
duration := f.Duration
|
|
||||||
|
|
||||||
if duration != 0 {
|
|
||||||
if oshash := f.Fingerprints.GetString(models.FingerprintTypeOshash); oshash != "" {
|
|
||||||
fingerprint := graphql.FingerprintInput{
|
|
||||||
Hash: oshash,
|
|
||||||
Algorithm: graphql.FingerprintAlgorithmOshash,
|
|
||||||
Duration: int(duration),
|
|
||||||
}
|
|
||||||
fingerprints = appendFingerprintUnique(fingerprints, &fingerprint)
|
|
||||||
}
|
|
||||||
|
|
||||||
if checksum := f.Fingerprints.GetString(models.FingerprintTypeMD5); checksum != "" {
|
|
||||||
fingerprint := graphql.FingerprintInput{
|
|
||||||
Hash: checksum,
|
|
||||||
Algorithm: graphql.FingerprintAlgorithmMd5,
|
|
||||||
Duration: int(duration),
|
|
||||||
}
|
|
||||||
fingerprints = appendFingerprintUnique(fingerprints, &fingerprint)
|
|
||||||
}
|
|
||||||
|
|
||||||
if phash := f.Fingerprints.GetInt64(models.FingerprintTypePhash); phash != 0 {
|
|
||||||
fingerprint := graphql.FingerprintInput{
|
|
||||||
Hash: utils.PhashToString(phash),
|
|
||||||
Algorithm: graphql.FingerprintAlgorithmPhash,
|
|
||||||
Duration: int(duration),
|
|
||||||
}
|
|
||||||
fingerprints = appendFingerprintUnique(fingerprints, &fingerprint)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
draft.Fingerprints = fingerprints
|
|
||||||
|
|
||||||
scenePerformers, err := pqb.FindBySceneID(ctx, scene.ID)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
performers := []*graphql.DraftEntityInput{}
|
|
||||||
for _, p := range scenePerformers {
|
|
||||||
performerDraft := graphql.DraftEntityInput{
|
|
||||||
Name: p.Name,
|
|
||||||
}
|
|
||||||
|
|
||||||
stashIDs, err := pqb.GetStashIDs(ctx, p.ID)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, stashID := range stashIDs {
|
|
||||||
c := stashID
|
|
||||||
if stashID.Endpoint == endpoint {
|
|
||||||
performerDraft.ID = &c.StashID
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
performers = append(performers, &performerDraft)
|
|
||||||
}
|
|
||||||
draft.Performers = performers
|
|
||||||
|
|
||||||
var tags []*graphql.DraftEntityInput
|
|
||||||
sceneTags, err := r.Tag.FindBySceneID(ctx, scene.ID)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
for _, tag := range sceneTags {
|
|
||||||
tags = append(tags, &graphql.DraftEntityInput{Name: tag.Name})
|
|
||||||
}
|
|
||||||
draft.Tags = tags
|
|
||||||
|
|
||||||
if len(cover) > 0 {
|
|
||||||
image = bytes.NewReader(cover)
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := scene.LoadStashIDs(ctx, r.Scene); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
stashIDs := scene.StashIDs.List()
|
|
||||||
var stashID *string
|
|
||||||
for _, v := range stashIDs {
|
|
||||||
if v.Endpoint == endpoint {
|
|
||||||
vv := v.StashID
|
|
||||||
stashID = &vv
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
draft.ID = stashID
|
|
||||||
|
|
||||||
var id *string
|
|
||||||
var ret graphql.SubmitSceneDraft
|
|
||||||
err = c.submitDraft(ctx, graphql.SubmitSceneDraftDocument, draft, image, &ret)
|
|
||||||
id = ret.SubmitSceneDraft.ID
|
|
||||||
|
|
||||||
return id, err
|
|
||||||
|
|
||||||
// ret, err := c.client.SubmitSceneDraft(ctx, draft, uploadImage(image))
|
|
||||||
// if err != nil {
|
|
||||||
// return nil, err
|
|
||||||
// }
|
|
||||||
|
|
||||||
// id := ret.SubmitSceneDraft.ID
|
|
||||||
// return id, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c Client) SubmitStashBoxFingerprints(ctx context.Context, sceneIDs []string) (bool, error) {
|
|
||||||
ids, err := stringslice.StringSliceToIntSlice(sceneIDs)
|
|
||||||
if err != nil {
|
|
||||||
return false, err
|
|
||||||
}
|
|
||||||
|
|
||||||
endpoint := c.box.Endpoint
|
|
||||||
|
|
||||||
var fingerprints []graphql.FingerprintSubmission
|
|
||||||
|
|
||||||
r := c.repository
|
|
||||||
if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
|
|
||||||
qb := r.Scene
|
|
||||||
|
|
||||||
for _, sceneID := range ids {
|
|
||||||
scene, err := qb.Find(ctx, sceneID)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if scene == nil {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := scene.LoadStashIDs(ctx, qb); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := scene.LoadFiles(ctx, qb); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
stashIDs := scene.StashIDs.List()
|
|
||||||
sceneStashID := ""
|
|
||||||
for _, stashID := range stashIDs {
|
|
||||||
if stashID.Endpoint == endpoint {
|
|
||||||
sceneStashID = stashID.StashID
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if sceneStashID != "" {
|
|
||||||
for _, f := range scene.Files.List() {
|
|
||||||
duration := f.Duration
|
|
||||||
|
|
||||||
if duration != 0 {
|
|
||||||
if checksum := f.Fingerprints.GetString(models.FingerprintTypeMD5); checksum != "" {
|
|
||||||
fingerprint := graphql.FingerprintInput{
|
|
||||||
Hash: checksum,
|
|
||||||
Algorithm: graphql.FingerprintAlgorithmMd5,
|
|
||||||
Duration: int(duration),
|
|
||||||
}
|
|
||||||
fingerprints = append(fingerprints, graphql.FingerprintSubmission{
|
|
||||||
SceneID: sceneStashID,
|
|
||||||
Fingerprint: &fingerprint,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
if oshash := f.Fingerprints.GetString(models.FingerprintTypeOshash); oshash != "" {
|
|
||||||
fingerprint := graphql.FingerprintInput{
|
|
||||||
Hash: oshash,
|
|
||||||
Algorithm: graphql.FingerprintAlgorithmOshash,
|
|
||||||
Duration: int(duration),
|
|
||||||
}
|
|
||||||
fingerprints = append(fingerprints, graphql.FingerprintSubmission{
|
|
||||||
SceneID: sceneStashID,
|
|
||||||
Fingerprint: &fingerprint,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
if phash := f.Fingerprints.GetInt64(models.FingerprintTypePhash); phash != 0 {
|
|
||||||
fingerprint := graphql.FingerprintInput{
|
|
||||||
Hash: utils.PhashToString(phash),
|
|
||||||
Algorithm: graphql.FingerprintAlgorithmPhash,
|
|
||||||
Duration: int(duration),
|
|
||||||
}
|
|
||||||
fingerprints = append(fingerprints, graphql.FingerprintSubmission{
|
|
||||||
SceneID: sceneStashID,
|
|
||||||
Fingerprint: &fingerprint,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}); err != nil {
|
|
||||||
return false, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return c.submitStashBoxFingerprints(ctx, fingerprints)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c Client) submitStashBoxFingerprints(ctx context.Context, fingerprints []graphql.FingerprintSubmission) (bool, error) {
|
|
||||||
for _, fingerprint := range fingerprints {
|
|
||||||
_, err := c.client.SubmitFingerprint(ctx, fingerprint)
|
|
||||||
if err != nil {
|
|
||||||
return false, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return true, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func appendFingerprintUnique(v []*graphql.FingerprintInput, toAdd *graphql.FingerprintInput) []*graphql.FingerprintInput {
|
|
||||||
for _, vv := range v {
|
|
||||||
if vv.Algorithm == toAdd.Algorithm && vv.Hash == toAdd.Hash {
|
|
||||||
return v
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return append(v, toAdd)
|
|
||||||
}
|
|
||||||
|
|
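The fingerprint query above batches stash-box requests in chunks of 40 and then re-expands the results so they line up with the original input order, skipping inputs that had no fingerprints. The same pattern, reduced to a standalone sketch: the chunk size of 40 is taken from the code above; the names and the stand-in query function are illustrative only.

package main

import "fmt"

// queryByChunks shows the batching/reordering pattern used above: inputs with
// no data are skipped, the rest are queried 40 at a time, and each result is
// placed back at the index of the input that produced it (one result per
// queried input is assumed).
func queryByChunks(inputs [][]string, query func([][]string) []string) []string {
	var valid [][]string
	for _, in := range inputs {
		if len(in) > 0 {
			valid = append(valid, in)
		}
	}

	var flat []string
	for i := 0; i < len(valid); i += 40 {
		end := i + 40
		if end > len(valid) {
			end = len(valid)
		}
		flat = append(flat, query(valid[i:end])...)
	}

	ret := make([]string, len(inputs))
	upTo := 0
	for i, in := range inputs {
		if len(in) > 0 {
			ret[i] = flat[upTo]
			upTo++
		}
	}
	return ret
}

func main() {
	inputs := [][]string{{"a"}, {}, {"b"}}
	out := queryByChunks(inputs, func(batch [][]string) []string {
		res := make([]string, len(batch))
		for i, b := range batch {
			res[i] = "result for " + b[0]
		}
		return res
	})
	// The empty middle input keeps its slot, so the output stays aligned
	// with the input: [result for a  result for b]
	fmt.Println(out)
}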
@@ -151,7 +151,7 @@ func (s *xpathScraper) scrapeByName(ctx context.Context, name string, ty ScrapeC
 	return nil, ErrNotSupported
 }

-func (s *xpathScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*ScrapedScene, error) {
+func (s *xpathScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) {
 	// construct the URL
 	queryURL := queryURLParametersFromScene(scene)
 	if s.scraper.QueryURLReplacements != nil {

@@ -210,7 +210,7 @@ func (s *xpathScraper) scrapeByFragment(ctx context.Context, input Input) (Scrap
 	return scraper.scrapeScene(ctx, q)
 }

-func (s *xpathScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*ScrapedGallery, error) {
+func (s *xpathScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error) {
 	// construct the URL
 	queryURL := queryURLParametersFromGallery(gallery)
 	if s.scraper.QueryURLReplacements != nil {

@@ -234,7 +234,7 @@ func (s *xpathScraper) scrapeGalleryByGallery(ctx context.Context, gallery *mode
 	return scraper.scrapeGallery(ctx, q)
 }

-func (s *xpathScraper) scrapeImageByImage(ctx context.Context, image *models.Image) (*ScrapedImage, error) {
+func (s *xpathScraper) scrapeImageByImage(ctx context.Context, image *models.Image) (*models.ScrapedImage, error) {
 	// construct the URL
 	queryURL := queryURLParametersFromImage(image)
 	if s.scraper.QueryURLReplacements != nil {

@@ -167,3 +167,13 @@ func ValuesToPtrs[T any](vs []T) []*T {
 	}
 	return ret
 }
+
+// Flatten returns a single slice containing all elements of the provided
+// slice of slices.
+func Flatten[T any](vs [][]T) []T {
+	var ret []T
+	for _, v := range vs {
+		ret = append(ret, v...)
+	}
+	return ret
+}
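The new sliceutil.Flatten generic is a small helper for collapsing per-item result slices into one slice. A minimal usage sketch; the int instantiation is just for illustration:

package main

import (
	"fmt"

	"github.com/stashapp/stash/pkg/sliceutil"
)

func main() {
	groups := [][]int{{1, 2}, {}, {3}}
	// Flatten concatenates the inner slices in order, printing [1 2 3].
	fmt.Println(sliceutil.Flatten(groups))
}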
pkg/stashbox/client.go (new file, 48 lines)
@@ -0,0 +1,48 @@
// Package stashbox provides a client interface to a stash-box server instance.
package stashbox

import (
	"context"
	"net/http"
	"regexp"

	"github.com/Yamashou/gqlgenc/clientv2"
	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/scraper"
	"github.com/stashapp/stash/pkg/stashbox/graphql"
)

// Client represents the client interface to a stash-box server instance.
type Client struct {
	client *graphql.Client
	box    models.StashBox

	// tag patterns to be excluded
	excludeTagRE []*regexp.Regexp
}

// NewClient returns a new instance of a stash-box client.
func NewClient(box models.StashBox, excludeTagPatterns []string) *Client {
	authHeader := func(ctx context.Context, req *http.Request, gqlInfo *clientv2.GQLRequestInfo, res interface{}, next clientv2.RequestInterceptorFunc) error {
		req.Header.Set("ApiKey", box.APIKey)
		return next(ctx, req, gqlInfo, res)
	}

	client := &graphql.Client{
		Client: clientv2.NewClient(http.DefaultClient, box.Endpoint, nil, authHeader),
	}

	return &Client{
		client:       client,
		box:          box,
		excludeTagRE: scraper.CompileExclusionRegexps(excludeTagPatterns),
	}
}

func (c Client) getHTTPClient() *http.Client {
	return c.client.Client.Client
}

func (c Client) GetUser(ctx context.Context) (*graphql.Me, error) {
	return c.client.Me(ctx)
}
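With the repository dependency gone, constructing the client only needs the stash-box endpoint, the API key and the tag exclusion patterns. A minimal wiring sketch, assuming the box values come from configuration; the endpoint and key below are placeholders, not part of this change:

package main

import (
	"context"
	"log"

	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/stashbox"
)

func main() {
	// Placeholder values: in stash these would come from the configured
	// stash-box instances, not be hard-coded.
	box := models.StashBox{
		Endpoint: "https://stashbox.example/graphql",
		APIKey:   "example-api-key",
	}

	// NewClient no longer takes a repository; matching scraped results
	// against the local database is now the caller's responsibility.
	client := stashbox.NewClient(box, nil)

	if _, err := client.GetUser(context.Background()); err != nil {
		log.Fatal(err)
	}
	log.Println("connected to stash-box at", box.Endpoint)
}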
@@ -9,39 +9,31 @@ import (
 	"strconv"
 	"strings"

-	"github.com/stashapp/stash/pkg/match"
 	"github.com/stashapp/stash/pkg/models"
 	"github.com/stashapp/stash/pkg/scraper"
-	"github.com/stashapp/stash/pkg/scraper/stashbox/graphql"
 	"github.com/stashapp/stash/pkg/sliceutil"
 	"github.com/stashapp/stash/pkg/sliceutil/stringslice"
+	"github.com/stashapp/stash/pkg/stashbox/graphql"
 	"github.com/stashapp/stash/pkg/utils"
 	"golang.org/x/text/cases"
 	"golang.org/x/text/language"
 )

-// QueryStashBoxPerformer queries stash-box for performers using a query string.
-func (c Client) QueryStashBoxPerformer(ctx context.Context, queryStr string) ([]*StashBoxPerformerQueryResult, error) {
-	performers, err := c.queryStashBoxPerformer(ctx, queryStr)
-
-	res := []*StashBoxPerformerQueryResult{
-		{
-			Query:   queryStr,
-			Results: performers,
-		},
-	}
+// QueryPerformer queries stash-box for performers using a query string.
+func (c Client) QueryPerformer(ctx context.Context, queryStr string) ([]*models.ScrapedPerformer, error) {
+	performers, err := c.queryPerformer(ctx, queryStr)

 	// set the deprecated image field
-	for _, p := range res[0].Results {
+	for _, p := range performers {
 		if len(p.Images) > 0 {
 			p.Image = &p.Images[0]
 		}
 	}

-	return res, err
+	return performers, err
 }

-func (c Client) queryStashBoxPerformer(ctx context.Context, queryStr string) ([]*models.ScrapedPerformer, error) {
+func (c Client) queryPerformer(ctx context.Context, queryStr string) ([]*models.ScrapedPerformer, error) {
 	performers, err := c.client.SearchPerformer(ctx, queryStr)
 	if err != nil {
 		return nil, err

@@ -67,102 +59,19 @@ func (c Client) queryStashBoxPerformer(ctx context.Context, queryStr string) ([]
 	return ret, nil
 }

-// FindStashBoxPerformersByNames queries stash-box for performers by name
-func (c Client) FindStashBoxPerformersByNames(ctx context.Context, performerIDs []string) ([]*StashBoxPerformerQueryResult, error) {
-	ids, err := stringslice.StringSliceToIntSlice(performerIDs)
+// QueryPerformers queries stash-box for performers using a list of names.
+func (c Client) QueryPerformers(ctx context.Context, names []string) ([][]*models.ScrapedPerformer, error) {
+	ret := make([][]*models.ScrapedPerformer, len(names))
+	for i, name := range names {
+		if name != "" {
+			continue
+		}
+
+		var err error
+		ret[i], err = c.queryPerformer(ctx, name)
 		if err != nil {
 			return nil, err
 		}

-	var performers []*models.Performer
-	r := c.repository
-	if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
-		qb := r.Performer
-
-		for _, performerID := range ids {
-			performer, err := qb.Find(ctx, performerID)
-			if err != nil {
-				return err
-			}
-
-			if performer == nil {
-				return fmt.Errorf("performer with id %d not found", performerID)
-			}
-
-			if performer.Name != "" {
-				performers = append(performers, performer)
-			}
-		}
-
-		return nil
-	}); err != nil {
-		return nil, err
-	}
-
-	return c.findStashBoxPerformersByNames(ctx, performers)
-}
-
-func (c Client) FindStashBoxPerformersByPerformerNames(ctx context.Context, performerIDs []string) ([][]*models.ScrapedPerformer, error) {
-	ids, err := stringslice.StringSliceToIntSlice(performerIDs)
-	if err != nil {
-		return nil, err
-	}
-
-	var performers []*models.Performer
-
-	r := c.repository
-	if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
-		qb := r.Performer
-
-		for _, performerID := range ids {
-			performer, err := qb.Find(ctx, performerID)
-			if err != nil {
-				return err
-			}
-
-			if performer == nil {
-				return fmt.Errorf("performer with id %d not found", performerID)
-			}
-
-			if performer.Name != "" {
-				performers = append(performers, performer)
-			}
-		}
-
-		return nil
-	}); err != nil {
-		return nil, err
-	}
-
-	results, err := c.findStashBoxPerformersByNames(ctx, performers)
-	if err != nil {
-		return nil, err
-	}
-
-	var ret [][]*models.ScrapedPerformer
-	for _, r := range results {
-		ret = append(ret, r.Results)
-	}
-
-	return ret, nil
-}
-
-func (c Client) findStashBoxPerformersByNames(ctx context.Context, performers []*models.Performer) ([]*StashBoxPerformerQueryResult, error) {
-	var ret []*StashBoxPerformerQueryResult
-	for _, performer := range performers {
-		if performer.Name != "" {
-			performerResults, err := c.queryStashBoxPerformer(ctx, performer.Name)
-			if err != nil {
-				return nil, err
-			}
-
-			result := StashBoxPerformerQueryResult{
-				Query:   strconv.Itoa(performer.ID),
-				Results: performerResults,
-			}
-
-			ret = append(ret, &result)
-		}
-	}
 	}

 	return ret, nil

@@ -388,7 +297,8 @@ func padFuzzyDate(date *string) *string {
 	return &paddedDate
 }

-func (c Client) FindStashBoxPerformerByID(ctx context.Context, id string) (*models.ScrapedPerformer, error) {
+// FindPerformerByID queries stash-box for a performer by ID.
+func (c Client) FindPerformerByID(ctx context.Context, id string) (*models.ScrapedPerformer, error) {
 	performer, err := c.client.FindPerformerByID(ctx, id)
 	if err != nil {
 		return nil, err

@@ -400,18 +310,12 @@ func (c Client) FindStashBoxPerformerByID(ctx context.Context, id string) (*mode

 	ret := performerFragmentToScrapedPerformer(*performer.FindPerformer)

-	r := c.repository
-	if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
-		err := match.ScrapedPerformer(ctx, r.Performer, ret, &c.box.Endpoint)
-		return err
-	}); err != nil {
-		return nil, err
-	}
-
 	return ret, nil
 }

-func (c Client) FindStashBoxPerformerByName(ctx context.Context, name string) (*models.ScrapedPerformer, error) {
+// FindPerformerByName queries stash-box for a performer by name.
+// Unlike QueryPerformer, this function will only return a performer if the name matches exactly.
+func (c Client) FindPerformerByName(ctx context.Context, name string) (*models.ScrapedPerformer, error) {
 	performers, err := c.client.SearchPerformer(ctx, name)
 	if err != nil {
 		return nil, err

@@ -424,41 +328,17 @@ func (c Client) FindStashBoxPerformerByName(ctx context.Context, name string) (*
 		}
 	}

-	if ret == nil {
-		return nil, nil
-	}
-
-	r := c.repository
-	if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
-		err := match.ScrapedPerformer(ctx, r.Performer, ret, &c.box.Endpoint)
-		return err
-	}); err != nil {
-		return nil, err
-	}
-
 	return ret, nil
 }

-func (c Client) SubmitPerformerDraft(ctx context.Context, performer *models.Performer) (*string, error) {
+// SubmitPerformerDraft submits a performer draft to stash-box.
+// The performer parameter must have aliases, URLs and stash IDs loaded.
+func (c Client) SubmitPerformerDraft(ctx context.Context, performer *models.Performer, img []byte) (*string, error) {
 	draft := graphql.PerformerDraftInput{}
 	var image io.Reader
-	pqb := c.repository.Performer
 	endpoint := c.box.Endpoint

-	if err := performer.LoadAliases(ctx, pqb); err != nil {
-		return nil, err
-	}
-
-	if err := performer.LoadURLs(ctx, pqb); err != nil {
-		return nil, err
-	}
-
-	if err := performer.LoadStashIDs(ctx, pqb); err != nil {
-		return nil, err
-	}
-
-	img, _ := pqb.GetImage(ctx, performer.ID)
-	if img != nil {
+	if len(img) > 0 {
 		image = bytes.NewReader(img)
 	}

@@ -524,12 +404,8 @@ func (c Client) SubmitPerformerDraft(ctx context.Context, performer *models.Perf
 		draft.Urls = performer.URLs.List()
 	}

-	stashIDs, err := pqb.GetStashIDs(ctx, performer.ID)
-	if err != nil {
-		return nil, err
-	}
 	var stashID *string
-	for _, v := range stashIDs {
+	for _, v := range performer.StashIDs.List() {
 		c := v
 		if v.Endpoint == endpoint {
 			stashID = &c.StashID

@@ -540,7 +416,7 @@ func (c Client) SubmitPerformerDraft(ctx context.Context, performer *models.Perf

 	var id *string
 	var ret graphql.SubmitPerformerDraft
-	err = c.submitDraft(ctx, graphql.SubmitPerformerDraftDocument, draft, image, &ret)
+	err := c.submitDraft(ctx, graphql.SubmitPerformerDraftDocument, draft, image, &ret)
 	id = ret.SubmitPerformerDraft.ID

 	return id, err
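QueryPerformers now takes performer names rather than performer IDs and returns one result slice per name, in input order; matching those results against the local database has moved out to the caller. A rough sketch of the new calling convention, assuming a constructed client; the reporting is illustrative:

package main

import (
	"context"
	"fmt"

	"github.com/stashapp/stash/pkg/stashbox"
)

// findByNames sketches the new calling convention: pass names, get one result
// slice per name back, aligned with the input order.
func findByNames(ctx context.Context, client *stashbox.Client, names []string) error {
	results, err := client.QueryPerformers(ctx, names)
	if err != nil {
		return err
	}

	for i, perName := range results {
		// Matching scraped performers against local performers is now done by
		// the caller, outside the stashbox package.
		fmt.Printf("%q: %d stash-box result(s)\n", names[i], len(perName))
	}
	return nil
}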
pkg/stashbox/scene.go (new file, 468 lines)
@@ -0,0 +1,468 @@
package stashbox

import (
	"bytes"
	"context"
	"errors"
	"io"
	"net/http"
	"strings"

	"github.com/stashapp/stash/pkg/logger"
	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/scraper"
	"github.com/stashapp/stash/pkg/sliceutil"
	"github.com/stashapp/stash/pkg/stashbox/graphql"
	"github.com/stashapp/stash/pkg/utils"
)

// QueryScene queries stash-box for scenes using a query string.
func (c Client) QueryScene(ctx context.Context, queryStr string) ([]*models.ScrapedScene, error) {
	scenes, err := c.client.SearchScene(ctx, queryStr)
	if err != nil {
		return nil, err
	}

	sceneFragments := scenes.SearchScene

	var ret []*models.ScrapedScene
	var ignoredTags []string
	for _, s := range sceneFragments {
		ss, err := c.sceneFragmentToScrapedScene(ctx, s)
		if err != nil {
			return nil, err
		}

		var thisIgnoredTags []string
		ss.Tags, thisIgnoredTags = scraper.FilterTags(c.excludeTagRE, ss.Tags)
		ignoredTags = sliceutil.AppendUniques(ignoredTags, thisIgnoredTags)

		ret = append(ret, ss)
	}

	scraper.LogIgnoredTags(ignoredTags)

	return ret, nil
}

// FindStashBoxScenesByFingerprints queries stash-box for a scene using the
// scene's MD5/OSHASH checksum, or PHash.
func (c Client) FindSceneByFingerprints(ctx context.Context, fps models.Fingerprints) ([]*models.ScrapedScene, error) {
	res, err := c.FindScenesByFingerprints(ctx, []models.Fingerprints{fps})
	if len(res) > 0 {
		return res[0], err
	}
	return nil, err
}

// FindScenesByFingerprints queries stash-box for scenes using every
// scene's MD5/OSHASH checksum, or PHash, and returns results in the same order
// as the input slice.
func (c Client) FindScenesByFingerprints(ctx context.Context, fps []models.Fingerprints) ([][]*models.ScrapedScene, error) {
	var fingerprints [][]*graphql.FingerprintQueryInput

	for _, fp := range fps {
		fingerprints = append(fingerprints, convertFingerprints(fp))
	}

	return c.findScenesByFingerprints(ctx, fingerprints)
}

func convertFingerprints(fps models.Fingerprints) []*graphql.FingerprintQueryInput {
	var ret []*graphql.FingerprintQueryInput

	for _, f := range fps {
		var i = &graphql.FingerprintQueryInput{}
		switch f.Type {
		case models.FingerprintTypeMD5:
			i.Algorithm = graphql.FingerprintAlgorithmMd5
			i.Hash = f.String()
		case models.FingerprintTypeOshash:
			i.Algorithm = graphql.FingerprintAlgorithmOshash
			i.Hash = f.String()
		case models.FingerprintTypePhash:
			i.Algorithm = graphql.FingerprintAlgorithmPhash
			i.Hash = utils.PhashToString(f.Int64())
		default:
			continue
		}

		if !i.Algorithm.IsValid() {
			continue
		}

		ret = append(ret, i)
	}

	return ret
}

func (c Client) findScenesByFingerprints(ctx context.Context, scenes [][]*graphql.FingerprintQueryInput) ([][]*models.ScrapedScene, error) {
	var results [][]*models.ScrapedScene

	// filter out nils
	var validScenes [][]*graphql.FingerprintQueryInput
	for _, s := range scenes {
		if len(s) > 0 {
			validScenes = append(validScenes, s)
		}
	}

	var ignoredTags []string

	for i := 0; i < len(validScenes); i += 40 {
		end := i + 40
		if end > len(validScenes) {
			end = len(validScenes)
		}
		scenes, err := c.client.FindScenesBySceneFingerprints(ctx, validScenes[i:end])

		if err != nil {
			return nil, err
		}

		for _, sceneFragments := range scenes.FindScenesBySceneFingerprints {
			var sceneResults []*models.ScrapedScene
			for _, scene := range sceneFragments {
				ss, err := c.sceneFragmentToScrapedScene(ctx, scene)
				if err != nil {
					return nil, err
				}

				var thisIgnoredTags []string
				ss.Tags, thisIgnoredTags = scraper.FilterTags(c.excludeTagRE, ss.Tags)
				ignoredTags = sliceutil.AppendUniques(ignoredTags, thisIgnoredTags)

				sceneResults = append(sceneResults, ss)
			}
			results = append(results, sceneResults)
		}
	}

	scraper.LogIgnoredTags(ignoredTags)

	// repopulate the results to be the same order as the input
	ret := make([][]*models.ScrapedScene, len(scenes))
	upTo := 0

	for i, v := range scenes {
		if len(v) > 0 {
			ret[i] = results[upTo]
			upTo++
		}
	}

	return ret, nil
}

func (c Client) sceneFragmentToScrapedScene(ctx context.Context, s *graphql.SceneFragment) (*models.ScrapedScene, error) {
	stashID := s.ID

	ss := &models.ScrapedScene{
		Title:        s.Title,
		Code:         s.Code,
		Date:         s.Date,
		Details:      s.Details,
		Director:     s.Director,
		URL:          findURL(s.Urls, "STUDIO"),
		Duration:     s.Duration,
		RemoteSiteID: &stashID,
		Fingerprints: getFingerprints(s),
		// Image
		// stash_id
	}

	for _, u := range s.Urls {
		ss.URLs = append(ss.URLs, u.URL)
	}

	if len(ss.URLs) > 0 {
		ss.URL = &ss.URLs[0]
	}

	if len(s.Images) > 0 {
		// TODO - #454 code sorts images by aspect ratio according to a wanted
		// orientation. I'm just grabbing the first for now
		ss.Image = getFirstImage(ctx, c.getHTTPClient(), s.Images)
	}

	ss.URLs = make([]string, len(s.Urls))
	for i, u := range s.Urls {
		ss.URLs[i] = u.URL
	}

	if s.Studio != nil {
		var err error
		ss.Studio, err = c.resolveStudio(ctx, s.Studio)
		if err != nil {
			return nil, err
		}
	}

	for _, p := range s.Performers {
		sp := performerFragmentToScrapedPerformer(*p.Performer)
		ss.Performers = append(ss.Performers, sp)
	}

	for _, t := range s.Tags {
		st := &models.ScrapedTag{
			Name: t.Name,
		}
		ss.Tags = append(ss.Tags, st)
	}

	return ss, nil
}

func getFirstImage(ctx context.Context, client *http.Client, images []*graphql.ImageFragment) *string {
	ret, err := fetchImage(ctx, client, images[0].URL)
	if err != nil && !errors.Is(err, context.Canceled) {
		logger.Warnf("Error fetching image %s: %s", images[0].URL, err.Error())
	}

	return ret
}

func getFingerprints(scene *graphql.SceneFragment) []*models.StashBoxFingerprint {
	fingerprints := []*models.StashBoxFingerprint{}
	for _, fp := range scene.Fingerprints {
		fingerprint := models.StashBoxFingerprint{
			Algorithm: fp.Algorithm.String(),
			Hash:      fp.Hash,
			Duration:  fp.Duration,
		}
		fingerprints = append(fingerprints, &fingerprint)
	}
	return fingerprints
}

type SceneDraft struct {
	// Files, URLs, StashIDs must be loaded
	Scene *models.Scene
	// StashIDs must be loaded
	Performers []*models.Performer
	// StashIDs must be loaded
	Studio *models.Studio
	Tags   []*models.Tag
	Cover  []byte
}

func (c Client) SubmitSceneDraft(ctx context.Context, d SceneDraft) (*string, error) {
	draft := newSceneDraftInput(d, c.box.Endpoint)
	var image io.Reader

	if len(d.Cover) > 0 {
		image = bytes.NewReader(d.Cover)
	}

	var id *string
	var ret graphql.SubmitSceneDraft
	err := c.submitDraft(ctx, graphql.SubmitSceneDraftDocument, draft, image, &ret)
	id = ret.SubmitSceneDraft.ID

	return id, err

	// ret, err := c.client.SubmitSceneDraft(ctx, draft, uploadImage(image))
	// if err != nil {
	// return nil, err
	// }

	// id := ret.SubmitSceneDraft.ID
	// return id, nil
}

func newSceneDraftInput(d SceneDraft, endpoint string) graphql.SceneDraftInput {
	scene := d.Scene

	draft := graphql.SceneDraftInput{}

	if scene.Title != "" {
		draft.Title = &scene.Title
	}
	if scene.Code != "" {
		draft.Code = &scene.Code
	}
	if scene.Details != "" {
		draft.Details = &scene.Details
	}
	if scene.Director != "" {
		draft.Director = &scene.Director
	}
	// TODO - draft does not accept multiple URLs. Use single URL for now.
	if len(scene.URLs.List()) > 0 {
		url := strings.TrimSpace(scene.URLs.List()[0])
		draft.URL = &url
	}
	if scene.Date != nil {
		v := scene.Date.String()
		draft.Date = &v
	}

	if d.Studio != nil {
		studio := d.Studio

		studioDraft := graphql.DraftEntityInput{
			Name: studio.Name,
		}

		stashIDs := studio.StashIDs.List()
		for _, stashID := range stashIDs {
			c := stashID
			if stashID.Endpoint == endpoint {
				studioDraft.ID = &c.StashID
				break
			}
		}
		draft.Studio = &studioDraft
	}

	fingerprints := []*graphql.FingerprintInput{}

	for _, f := range scene.Files.List() {
		duration := f.Duration

		if duration != 0 {
			fingerprints = appendFingerprintsUnique(fingerprints, fileFingerprintsToInputGraphQL(f.Fingerprints, int(duration))...)
		}
	}
	draft.Fingerprints = fingerprints

	scenePerformers := d.Performers

	inputPerformers := []*graphql.DraftEntityInput{}
	for _, p := range scenePerformers {
		performerDraft := graphql.DraftEntityInput{
			Name: p.Name,
		}

		stashIDs := p.StashIDs.List()
		for _, stashID := range stashIDs {
			c := stashID
			if stashID.Endpoint == endpoint {
				performerDraft.ID = &c.StashID
				break
			}
		}

		inputPerformers = append(inputPerformers, &performerDraft)
	}
	draft.Performers = inputPerformers

	var tags []*graphql.DraftEntityInput
	sceneTags := d.Tags
	for _, tag := range sceneTags {
		tags = append(tags, &graphql.DraftEntityInput{Name: tag.Name})
	}
	draft.Tags = tags

	stashIDs := scene.StashIDs.List()
	var stashID *string
	for _, v := range stashIDs {
		if v.Endpoint == endpoint {
			vv := v.StashID
			stashID = &vv
			break
		}
	}
	draft.ID = stashID

	return draft
}

func fileFingerprintsToInputGraphQL(fps models.Fingerprints, duration int) []*graphql.FingerprintInput {
	var ret []*graphql.FingerprintInput

	for _, f := range fps {
		var i = &graphql.FingerprintInput{
			Duration: duration,
		}
		switch f.Type {
		case models.FingerprintTypeMD5:
			i.Algorithm = graphql.FingerprintAlgorithmMd5
			i.Hash = f.String()
		case models.FingerprintTypeOshash:
			i.Algorithm = graphql.FingerprintAlgorithmOshash
			i.Hash = f.String()
		case models.FingerprintTypePhash:
			i.Algorithm = graphql.FingerprintAlgorithmPhash
			i.Hash = utils.PhashToString(f.Int64())
		default:
			continue
		}

		if !i.Algorithm.IsValid() {
			continue
		}

		ret = appendFingerprintUnique(ret, i)
	}

	return ret
}

func (c Client) SubmitFingerprints(ctx context.Context, scenes []*models.Scene) (bool, error) {
	endpoint := c.box.Endpoint

	var fingerprints []graphql.FingerprintSubmission

	for _, scene := range scenes {
		stashIDs := scene.StashIDs.List()
		sceneStashID := ""
		for _, stashID := range stashIDs {
			if stashID.Endpoint == endpoint {
				sceneStashID = stashID.StashID
			}
		}

		if sceneStashID == "" {
			continue
		}

		for _, f := range scene.Files.List() {
			duration := f.Duration

			if duration == 0 {
				continue
			}

			fps := fileFingerprintsToInputGraphQL(f.Fingerprints, int(duration))
			for _, fp := range fps {
				fingerprints = append(fingerprints, graphql.FingerprintSubmission{
					SceneID:     sceneStashID,
					Fingerprint: fp,
				})
			}
		}
	}

	return c.submitFingerprints(ctx, fingerprints)
}

func (c Client) submitFingerprints(ctx context.Context, fingerprints []graphql.FingerprintSubmission) (bool, error) {
	for _, fingerprint := range fingerprints {
		_, err := c.client.SubmitFingerprint(ctx, fingerprint)
		if err != nil {
			return false, err
		}
	}

	return true, nil
}

func appendFingerprintUnique(v []*graphql.FingerprintInput, toAdd *graphql.FingerprintInput) []*graphql.FingerprintInput {
	for _, vv := range v {
		if vv.Algorithm == toAdd.Algorithm && vv.Hash == toAdd.Hash {
			return v
		}
	}

	return append(v, toAdd)
}

func appendFingerprintsUnique(v []*graphql.FingerprintInput, toAdd ...*graphql.FingerprintInput) []*graphql.FingerprintInput {
	for _, a := range toAdd {
		v = appendFingerprintUnique(v, a)
	}

	return v
}
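SubmitSceneDraft now receives a pre-assembled SceneDraft, so the caller loads the scene's files, URLs and stash IDs and gathers performers, studio and tags itself before calling in. A sketch of that call site, assuming the relationships have already been loaded as the SceneDraft field comments require; everything outside the SceneDraft type and SubmitSceneDraft is illustrative:

package main

import (
	"context"
	"fmt"

	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/stashbox"
)

// submitDraft sketches the new calling pattern: the caller assembles the
// draft inputs and hands them to the client in one value.
func submitDraft(ctx context.Context, client *stashbox.Client, scene *models.Scene,
	performers []*models.Performer, studio *models.Studio, tags []*models.Tag, cover []byte) error {

	// Files, URLs and StashIDs on scene, and StashIDs on performers/studio,
	// are assumed to be loaded by the caller before this point.
	draft := stashbox.SceneDraft{
		Scene:      scene,
		Performers: performers,
		Studio:     studio,
		Tags:       tags,
		Cover:      cover,
	}

	id, err := client.SubmitSceneDraft(ctx, draft)
	if err != nil {
		return err
	}
	if id != nil {
		fmt.Println("submitted scene draft", *id)
	}
	return nil
}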
@@ -4,12 +4,33 @@ import (
 	"context"

 	"github.com/google/uuid"
-	"github.com/stashapp/stash/pkg/match"
 	"github.com/stashapp/stash/pkg/models"
-	"github.com/stashapp/stash/pkg/scraper/stashbox/graphql"
+	"github.com/stashapp/stash/pkg/stashbox/graphql"
 )

-func (c Client) FindStashBoxStudio(ctx context.Context, query string) (*models.ScrapedStudio, error) {
+func (c Client) resolveStudio(ctx context.Context, s *graphql.StudioFragment) (*models.ScrapedStudio, error) {
+	scraped := studioFragmentToScrapedStudio(*s)
+
+	if s.Parent != nil {
+		parentStudio, err := c.client.FindStudio(ctx, &s.Parent.ID, nil)
+		if err != nil {
+			return nil, err
+		}
+
+		if parentStudio.FindStudio == nil {
+			return scraped, nil
+		}
+
+		scraped.Parent, err = c.resolveStudio(ctx, parentStudio.FindStudio)
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	return scraped, nil
+}
+
+func (c Client) FindStudio(ctx context.Context, query string) (*models.ScrapedStudio, error) {
 	var studio *graphql.FindStudio

 	_, err := uuid.Parse(query)

@@ -27,32 +48,8 @@ func (c Client) FindStashBoxStudio(ctx context.Context, query string) (*models.S

 	var ret *models.ScrapedStudio
 	if studio.FindStudio != nil {
-		r := c.repository
-		if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
-			ret = studioFragmentToScrapedStudio(*studio.FindStudio)
-
-			err = match.ScrapedStudio(ctx, r.Studio, ret, &c.box.Endpoint)
+		ret, err = c.resolveStudio(ctx, studio.FindStudio)
 		if err != nil {
-			return err
-		}
-
-		if studio.FindStudio.Parent != nil {
-			parentStudio, err := c.client.FindStudio(ctx, &studio.FindStudio.Parent.ID, nil)
-			if err != nil {
-				return err
-			}
-
-			if parentStudio.FindStudio != nil {
-				ret.Parent = studioFragmentToScrapedStudio(*parentStudio.FindStudio)
-
-				err = match.ScrapedStudio(ctx, r.Studio, ret.Parent, &c.box.Endpoint)
-				if err != nil {
-					return err
-				}
-			}
-		}
-		return nil
-	}); err != nil {
 			return nil, err
 		}
 	}
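FindStudio now returns the scraped studio with its parent chain resolved recursively, and any matching against local studios is the caller's job. A small sketch that relies only on Client.FindStudio and the Parent field shown above; the depth reporting is illustrative:

package main

import (
	"context"
	"fmt"

	"github.com/stashapp/stash/pkg/stashbox"
)

// studioDepth walks the resolved parent chain returned by FindStudio.
func studioDepth(ctx context.Context, client *stashbox.Client, query string) (int, error) {
	studio, err := client.FindStudio(ctx, query)
	if err != nil {
		return 0, err
	}

	depth := 0
	for s := studio; s != nil; s = s.Parent {
		depth++
	}
	fmt.Printf("resolved %d studio level(s) for %q\n", depth, query)
	return depth, nil
}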