Mirror of https://github.com/stashapp/stash.git (synced 2025-12-06 08:26:00 +01:00)

Separate graphql API from rest of the system (#2503)

* Move graphql generated files to api
* Refactor identify options
* Remove models.StashBoxes
* Move ScraperSource to scraper package
* Rename field strategy enums
* Rename identify.TaskOptions to Options

parent 9dcf03eb70
commit 7b5bd80515

109 changed files with 2684 additions and 791 deletions
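In practical terms, the gqlgen-generated GraphQL code moves out of pkg/models and into internal/api, and GraphQL-only types are rebound to the packages that own them (internal/manager, internal/identify, internal/dlna, pkg/scraper). A rough sketch of what a resolver looks like under the new layout; this file is illustrative, not part of the commit:

// Illustrative sketch only: with the generated GraphQL code now in
// internal/api, a resolver there works directly with the hand-written
// domain types that stay in pkg/models.
package api

import (
	"context"

	"github.com/stashapp/stash/pkg/models"
)

// layoutSketchResolver stands in for the real resolver types in this package.
type layoutSketchResolver struct{}

// Tag echoes a pkg/models.Tag back to the GraphQL layer; no generated struct
// in pkg/models is involved any more.
func (layoutSketchResolver) Tag(ctx context.Context, obj *models.Tag) (*models.Tag, error) {
	return obj, nil
}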
.gitignore (vendored): 2 lines changed

@@ -16,7 +16,7 @@
*.out

# GraphQL generated output
-pkg/models/generated_*.go
+internal/api/generated_*.go
ui/v2.5/src/core/generated-*.tsx

####
gqlgen.yml: 126 lines changed

@@ -4,46 +4,110 @@ schema:
  - "graphql/schema/types/*.graphql"
  - "graphql/schema/*.graphql"
exec:
-  filename: pkg/models/generated_exec.go
+  filename: internal/api/generated_exec.go
model:
-  filename: pkg/models/generated_models.go
+  filename: internal/api/generated_models.go
resolver:
  filename: internal/api/resolver.go
  type: Resolver

struct_tag: gqlgen

autobind:
  - github.com/stashapp/stash/pkg/models
  - github.com/stashapp/stash/pkg/plugin
  - github.com/stashapp/stash/pkg/scraper
  - github.com/stashapp/stash/internal/identify
  - github.com/stashapp/stash/internal/dlna
  - github.com/stashapp/stash/pkg/scraper/stashbox

models:
  # autobind on config causes generation issues
  # Scalars
  Timestamp:
    model: github.com/stashapp/stash/pkg/models.Timestamp
  # Objects
  Gallery:
    model: github.com/stashapp/stash/pkg/models.Gallery
  Image:
    model: github.com/stashapp/stash/pkg/models.Image
  ImageFileType:
    model: github.com/stashapp/stash/pkg/models.ImageFileType
  Performer:
    model: github.com/stashapp/stash/pkg/models.Performer
  Scene:
    model: github.com/stashapp/stash/pkg/models.Scene
  SceneMarker:
    model: github.com/stashapp/stash/pkg/models.SceneMarker
  ScrapedItem:
    model: github.com/stashapp/stash/pkg/models.ScrapedItem
  Studio:
    model: github.com/stashapp/stash/pkg/models.Studio
  Movie:
    model: github.com/stashapp/stash/pkg/models.Movie
  Tag:
    model: github.com/stashapp/stash/pkg/models.Tag
  SceneFileType:
    model: github.com/stashapp/stash/pkg/models.SceneFileType
  SavedFilter:
    model: github.com/stashapp/stash/pkg/models.SavedFilter
  StashID:
    model: github.com/stashapp/stash/pkg/models.StashID
  SceneCaption:
    model: github.com/stashapp/stash/pkg/models.SceneCaption
  StashConfig:
    model: github.com/stashapp/stash/internal/manager/config.StashConfig
  StashConfigInput:
    model: github.com/stashapp/stash/internal/manager/config.StashConfigInput
  StashBoxInput:
    model: github.com/stashapp/stash/internal/manager/config.StashBoxInput
  ConfigImageLightboxResult:
    model: github.com/stashapp/stash/internal/manager/config.ConfigImageLightboxResult
  ImageLightboxDisplayMode:
    model: github.com/stashapp/stash/internal/manager/config.ImageLightboxDisplayMode
  ImageLightboxScrollMode:
    model: github.com/stashapp/stash/internal/manager/config.ImageLightboxScrollMode
  ConfigDisableDropdownCreate:
    model: github.com/stashapp/stash/internal/manager/config.ConfigDisableDropdownCreate
  ScanMetadataOptions:
    model: github.com/stashapp/stash/internal/manager/config.ScanMetadataOptions
  AutoTagMetadataOptions:
    model: github.com/stashapp/stash/internal/manager/config.AutoTagMetadataOptions
  SceneParserInput:
    model: github.com/stashapp/stash/internal/manager.SceneParserInput
  SceneParserResult:
    model: github.com/stashapp/stash/internal/manager.SceneParserResult
  SceneMovieID:
    model: github.com/stashapp/stash/internal/manager.SceneMovieID
  SystemStatus:
    model: github.com/stashapp/stash/internal/manager.SystemStatus
  SystemStatusEnum:
    model: github.com/stashapp/stash/internal/manager.SystemStatusEnum
  ImportDuplicateEnum:
    model: github.com/stashapp/stash/internal/manager.ImportDuplicateEnum
  SetupInput:
    model: github.com/stashapp/stash/internal/manager.SetupInput
  MigrateInput:
    model: github.com/stashapp/stash/internal/manager.MigrateInput
  ScanMetadataInput:
    model: github.com/stashapp/stash/internal/manager.ScanMetadataInput
  GenerateMetadataInput:
    model: github.com/stashapp/stash/internal/manager.GenerateMetadataInput
  GeneratePreviewOptionsInput:
    model: github.com/stashapp/stash/internal/manager.GeneratePreviewOptionsInput
  AutoTagMetadataInput:
    model: github.com/stashapp/stash/internal/manager.AutoTagMetadataInput
  CleanMetadataInput:
    model: github.com/stashapp/stash/internal/manager.CleanMetadataInput
  StashBoxBatchPerformerTagInput:
    model: github.com/stashapp/stash/internal/manager.StashBoxBatchPerformerTagInput
  SceneStreamEndpoint:
    model: github.com/stashapp/stash/internal/manager.SceneStreamEndpoint
  ExportObjectTypeInput:
    model: github.com/stashapp/stash/internal/manager.ExportObjectTypeInput
  ExportObjectsInput:
    model: github.com/stashapp/stash/internal/manager.ExportObjectsInput
  ImportObjectsInput:
    model: github.com/stashapp/stash/internal/manager.ImportObjectsInput
  ScanMetaDataFilterInput:
    model: github.com/stashapp/stash/internal/manager.ScanMetaDataFilterInput
  # renamed types
  DLNAStatus:
    model: github.com/stashapp/stash/internal/dlna.Status
  DLNAIP:
    model: github.com/stashapp/stash/internal/dlna.Dlnaip
  IdentifySource:
    model: github.com/stashapp/stash/internal/identify.Source
  IdentifyMetadataTaskOptions:
    model: github.com/stashapp/stash/internal/identify.Options
  IdentifyMetadataInput:
    model: github.com/stashapp/stash/internal/identify.Options
  IdentifyMetadataOptions:
    model: github.com/stashapp/stash/internal/identify.MetadataOptions
  IdentifyFieldOptions:
    model: github.com/stashapp/stash/internal/identify.FieldOptions
  IdentifyFieldStrategy:
    model: github.com/stashapp/stash/internal/identify.FieldStrategy
  ScraperSource:
    model: github.com/stashapp/stash/pkg/scraper.Source
  # rebind inputs to types
  IdentifySourceInput:
    model: github.com/stashapp/stash/internal/identify.Source
  IdentifyFieldOptionsInput:
    model: github.com/stashapp/stash/internal/identify.FieldOptions
  IdentifyMetadataOptionsInput:
    model: github.com/stashapp/stash/internal/identify.MetadataOptions
  ScraperSourceInput:
    model: github.com/stashapp/stash/pkg/scraper.Source
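The autobind list lets gqlgen match GraphQL type names against exported Go types in those packages, and the models section pins the remaining names (including the renamed and input-rebound ones) explicitly. The practical effect is that generated_models.go stops declaring structs for these types, and the generated resolver interfaces reference the bound Go types directly. Roughly, and only as an illustration rather than the actual generated output:

// Illustrative extract of what the generated code in internal/api now refers
// to; the real generated interfaces differ in detail.
package api

import (
	"context"

	"github.com/stashapp/stash/internal/identify"
	"github.com/stashapp/stash/pkg/models"
)

type GalleryResolverSketch interface {
	// Bound object type: obj is the hand-written models.Gallery.
	Images(ctx context.Context, obj *models.Gallery) ([]*models.Image, error)
}

type MutationResolverSketch interface {
	// Rebound input type: the argument arrives as identify.Options, so the
	// resolver can hand it straight to the identify task (see the
	// MetadataIdentify hunk further down).
	MetadataIdentify(ctx context.Context, input identify.Options) (string, error)
}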
@@ -38,37 +38,37 @@ func (r *Resolver) scraperCache() *scraper.Cache {
	return manager.GetInstance().ScraperCache
}

-func (r *Resolver) Gallery() models.GalleryResolver {
+func (r *Resolver) Gallery() GalleryResolver {
	return &galleryResolver{r}
}
-func (r *Resolver) Mutation() models.MutationResolver {
+func (r *Resolver) Mutation() MutationResolver {
	return &mutationResolver{r}
}
-func (r *Resolver) Performer() models.PerformerResolver {
+func (r *Resolver) Performer() PerformerResolver {
	return &performerResolver{r}
}
-func (r *Resolver) Query() models.QueryResolver {
+func (r *Resolver) Query() QueryResolver {
	return &queryResolver{r}
}
-func (r *Resolver) Scene() models.SceneResolver {
+func (r *Resolver) Scene() SceneResolver {
	return &sceneResolver{r}
}
-func (r *Resolver) Image() models.ImageResolver {
+func (r *Resolver) Image() ImageResolver {
	return &imageResolver{r}
}
-func (r *Resolver) SceneMarker() models.SceneMarkerResolver {
+func (r *Resolver) SceneMarker() SceneMarkerResolver {
	return &sceneMarkerResolver{r}
}
-func (r *Resolver) Studio() models.StudioResolver {
+func (r *Resolver) Studio() StudioResolver {
	return &studioResolver{r}
}
-func (r *Resolver) Movie() models.MovieResolver {
+func (r *Resolver) Movie() MovieResolver {
	return &movieResolver{r}
}
-func (r *Resolver) Subscription() models.SubscriptionResolver {
+func (r *Resolver) Subscription() SubscriptionResolver {
	return &subscriptionResolver{r}
}
-func (r *Resolver) Tag() models.TagResolver {
+func (r *Resolver) Tag() TagResolver {
	return &tagResolver{r}
}

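These accessors implement the gqlgen ResolverRoot, which is what the HTTP layer feeds into the generated executable schema. A sketch of how the relocated generated code is typically wired up; the actual server setup in stash may differ:

// Sketch only: NewExecutableSchema and Config come from the generated_exec.go
// that now lives in internal/api instead of pkg/models.
package api

import (
	"net/http"

	"github.com/99designs/gqlgen/graphql/handler"
)

func newGraphQLHandler(r *Resolver) http.Handler {
	return handler.NewDefaultServer(NewExecutableSchema(Config{Resolvers: r}))
}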
@@ -125,8 +125,8 @@ func (r *queryResolver) MarkerStrings(ctx context.Context, q *string, sort *stri
	return ret, nil
}

-func (r *queryResolver) Stats(ctx context.Context) (*models.StatsResultType, error) {
-	var ret models.StatsResultType
+func (r *queryResolver) Stats(ctx context.Context) (*StatsResultType, error) {
+	var ret StatsResultType
	if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
		scenesQB := repo.Scene()
		imageQB := repo.Image()

@@ -146,7 +146,7 @@ func (r *queryResolver) Stats(ctx context.Context) (*models.StatsResultType, err
		moviesCount, _ := moviesQB.Count()
		tagsCount, _ := tagsQB.Count()

-		ret = models.StatsResultType{
+		ret = StatsResultType{
			SceneCount: scenesCount,
			ScenesSize: scenesSize,
			ScenesDuration: scenesDuration,

@@ -167,10 +167,10 @@ func (r *queryResolver) Stats(ctx context.Context) (*models.StatsResultType, err
	return &ret, nil
}

-func (r *queryResolver) Version(ctx context.Context) (*models.Version, error) {
+func (r *queryResolver) Version(ctx context.Context) (*Version, error) {
	version, hash, buildtime := GetVersion()

-	return &models.Version{
+	return &Version{
		Version: &version,
		Hash: hash,
		BuildTime: buildtime,

@@ -178,7 +178,7 @@ func (r *queryResolver) Version(ctx context.Context) (*models.Version, error) {
}

// Latestversion returns the latest git shorthash commit.
-func (r *queryResolver) Latestversion(ctx context.Context) (*models.ShortVersion, error) {
+func (r *queryResolver) Latestversion(ctx context.Context) (*ShortVersion, error) {
	ver, url, err := GetLatestVersion(ctx, true)
	if err == nil {
		logger.Infof("Retrieved latest hash: %s", ver)

@@ -186,21 +186,21 @@ func (r *queryResolver) Latestversion(ctx context.Context) (*models.ShortVersion
		logger.Errorf("Error while retrieving latest hash: %s", err)
	}

-	return &models.ShortVersion{
+	return &ShortVersion{
		Shorthash: ver,
		URL: url,
	}, err
}

// Get scene marker tags which show up under the video.
-func (r *queryResolver) SceneMarkerTags(ctx context.Context, scene_id string) ([]*models.SceneMarkerTag, error) {
+func (r *queryResolver) SceneMarkerTags(ctx context.Context, scene_id string) ([]*SceneMarkerTag, error) {
	sceneID, err := strconv.Atoi(scene_id)
	if err != nil {
		return nil, err
	}

	var keys []int
-	tags := make(map[int]*models.SceneMarkerTag)
+	tags := make(map[int]*SceneMarkerTag)

	if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
		sceneMarkers, err := repo.SceneMarker().FindBySceneID(sceneID)

@@ -216,7 +216,7 @@ func (r *queryResolver) SceneMarkerTags(ctx context.Context, scene_id string) ([
		}
		_, hasKey := tags[markerPrimaryTag.ID]
		if !hasKey {
-			sceneMarkerTag := &models.SceneMarkerTag{Tag: markerPrimaryTag}
+			sceneMarkerTag := &SceneMarkerTag{Tag: markerPrimaryTag}
			tags[markerPrimaryTag.ID] = sceneMarkerTag
			keys = append(keys, markerPrimaryTag.ID)
		}

@@ -235,7 +235,7 @@ func (r *queryResolver) SceneMarkerTags(ctx context.Context, scene_id string) ([
		return a.SceneMarkers[0].Seconds < b.SceneMarkers[0].Seconds
	})

-	var result []*models.SceneMarkerTag
+	var result []*SceneMarkerTag
	for _, key := range keys {
		result = append(result, tags[key])
	}
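Aside from the type rename, SceneMarkerTags keeps a map keyed by tag ID plus a parallel keys slice so the final result has a deterministic order. Reduced to its essentials, the idiom looks like this (element type simplified to int):

// Minimal illustration of the map-plus-keys ordering idiom used above;
// not code from the repository.
package api

func orderedDedupe(ids []int) []int {
	seen := make(map[int]bool)
	var keys []int
	for _, id := range ids {
		if !seen[id] {
			seen[id] = true
			keys = append(keys, id) // remember first-seen order
		}
	}
	return keys
}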
@@ -33,12 +33,12 @@ func (r *imageResolver) File(ctx context.Context, obj *models.Image) (*models.Im
	}, nil
}

-func (r *imageResolver) Paths(ctx context.Context, obj *models.Image) (*models.ImagePathsType, error) {
+func (r *imageResolver) Paths(ctx context.Context, obj *models.Image) (*ImagePathsType, error) {
	baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
	builder := urlbuilders.NewImageURLBuilder(baseURL, obj)
	thumbnailPath := builder.GetThumbnailURL()
	imagePath := builder.GetImageURL()
-	return &models.ImagePathsType{
+	return &ImagePathsType{
		Image: &imagePath,
		Thumbnail: &thumbnailPath,
	}, nil

@@ -85,7 +85,7 @@ func (r *sceneResolver) File(ctx context.Context, obj *models.Scene) (*models.Sc
	}, nil
}

-func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.ScenePathsType, error) {
+func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*ScenePathsType, error) {
	baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
	config := manager.GetInstance().Config
	builder := urlbuilders.NewSceneURLBuilder(baseURL, obj.ID)

@@ -101,7 +101,7 @@ func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.S
	captionBasePath := builder.GetCaptionURL()
	interactiveHeatmap := builder.GetInteractiveHeatmapURL()

-	return &models.ScenePathsType{
+	return &ScenePathsType{
		Screenshot: &screenshotPath,
		Preview: &previewPath,
		Stream: &streamPath,

@@ -163,7 +163,7 @@ func (r *sceneResolver) Studio(ctx context.Context, obj *models.Scene) (ret *mod
	return ret, nil
}

-func (r *sceneResolver) Movies(ctx context.Context, obj *models.Scene) (ret []*models.SceneMovie, err error) {
+func (r *sceneResolver) Movies(ctx context.Context, obj *models.Scene) (ret []*SceneMovie, err error) {
	if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
		qb := repo.Scene()
		mqb := repo.Movie()

@@ -180,7 +180,7 @@ func (r *sceneResolver) Movies(ctx context.Context, obj *models.Scene) (ret []*m
		}

		sceneIdx := sm.SceneIndex
-		sceneMovie := &models.SceneMovie{
+		sceneMovie := &SceneMovie{
			Movie: movie,
		}

@@ -252,7 +252,7 @@ func (r *sceneResolver) FileModTime(ctx context.Context, obj *models.Scene) (*ti
	return &obj.FileModTime.Timestamp, nil
}

-func (r *sceneResolver) SceneStreams(ctx context.Context, obj *models.Scene) ([]*models.SceneStreamEndpoint, error) {
+func (r *sceneResolver) SceneStreams(ctx context.Context, obj *models.Scene) ([]*manager.SceneStreamEndpoint, error) {
	config := manager.GetInstance().Config

	baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
@@ -15,17 +15,17 @@ import (

var ErrOverriddenConfig = errors.New("cannot set overridden value")

-func (r *mutationResolver) Setup(ctx context.Context, input models.SetupInput) (bool, error) {
+func (r *mutationResolver) Setup(ctx context.Context, input manager.SetupInput) (bool, error) {
	err := manager.GetInstance().Setup(ctx, input)
	return err == nil, err
}

-func (r *mutationResolver) Migrate(ctx context.Context, input models.MigrateInput) (bool, error) {
+func (r *mutationResolver) Migrate(ctx context.Context, input manager.MigrateInput) (bool, error) {
	err := manager.GetInstance().Migrate(ctx, input)
	return err == nil, err
}

-func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.ConfigGeneralInput) (*models.ConfigGeneralResult, error) {
+func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGeneralInput) (*ConfigGeneralResult, error) {
	c := config.GetInstance()

	existingPaths := c.GetStashPaths()

@@ -281,7 +281,7 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input models.Co
	return makeConfigGeneralResult(), nil
}

-func (r *mutationResolver) ConfigureInterface(ctx context.Context, input models.ConfigInterfaceInput) (*models.ConfigInterfaceResult, error) {
+func (r *mutationResolver) ConfigureInterface(ctx context.Context, input ConfigInterfaceInput) (*ConfigInterfaceResult, error) {
	c := config.GetInstance()

	setBool := func(key string, v *bool) {
@@ -338,10 +338,10 @@ func (r *mutationResolver) ConfigureInterface(ctx context.Context, input models.
		c.Set(config.ImageLightboxSlideshowDelay, *options.SlideshowDelay)
	}

-	setString(config.ImageLightboxDisplayMode, (*string)(options.DisplayMode))
+	setString(config.ImageLightboxDisplayModeKey, (*string)(options.DisplayMode))
	setBool(config.ImageLightboxScaleUp, options.ScaleUp)
	setBool(config.ImageLightboxResetZoomOnNav, options.ResetZoomOnNav)
-	setString(config.ImageLightboxScrollMode, (*string)(options.ScrollMode))
+	setString(config.ImageLightboxScrollModeKey, (*string)(options.ScrollMode))

	if options.ScrollAttemptsBeforeChange != nil {
		c.Set(config.ImageLightboxScrollAttemptsBeforeChange, *options.ScrollAttemptsBeforeChange)
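The "Key" suffix on these two constants is presumably needed because the GraphQL enum types ImageLightboxDisplayMode and ImageLightboxScrollMode are now bound into internal/manager/config (see the gqlgen.yml hunk above), so the string constants holding the setting keys could no longer share those names. A hedged sketch of how the two coexist; everything here other than the two names taken from the diff is an assumption:

// Sketch only; the actual declarations in internal/manager/config may differ.
package config

// The configuration key gets a "...Key" suffix...
const ImageLightboxDisplayModeKey = "image_lightbox.display_mode" // value is an assumption

// ...so it no longer shares a name with the GraphQL enum type, which gqlgen
// now binds into this package.
type ImageLightboxDisplayMode string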
@@ -376,7 +376,7 @@ func (r *mutationResolver) ConfigureInterface(ctx context.Context, input models.
	return makeConfigInterfaceResult(), nil
}

-func (r *mutationResolver) ConfigureDlna(ctx context.Context, input models.ConfigDLNAInput) (*models.ConfigDLNAResult, error) {
+func (r *mutationResolver) ConfigureDlna(ctx context.Context, input ConfigDLNAInput) (*ConfigDLNAResult, error) {
	c := config.GetInstance()

	if input.ServerName != nil {

@@ -413,7 +413,7 @@ func (r *mutationResolver) ConfigureDlna(ctx context.Context, input models.Confi
	return makeConfigDLNAResult(), nil
}

-func (r *mutationResolver) ConfigureScraping(ctx context.Context, input models.ConfigScrapingInput) (*models.ConfigScrapingResult, error) {
+func (r *mutationResolver) ConfigureScraping(ctx context.Context, input ConfigScrapingInput) (*ConfigScrapingResult, error) {
	c := config.GetInstance()

	refreshScraperCache := false

@@ -445,7 +445,7 @@ func (r *mutationResolver) ConfigureScraping(ctx context.Context, input models.C
	return makeConfigScrapingResult(), nil
}

-func (r *mutationResolver) ConfigureDefaults(ctx context.Context, input models.ConfigDefaultSettingsInput) (*models.ConfigDefaultSettingsResult, error) {
+func (r *mutationResolver) ConfigureDefaults(ctx context.Context, input ConfigDefaultSettingsInput) (*ConfigDefaultSettingsResult, error) {
	c := config.GetInstance()

	if input.Identify != nil {

@@ -479,7 +479,7 @@ func (r *mutationResolver) ConfigureDefaults(ctx context.Context, input models.C
	return makeConfigDefaultsResult(), nil
}

-func (r *mutationResolver) GenerateAPIKey(ctx context.Context, input models.GenerateAPIKeyInput) (string, error) {
+func (r *mutationResolver) GenerateAPIKey(ctx context.Context, input GenerateAPIKeyInput) (string, error) {
	c := config.GetInstance()

	var newAPIKey string
@@ -5,10 +5,9 @@ import (
	"time"

	"github.com/stashapp/stash/internal/manager"
-	"github.com/stashapp/stash/pkg/models"
)

-func (r *mutationResolver) EnableDlna(ctx context.Context, input models.EnableDLNAInput) (bool, error) {
+func (r *mutationResolver) EnableDlna(ctx context.Context, input EnableDLNAInput) (bool, error) {
	err := manager.GetInstance().DLNAService.Start(parseMinutes(input.Duration))
	if err != nil {
		return false, err

@@ -16,17 +15,17 @@ func (r *mutationResolver) EnableDlna(ctx context.Context, input models.EnableDL
	return true, nil
}

-func (r *mutationResolver) DisableDlna(ctx context.Context, input models.DisableDLNAInput) (bool, error) {
+func (r *mutationResolver) DisableDlna(ctx context.Context, input DisableDLNAInput) (bool, error) {
	manager.GetInstance().DLNAService.Stop(parseMinutes(input.Duration))
	return true, nil
}

-func (r *mutationResolver) AddTempDlnaip(ctx context.Context, input models.AddTempDLNAIPInput) (bool, error) {
+func (r *mutationResolver) AddTempDlnaip(ctx context.Context, input AddTempDLNAIPInput) (bool, error) {
	manager.GetInstance().DLNAService.AddTempDLNAIP(input.Address, parseMinutes(input.Duration))
	return true, nil
}

-func (r *mutationResolver) RemoveTempDlnaip(ctx context.Context, input models.RemoveTempDLNAIPInput) (bool, error) {
+func (r *mutationResolver) RemoveTempDlnaip(ctx context.Context, input RemoveTempDLNAIPInput) (bool, error) {
	ret := manager.GetInstance().DLNAService.RemoveTempDLNAIP(input.Address)
	return ret, nil
}
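The parseMinutes helper referenced above is not part of this hunk; presumably it turns the optional minutes value from the GraphQL input into a duration for the DLNA service. A sketch under that assumption only:

// Assumed shape of the parseMinutes helper used above; the real
// implementation in stash may differ.
package api

import "time"

func parseMinutes(minutes *int) *time.Duration {
	if minutes == nil {
		return nil // no duration supplied: leave it to the service default
	}
	d := time.Duration(*minutes) * time.Minute
	return &d
}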
@@ -31,7 +31,7 @@ func (r *mutationResolver) getGallery(ctx context.Context, id int) (ret *models.
	return ret, nil
}

-func (r *mutationResolver) GalleryCreate(ctx context.Context, input models.GalleryCreateInput) (*models.Gallery, error) {
+func (r *mutationResolver) GalleryCreate(ctx context.Context, input GalleryCreateInput) (*models.Gallery, error) {
	// name must be provided
	if input.Title == "" {
		return nil, errors.New("title must not be empty")

@@ -273,7 +273,7 @@ func (r *mutationResolver) galleryUpdate(input models.GalleryUpdateInput, transl
	return gallery, nil
}

-func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input models.BulkGalleryUpdateInput) ([]*models.Gallery, error) {
+func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input BulkGalleryUpdateInput) ([]*models.Gallery, error) {
	// Populate gallery from the input
	updatedTime := time.Now()

@@ -367,7 +367,7 @@ func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input models.B
	return newRet, nil
}

-func adjustGalleryPerformerIDs(qb models.GalleryReader, galleryID int, ids models.BulkUpdateIds) (ret []int, err error) {
+func adjustGalleryPerformerIDs(qb models.GalleryReader, galleryID int, ids BulkUpdateIds) (ret []int, err error) {
	ret, err = qb.GetPerformerIDs(galleryID)
	if err != nil {
		return nil, err

@@ -376,7 +376,7 @@ func adjustGalleryPerformerIDs(qb models.GalleryReader, galleryID int, ids model
	return adjustIDs(ret, ids), nil
}

-func adjustGalleryTagIDs(qb models.GalleryReader, galleryID int, ids models.BulkUpdateIds) (ret []int, err error) {
+func adjustGalleryTagIDs(qb models.GalleryReader, galleryID int, ids BulkUpdateIds) (ret []int, err error) {
	ret, err = qb.GetTagIDs(galleryID)
	if err != nil {
		return nil, err

@@ -385,7 +385,7 @@ func adjustGalleryTagIDs(qb models.GalleryReader, galleryID int, ids models.Bulk
	return adjustIDs(ret, ids), nil
}

-func adjustGallerySceneIDs(qb models.GalleryReader, galleryID int, ids models.BulkUpdateIds) (ret []int, err error) {
+func adjustGallerySceneIDs(qb models.GalleryReader, galleryID int, ids BulkUpdateIds) (ret []int, err error) {
	ret, err = qb.GetSceneIDs(galleryID)
	if err != nil {
		return nil, err

@@ -526,7 +526,7 @@ func isStashPath(path string) bool {
	return false
}

-func (r *mutationResolver) AddGalleryImages(ctx context.Context, input models.GalleryAddInput) (bool, error) {
+func (r *mutationResolver) AddGalleryImages(ctx context.Context, input GalleryAddInput) (bool, error) {
	galleryID, err := strconv.Atoi(input.GalleryID)
	if err != nil {
		return false, err

@@ -566,7 +566,7 @@ func (r *mutationResolver) AddGalleryImages(ctx context.Context, input models.Ga
	return true, nil
}

-func (r *mutationResolver) RemoveGalleryImages(ctx context.Context, input models.GalleryRemoveInput) (bool, error) {
+func (r *mutationResolver) RemoveGalleryImages(ctx context.Context, input GalleryRemoveInput) (bool, error) {
	galleryID, err := strconv.Atoi(input.GalleryID)
	if err != nil {
		return false, err
@@ -26,7 +26,7 @@ func (r *mutationResolver) getImage(ctx context.Context, id int) (ret *models.Im
	return ret, nil
}

-func (r *mutationResolver) ImageUpdate(ctx context.Context, input models.ImageUpdateInput) (ret *models.Image, err error) {
+func (r *mutationResolver) ImageUpdate(ctx context.Context, input ImageUpdateInput) (ret *models.Image, err error) {
	translator := changesetTranslator{
		inputMap: getUpdateInputMap(ctx),
	}

@@ -44,7 +44,7 @@ func (r *mutationResolver) ImageUpdate(ctx context.Context, input models.ImageUp
	return r.getImage(ctx, ret.ID)
}

-func (r *mutationResolver) ImagesUpdate(ctx context.Context, input []*models.ImageUpdateInput) (ret []*models.Image, err error) {
+func (r *mutationResolver) ImagesUpdate(ctx context.Context, input []*ImageUpdateInput) (ret []*models.Image, err error) {
	inputMaps := getUpdateInputMaps(ctx)

	// Start the transaction and save the image

@@ -86,7 +86,7 @@ func (r *mutationResolver) ImagesUpdate(ctx context.Context, input []*models.Ima
	return newRet, nil
}

-func (r *mutationResolver) imageUpdate(input models.ImageUpdateInput, translator changesetTranslator, repo models.Repository) (*models.Image, error) {
+func (r *mutationResolver) imageUpdate(input ImageUpdateInput, translator changesetTranslator, repo models.Repository) (*models.Image, error) {
	// Populate image from the input
	imageID, err := strconv.Atoi(input.ID)
	if err != nil {

@@ -157,7 +157,7 @@ func (r *mutationResolver) updateImageTags(qb models.ImageReaderWriter, imageID
	return qb.UpdateTags(imageID, ids)
}

-func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input models.BulkImageUpdateInput) (ret []*models.Image, err error) {
+func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input BulkImageUpdateInput) (ret []*models.Image, err error) {
	imageIDs, err := stringslice.StringSliceToIntSlice(input.Ids)
	if err != nil {
		return nil, err

@@ -251,7 +251,7 @@ func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input models.Bul
	return newRet, nil
}

-func adjustImageGalleryIDs(qb models.ImageReader, imageID int, ids models.BulkUpdateIds) (ret []int, err error) {
+func adjustImageGalleryIDs(qb models.ImageReader, imageID int, ids BulkUpdateIds) (ret []int, err error) {
	ret, err = qb.GetGalleryIDs(imageID)
	if err != nil {
		return nil, err

@@ -260,7 +260,7 @@ func adjustImageGalleryIDs(qb models.ImageReader, imageID int, ids models.BulkUp
	return adjustIDs(ret, ids), nil
}

-func adjustImagePerformerIDs(qb models.ImageReader, imageID int, ids models.BulkUpdateIds) (ret []int, err error) {
+func adjustImagePerformerIDs(qb models.ImageReader, imageID int, ids BulkUpdateIds) (ret []int, err error) {
	ret, err = qb.GetPerformerIDs(imageID)
	if err != nil {
		return nil, err

@@ -269,7 +269,7 @@ func adjustImagePerformerIDs(qb models.ImageReader, imageID int, ids models.Bulk
	return adjustIDs(ret, ids), nil
}

-func adjustImageTagIDs(qb models.ImageReader, imageID int, ids models.BulkUpdateIds) (ret []int, err error) {
+func adjustImageTagIDs(qb models.ImageReader, imageID int, ids BulkUpdateIds) (ret []int, err error) {
	ret, err = qb.GetTagIDs(imageID)
	if err != nil {
		return nil, err
@@ -9,15 +9,15 @@ import (
	"sync"
	"time"

+	"github.com/stashapp/stash/internal/identify"
	"github.com/stashapp/stash/internal/manager"
	"github.com/stashapp/stash/internal/manager/config"
	"github.com/stashapp/stash/pkg/database"
	"github.com/stashapp/stash/pkg/fsutil"
	"github.com/stashapp/stash/pkg/logger"
-	"github.com/stashapp/stash/pkg/models"
)

-func (r *mutationResolver) MetadataScan(ctx context.Context, input models.ScanMetadataInput) (string, error) {
+func (r *mutationResolver) MetadataScan(ctx context.Context, input manager.ScanMetadataInput) (string, error) {
	jobID, err := manager.GetInstance().Scan(ctx, input)

	if err != nil {

@@ -36,7 +36,7 @@ func (r *mutationResolver) MetadataImport(ctx context.Context) (string, error) {
	return strconv.Itoa(jobID), nil
}

-func (r *mutationResolver) ImportObjects(ctx context.Context, input models.ImportObjectsInput) (string, error) {
+func (r *mutationResolver) ImportObjects(ctx context.Context, input manager.ImportObjectsInput) (string, error) {
	t, err := manager.CreateImportTask(config.GetInstance().GetVideoFileNamingAlgorithm(), input)
	if err != nil {
		return "", err

@@ -56,7 +56,7 @@ func (r *mutationResolver) MetadataExport(ctx context.Context) (string, error) {
	return strconv.Itoa(jobID), nil
}

-func (r *mutationResolver) ExportObjects(ctx context.Context, input models.ExportObjectsInput) (*string, error) {
+func (r *mutationResolver) ExportObjects(ctx context.Context, input manager.ExportObjectsInput) (*string, error) {
	t := manager.CreateExportTask(config.GetInstance().GetVideoFileNamingAlgorithm(), input)

	var wg sync.WaitGroup

@@ -75,7 +75,7 @@ func (r *mutationResolver) ExportObjects(ctx context.Context, input models.Expor
	return nil, nil
}

-func (r *mutationResolver) MetadataGenerate(ctx context.Context, input models.GenerateMetadataInput) (string, error) {
+func (r *mutationResolver) MetadataGenerate(ctx context.Context, input manager.GenerateMetadataInput) (string, error) {
	jobID, err := manager.GetInstance().Generate(ctx, input)

	if err != nil {

@@ -85,19 +85,19 @@ func (r *mutationResolver) MetadataGenerate(ctx context.Context, input models.Ge
	return strconv.Itoa(jobID), nil
}

-func (r *mutationResolver) MetadataAutoTag(ctx context.Context, input models.AutoTagMetadataInput) (string, error) {
+func (r *mutationResolver) MetadataAutoTag(ctx context.Context, input manager.AutoTagMetadataInput) (string, error) {
	jobID := manager.GetInstance().AutoTag(ctx, input)
	return strconv.Itoa(jobID), nil
}

-func (r *mutationResolver) MetadataIdentify(ctx context.Context, input models.IdentifyMetadataInput) (string, error) {
+func (r *mutationResolver) MetadataIdentify(ctx context.Context, input identify.Options) (string, error) {
	t := manager.CreateIdentifyJob(input)
	jobID := manager.GetInstance().JobManager.Add(ctx, "Identifying...", t)

	return strconv.Itoa(jobID), nil
}

-func (r *mutationResolver) MetadataClean(ctx context.Context, input models.CleanMetadataInput) (string, error) {
+func (r *mutationResolver) MetadataClean(ctx context.Context, input manager.CleanMetadataInput) (string, error) {
	jobID := manager.GetInstance().Clean(ctx, input)
	return strconv.Itoa(jobID), nil
}
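Because gqlgen.yml rebinds both IdentifyMetadataInput and IdentifyMetadataTaskOptions to identify.Options, MetadataIdentify can hand its argument straight to manager.CreateIdentifyJob with no conversion layer in between. A rough sketch of what that shared type looks like; the field names and types shown are assumptions based on the bindings above:

// Sketch only: identify.Options doubles as the GraphQL input type and the
// identify task options; its real fields may differ.
package identify

type Options struct {
	Sources []Source        // where to identify from (stash-box endpoints, scrapers, ...)
	Options MetadataOptions // per-field strategies and defaults
}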
@@ -107,7 +107,7 @@ func (r *mutationResolver) MigrateHashNaming(ctx context.Context) (string, error
	return strconv.Itoa(jobID), nil
}

-func (r *mutationResolver) BackupDatabase(ctx context.Context, input models.BackupDatabaseInput) (*string, error) {
+func (r *mutationResolver) BackupDatabase(ctx context.Context, input BackupDatabaseInput) (*string, error) {
	// if download is true, then backup to temporary file and return a link
	download := input.Download != nil && *input.Download
	mgr := manager.GetInstance()

@@ -25,7 +25,7 @@ func (r *mutationResolver) getMovie(ctx context.Context, id int) (ret *models.Mo
	return ret, nil
}

-func (r *mutationResolver) MovieCreate(ctx context.Context, input models.MovieCreateInput) (*models.Movie, error) {
+func (r *mutationResolver) MovieCreate(ctx context.Context, input MovieCreateInput) (*models.Movie, error) {
	// generate checksum from movie name rather than image
	checksum := md5.FromString(input.Name)

@@ -123,7 +123,7 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input models.MovieCr
	return r.getMovie(ctx, movie.ID)
}

-func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUpdateInput) (*models.Movie, error) {
+func (r *mutationResolver) MovieUpdate(ctx context.Context, input MovieUpdateInput) (*models.Movie, error) {
	// Populate movie from the input
	movieID, err := strconv.Atoi(input.ID)
	if err != nil {

@@ -223,7 +223,7 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input models.MovieUp
	return r.getMovie(ctx, movie.ID)
}

-func (r *mutationResolver) BulkMovieUpdate(ctx context.Context, input models.BulkMovieUpdateInput) ([]*models.Movie, error) {
+func (r *mutationResolver) BulkMovieUpdate(ctx context.Context, input BulkMovieUpdateInput) ([]*models.Movie, error) {
	movieIDs, err := stringslice.StringSliceToIntSlice(input.Ids)
	if err != nil {
		return nil, err

@@ -288,7 +288,7 @@ func (r *mutationResolver) BulkMovieUpdate(ctx context.Context, input models.Bul
	return newRet, nil
}

-func (r *mutationResolver) MovieDestroy(ctx context.Context, input models.MovieDestroyInput) (bool, error) {
+func (r *mutationResolver) MovieDestroy(ctx context.Context, input MovieDestroyInput) (bool, error) {
	id, err := strconv.Atoi(input.ID)
	if err != nil {
		return false, err

@@ -26,7 +26,7 @@ func (r *mutationResolver) getPerformer(ctx context.Context, id int) (ret *model
	return ret, nil
}

-func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.PerformerCreateInput) (*models.Performer, error) {
+func (r *mutationResolver) PerformerCreate(ctx context.Context, input PerformerCreateInput) (*models.Performer, error) {
	// generate checksum from performer name rather than image
	checksum := md5.FromString(input.Name)

@@ -167,7 +167,7 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
	return r.getPerformer(ctx, performer.ID)
}

-func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.PerformerUpdateInput) (*models.Performer, error) {
+func (r *mutationResolver) PerformerUpdate(ctx context.Context, input PerformerUpdateInput) (*models.Performer, error) {
	// Populate performer from the input
	performerID, _ := strconv.Atoi(input.ID)
	updatedPerformer := models.PerformerPartial{

@@ -298,7 +298,7 @@ func (r *mutationResolver) updatePerformerTags(qb models.PerformerReaderWriter,
	return qb.UpdateTags(performerID, ids)
}

-func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input models.BulkPerformerUpdateInput) ([]*models.Performer, error) {
+func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPerformerUpdateInput) ([]*models.Performer, error) {
	performerIDs, err := stringslice.StringSliceToIntSlice(input.Ids)
	if err != nil {
		return nil, err

@@ -409,7 +409,7 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input models
	return newRet, nil
}

-func (r *mutationResolver) PerformerDestroy(ctx context.Context, input models.PerformerDestroyInput) (bool, error) {
+func (r *mutationResolver) PerformerDestroy(ctx context.Context, input PerformerDestroyInput) (bool, error) {
	id, err := strconv.Atoi(input.ID)
	if err != nil {
		return false, err
@@ -5,10 +5,10 @@ import (

	"github.com/stashapp/stash/internal/manager"
	"github.com/stashapp/stash/pkg/logger"
-	"github.com/stashapp/stash/pkg/models"
+	"github.com/stashapp/stash/pkg/plugin"
)

-func (r *mutationResolver) RunPluginTask(ctx context.Context, pluginID string, taskName string, args []*models.PluginArgInput) (string, error) {
+func (r *mutationResolver) RunPluginTask(ctx context.Context, pluginID string, taskName string, args []*plugin.PluginArgInput) (string, error) {
	m := manager.GetInstance()
	m.RunPluginTask(ctx, pluginID, taskName, args)
	return "todo", nil

@@ -9,7 +9,7 @@ import (
	"github.com/stashapp/stash/pkg/models"
)

-func (r *mutationResolver) SaveFilter(ctx context.Context, input models.SaveFilterInput) (ret *models.SavedFilter, err error) {
+func (r *mutationResolver) SaveFilter(ctx context.Context, input SaveFilterInput) (ret *models.SavedFilter, err error) {
	if strings.TrimSpace(input.Name) == "" {
		return nil, errors.New("name must be non-empty")
	}

@@ -42,7 +42,7 @@ func (r *mutationResolver) SaveFilter(ctx context.Context, input models.SaveFilt
	return ret, err
}

-func (r *mutationResolver) DestroySavedFilter(ctx context.Context, input models.DestroyFilterInput) (bool, error) {
+func (r *mutationResolver) DestroySavedFilter(ctx context.Context, input DestroyFilterInput) (bool, error) {
	id, err := strconv.Atoi(input.ID)
	if err != nil {
		return false, err

@@ -57,7 +57,7 @@ func (r *mutationResolver) DestroySavedFilter(ctx context.Context, input models.
	return true, nil
}

-func (r *mutationResolver) SetDefaultFilter(ctx context.Context, input models.SetDefaultFilterInput) (bool, error) {
+func (r *mutationResolver) SetDefaultFilter(ctx context.Context, input SetDefaultFilterInput) (bool, error) {
	if err := r.withTxn(ctx, func(repo models.Repository) error {
		qb := repo.SavedFilter()

@@ -232,7 +232,7 @@ func (r *mutationResolver) updateSceneGalleries(qb models.SceneReaderWriter, sce
	return qb.UpdateGalleries(sceneID, ids)
}

-func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input models.BulkSceneUpdateInput) ([]*models.Scene, error) {
+func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneUpdateInput) ([]*models.Scene, error) {
	sceneIDs, err := stringslice.StringSliceToIntSlice(input.Ids)
	if err != nil {
		return nil, err

@@ -343,9 +343,9 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input models.Bul
	return newRet, nil
}

-func adjustIDs(existingIDs []int, updateIDs models.BulkUpdateIds) []int {
+func adjustIDs(existingIDs []int, updateIDs BulkUpdateIds) []int {
	// if we are setting the ids, just return the ids
-	if updateIDs.Mode == models.BulkUpdateIDModeSet {
+	if updateIDs.Mode == BulkUpdateIDModeSet {
		existingIDs = []int{}
		for _, idStr := range updateIDs.Ids {
			id, _ := strconv.Atoi(idStr)

@@ -362,7 +362,7 @@ func adjustIDs(existingIDs []int, updateIDs models.BulkUpdateIds) []int {
		foundExisting := false
		for idx, existingID := range existingIDs {
			if existingID == id {
-				if updateIDs.Mode == models.BulkUpdateIDModeRemove {
+				if updateIDs.Mode == BulkUpdateIDModeRemove {
					// remove from the list
					existingIDs = append(existingIDs[:idx], existingIDs[idx+1:]...)
				}

@@ -372,7 +372,7 @@ func adjustIDs(existingIDs []int, updateIDs models.BulkUpdateIds) []int {
			}
		}

-		if !foundExisting && updateIDs.Mode != models.BulkUpdateIDModeRemove {
+		if !foundExisting && updateIDs.Mode != BulkUpdateIDModeRemove {
			existingIDs = append(existingIDs, id)
		}
	}

@@ -380,7 +380,7 @@ func adjustIDs(existingIDs []int, updateIDs models.BulkUpdateIds) []int {
	return existingIDs
}

-func adjustScenePerformerIDs(qb models.SceneReader, sceneID int, ids models.BulkUpdateIds) (ret []int, err error) {
+func adjustScenePerformerIDs(qb models.SceneReader, sceneID int, ids BulkUpdateIds) (ret []int, err error) {
	ret, err = qb.GetPerformerIDs(sceneID)
	if err != nil {
		return nil, err

@@ -393,7 +393,7 @@ type tagIDsGetter interface {
	GetTagIDs(id int) ([]int, error)
}

-func adjustTagIDs(qb tagIDsGetter, sceneID int, ids models.BulkUpdateIds) (ret []int, err error) {
+func adjustTagIDs(qb tagIDsGetter, sceneID int, ids BulkUpdateIds) (ret []int, err error) {
	ret, err = qb.GetTagIDs(sceneID)
	if err != nil {
		return nil, err

@@ -402,7 +402,7 @@ func adjustTagIDs(qb tagIDsGetter, sceneID int, ids models.BulkUpdateIds) (ret [
	return adjustIDs(ret, ids), nil
}

-func adjustSceneGalleryIDs(qb models.SceneReader, sceneID int, ids models.BulkUpdateIds) (ret []int, err error) {
+func adjustSceneGalleryIDs(qb models.SceneReader, sceneID int, ids BulkUpdateIds) (ret []int, err error) {
	ret, err = qb.GetGalleryIDs(sceneID)
	if err != nil {
		return nil, err

@@ -411,14 +411,14 @@ func adjustSceneGalleryIDs(qb models.SceneReader, sceneID int, ids models.BulkUp
	return adjustIDs(ret, ids), nil
}

-func adjustSceneMovieIDs(qb models.SceneReader, sceneID int, updateIDs models.BulkUpdateIds) ([]models.MoviesScenes, error) {
+func adjustSceneMovieIDs(qb models.SceneReader, sceneID int, updateIDs BulkUpdateIds) ([]models.MoviesScenes, error) {
	existingMovies, err := qb.GetMovies(sceneID)
	if err != nil {
		return nil, err
	}

	// if we are setting the ids, just return the ids
-	if updateIDs.Mode == models.BulkUpdateIDModeSet {
+	if updateIDs.Mode == BulkUpdateIDModeSet {
		existingMovies = []models.MoviesScenes{}
		for _, idStr := range updateIDs.Ids {
			id, _ := strconv.Atoi(idStr)

@@ -435,7 +435,7 @@ func adjustSceneMovieIDs(qb models.SceneReader, sceneID int, updateIDs models.Bu
		foundExisting := false
		for idx, existingMovie := range existingMovies {
			if existingMovie.MovieID == id {
-				if updateIDs.Mode == models.BulkUpdateIDModeRemove {
+				if updateIDs.Mode == BulkUpdateIDModeRemove {
					// remove from the list
					existingMovies = append(existingMovies[:idx], existingMovies[idx+1:]...)
				}

@@ -445,7 +445,7 @@ func adjustSceneMovieIDs(qb models.SceneReader, sceneID int, updateIDs models.Bu
			}
		}

-		if !foundExisting && updateIDs.Mode != models.BulkUpdateIDModeRemove {
+		if !foundExisting && updateIDs.Mode != BulkUpdateIDModeRemove {
			existingMovies = append(existingMovies, models.MoviesScenes{MovieID: id})
		}
	}
@@ -574,7 +574,7 @@ func (r *mutationResolver) getSceneMarker(ctx context.Context, id int) (ret *mod
	return ret, nil
}

-func (r *mutationResolver) SceneMarkerCreate(ctx context.Context, input models.SceneMarkerCreateInput) (*models.SceneMarker, error) {
+func (r *mutationResolver) SceneMarkerCreate(ctx context.Context, input SceneMarkerCreateInput) (*models.SceneMarker, error) {
	primaryTagID, err := strconv.Atoi(input.PrimaryTagID)
	if err != nil {
		return nil, err

@@ -609,7 +609,7 @@ func (r *mutationResolver) SceneMarkerCreate(ctx context.Context, input models.S
	return r.getSceneMarker(ctx, ret.ID)
}

-func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input models.SceneMarkerUpdateInput) (*models.SceneMarker, error) {
+func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input SceneMarkerUpdateInput) (*models.SceneMarker, error) {
	// Populate scene marker from the input
	sceneMarkerID, err := strconv.Atoi(input.ID)
	if err != nil {

@@ -11,7 +11,7 @@ import (
	"github.com/stashapp/stash/pkg/scraper/stashbox"
)

-func (r *mutationResolver) SubmitStashBoxFingerprints(ctx context.Context, input models.StashBoxFingerprintSubmissionInput) (bool, error) {
+func (r *mutationResolver) SubmitStashBoxFingerprints(ctx context.Context, input StashBoxFingerprintSubmissionInput) (bool, error) {
	boxes := config.GetInstance().GetStashBoxes()

	if input.StashBoxIndex < 0 || input.StashBoxIndex >= len(boxes) {

@@ -23,12 +23,12 @@ func (r *mutationResolver) SubmitStashBoxFingerprints(ctx context.Context, input
	return client.SubmitStashBoxFingerprints(ctx, input.SceneIds, boxes[input.StashBoxIndex].Endpoint)
}

-func (r *mutationResolver) StashBoxBatchPerformerTag(ctx context.Context, input models.StashBoxBatchPerformerTagInput) (string, error) {
+func (r *mutationResolver) StashBoxBatchPerformerTag(ctx context.Context, input manager.StashBoxBatchPerformerTagInput) (string, error) {
	jobID := manager.GetInstance().StashBoxBatchPerformerTag(ctx, input)
	return strconv.Itoa(jobID), nil
}

-func (r *mutationResolver) SubmitStashBoxSceneDraft(ctx context.Context, input models.StashBoxDraftSubmissionInput) (*string, error) {
+func (r *mutationResolver) SubmitStashBoxSceneDraft(ctx context.Context, input StashBoxDraftSubmissionInput) (*string, error) {
	boxes := config.GetInstance().GetStashBoxes()

	if input.StashBoxIndex < 0 || input.StashBoxIndex >= len(boxes) {

@@ -58,7 +58,7 @@ func (r *mutationResolver) SubmitStashBoxSceneDraft(ctx context.Context, input m
	return res, err
}

-func (r *mutationResolver) SubmitStashBoxPerformerDraft(ctx context.Context, input models.StashBoxDraftSubmissionInput) (*string, error) {
+func (r *mutationResolver) SubmitStashBoxPerformerDraft(ctx context.Context, input StashBoxDraftSubmissionInput) (*string, error) {
	boxes := config.GetInstance().GetStashBoxes()

	if input.StashBoxIndex < 0 || input.StashBoxIndex >= len(boxes) {

@@ -27,7 +27,7 @@ func (r *mutationResolver) getStudio(ctx context.Context, id int) (ret *models.S
	return ret, nil
}

-func (r *mutationResolver) StudioCreate(ctx context.Context, input models.StudioCreateInput) (*models.Studio, error) {
+func (r *mutationResolver) StudioCreate(ctx context.Context, input StudioCreateInput) (*models.Studio, error) {
	// generate checksum from studio name rather than image
	checksum := md5.FromString(input.Name)

@@ -115,7 +115,7 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
	return r.getStudio(ctx, s.ID)
}

-func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.StudioUpdateInput) (*models.Studio, error) {
+func (r *mutationResolver) StudioUpdate(ctx context.Context, input StudioUpdateInput) (*models.Studio, error) {
	// Populate studio from the input
	studioID, err := strconv.Atoi(input.ID)
	if err != nil {

@@ -207,7 +207,7 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
	return r.getStudio(ctx, s.ID)
}

-func (r *mutationResolver) StudioDestroy(ctx context.Context, input models.StudioDestroyInput) (bool, error) {
+func (r *mutationResolver) StudioDestroy(ctx context.Context, input StudioDestroyInput) (bool, error) {
	id, err := strconv.Atoi(input.ID)
	if err != nil {
		return false, err
@@ -25,7 +25,7 @@ func (r *mutationResolver) getTag(ctx context.Context, id int) (ret *models.Tag,
	return ret, nil
}

-func (r *mutationResolver) TagCreate(ctx context.Context, input models.TagCreateInput) (*models.Tag, error) {
+func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput) (*models.Tag, error) {
	// Populate a new tag from the input
	currentTime := time.Now()
	newTag := models.Tag{

@@ -127,7 +127,7 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input models.TagCreate
	return r.getTag(ctx, t.ID)
}

-func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdateInput) (*models.Tag, error) {
+func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) (*models.Tag, error) {
	// Populate tag from the input
	tagID, err := strconv.Atoi(input.ID)
	if err != nil {

@@ -252,7 +252,7 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdate
	return r.getTag(ctx, t.ID)
}

-func (r *mutationResolver) TagDestroy(ctx context.Context, input models.TagDestroyInput) (bool, error) {
+func (r *mutationResolver) TagDestroy(ctx context.Context, input TagDestroyInput) (bool, error) {
	tagID, err := strconv.Atoi(input.ID)
	if err != nil {
		return false, err

@@ -295,7 +295,7 @@ func (r *mutationResolver) TagsDestroy(ctx context.Context, tagIDs []string) (bo
	return true, nil
}

-func (r *mutationResolver) TagsMerge(ctx context.Context, input models.TagsMergeInput) (*models.Tag, error) {
+func (r *mutationResolver) TagsMerge(ctx context.Context, input TagsMergeInput) (*models.Tag, error) {
	source, err := stringslice.StringSliceToIntSlice(input.Source)
	if err != nil {
		return nil, err

@@ -73,13 +73,13 @@ func TestTagCreate(t *testing.T) {
	expectedErr := errors.New("TagCreate error")
	tagRW.On("Create", mock.AnythingOfType("models.Tag")).Return(nil, expectedErr)

-	_, err := r.Mutation().TagCreate(context.TODO(), models.TagCreateInput{
+	_, err := r.Mutation().TagCreate(context.TODO(), TagCreateInput{
		Name: existingTagName,
	})

	assert.NotNil(t, err)

-	_, err = r.Mutation().TagCreate(context.TODO(), models.TagCreateInput{
+	_, err = r.Mutation().TagCreate(context.TODO(), TagCreateInput{
		Name: errTagName,
	})

@@ -98,7 +98,7 @@ func TestTagCreate(t *testing.T) {
	tagRW.On("Create", mock.AnythingOfType("models.Tag")).Return(newTag, nil)
	tagRW.On("Find", newTagID).Return(newTag, nil)

-	tag, err := r.Mutation().TagCreate(context.TODO(), models.TagCreateInput{
+	tag, err := r.Mutation().TagCreate(context.TODO(), TagCreateInput{
		Name: tagName,
	})

@@ -13,13 +13,13 @@ import (
	"golang.org/x/text/collate"
)

-func (r *queryResolver) Configuration(ctx context.Context) (*models.ConfigResult, error) {
+func (r *queryResolver) Configuration(ctx context.Context) (*ConfigResult, error) {
	return makeConfigResult(), nil
}

-func (r *queryResolver) Directory(ctx context.Context, path, locale *string) (*models.Directory, error) {
+func (r *queryResolver) Directory(ctx context.Context, path, locale *string) (*Directory, error) {

-	directory := &models.Directory{}
+	directory := &Directory{}
	var err error

	col := newCollator(locale, collate.IgnoreCase, collate.Numeric)

@@ -59,8 +59,8 @@ func getParent(path string) *string {
	}
}

-func makeConfigResult() *models.ConfigResult {
-	return &models.ConfigResult{
+func makeConfigResult() *ConfigResult {
+	return &ConfigResult{
		General: makeConfigGeneralResult(),
		Interface: makeConfigInterfaceResult(),
		Dlna: makeConfigDLNAResult(),
@@ -70,7 +70,7 @@ func makeConfigResult() *models.ConfigResult {
	}
}

-func makeConfigGeneralResult() *models.ConfigGeneralResult {
+func makeConfigGeneralResult() *ConfigGeneralResult {
	config := config.GetInstance()
	logFile := config.GetLogFile()

@@ -82,7 +82,7 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult {
	scraperUserAgent := config.GetScraperUserAgent()
	scraperCDPPath := config.GetScraperCDPPath()

-	return &models.ConfigGeneralResult{
+	return &ConfigGeneralResult{
		Stashes: config.GetStashPaths(),
		DatabasePath: config.GetDatabasePath(),
		GeneratedPath: config.GetGeneratedPath(),

@@ -125,7 +125,7 @@ func makeConfigGeneralResult() *models.ConfigGeneralResult {
	}
}

-func makeConfigInterfaceResult() *models.ConfigInterfaceResult {
+func makeConfigInterfaceResult() *ConfigInterfaceResult {
	config := config.GetInstance()
	menuItems := config.GetMenuItems()
	soundOnPreview := config.GetSoundOnPreview()

@@ -149,7 +149,7 @@ func makeConfigInterfaceResult() *models.ConfigInterfaceResult {
	// FIXME - misnamed output field means we have redundant fields
	disableDropdownCreate := config.GetDisableDropdownCreate()

-	return &models.ConfigInterfaceResult{
+	return &ConfigInterfaceResult{
		MenuItems: menuItems,
		SoundOnPreview: &soundOnPreview,
		WallShowTitle: &wallShowTitle,

@@ -177,10 +177,10 @@ func makeConfigInterfaceResult() *models.ConfigInterfaceResult {
	}
}

-func makeConfigDLNAResult() *models.ConfigDLNAResult {
+func makeConfigDLNAResult() *ConfigDLNAResult {
	config := config.GetInstance()

-	return &models.ConfigDLNAResult{
+	return &ConfigDLNAResult{
		ServerName: config.GetDLNAServerName(),
		Enabled: config.GetDLNADefaultEnabled(),
		WhitelistedIPs: config.GetDLNADefaultIPWhitelist(),

@@ -188,13 +188,13 @@ func makeConfigDLNAResult() *models.ConfigDLNAResult {
	}
}

-func makeConfigScrapingResult() *models.ConfigScrapingResult {
+func makeConfigScrapingResult() *ConfigScrapingResult {
	config := config.GetInstance()

	scraperUserAgent := config.GetScraperUserAgent()
	scraperCDPPath := config.GetScraperCDPPath()

-	return &models.ConfigScrapingResult{
+	return &ConfigScrapingResult{
		ScraperUserAgent: &scraperUserAgent,
		ScraperCertCheck: config.GetScraperCertCheck(),
		ScraperCDPPath: &scraperCDPPath,

@@ -202,12 +202,12 @@ func makeConfigScrapingResult() *models.ConfigScrapingResult {
	}
}

-func makeConfigDefaultsResult() *models.ConfigDefaultSettingsResult {
+func makeConfigDefaultsResult() *ConfigDefaultSettingsResult {
	config := config.GetInstance()
	deleteFileDefault := config.GetDeleteFileDefault()
	deleteGeneratedDefault := config.GetDeleteGeneratedDefault()

-	return &models.ConfigDefaultSettingsResult{
+	return &ConfigDefaultSettingsResult{
		Identify: config.GetDefaultIdentifySettings(),
		Scan: config.GetDefaultScanSettings(),
		AutoTag: config.GetDefaultAutoTagSettings(),
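The makeConfig*Result helpers all follow the same shape: read current values from the config singleton and assemble a result struct that now lives in package api instead of pkg/models. Condensed to two fields, with names taken from the hunks above, the pattern is roughly:

// Condensed illustration of the builder pattern above; not a function from
// the repository.
package api

import "github.com/stashapp/stash/internal/manager/config"

func makeConfigGeneralResultSketch() *ConfigGeneralResult {
	c := config.GetInstance()
	return &ConfigGeneralResult{
		DatabasePath:  c.GetDatabasePath(),
		GeneratedPath: c.GetGeneratedPath(),
	}
}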
@@ -221,7 +221,7 @@ func makeConfigUIResult() map[string]interface{} {
	return config.GetInstance().GetUIConfiguration()
}

-func (r *queryResolver) ValidateStashBoxCredentials(ctx context.Context, input models.StashBoxInput) (*models.StashBoxValidationResult, error) {
+func (r *queryResolver) ValidateStashBoxCredentials(ctx context.Context, input config.StashBoxInput) (*StashBoxValidationResult, error) {
	client := stashbox.NewClient(models.StashBox{Endpoint: input.Endpoint, APIKey: input.APIKey}, r.txnManager)
	user, err := client.GetUser(ctx)

@@ -248,7 +248,7 @@ func (r *queryResolver) ValidateStashBoxCredentials(ctx context.Context, input m
		}
	}

-	result := models.StashBoxValidationResult{
+	result := StashBoxValidationResult{
		Valid: valid,
		Status: status,
	}

@@ -3,10 +3,10 @@ package api
import (
	"context"

+	"github.com/stashapp/stash/internal/dlna"
	"github.com/stashapp/stash/internal/manager"
-	"github.com/stashapp/stash/pkg/models"
)

-func (r *queryResolver) DlnaStatus(ctx context.Context) (*models.DLNAStatus, error) {
+func (r *queryResolver) DlnaStatus(ctx context.Context) (*dlna.Status, error) {
	return manager.GetInstance().DLNAService.Status(), nil
}
@ -23,14 +23,14 @@ func (r *queryResolver) FindGallery(ctx context.Context, id string) (ret *models
|
|||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindGalleries(ctx context.Context, galleryFilter *models.GalleryFilterType, filter *models.FindFilterType) (ret *models.FindGalleriesResultType, err error) {
|
||||
func (r *queryResolver) FindGalleries(ctx context.Context, galleryFilter *models.GalleryFilterType, filter *models.FindFilterType) (ret *FindGalleriesResultType, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
galleries, total, err := repo.Gallery().Query(galleryFilter, filter)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ret = &models.FindGalleriesResultType{
|
||||
ret = &FindGalleriesResultType{
|
||||
Count: total,
|
||||
Galleries: galleries,
|
||||
}
|
||||
|
|
|
|||
|
|
@ -38,7 +38,7 @@ func (r *queryResolver) FindImage(ctx context.Context, id *string, checksum *str
|
|||
return image, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindImages(ctx context.Context, imageFilter *models.ImageFilterType, imageIds []int, filter *models.FindFilterType) (ret *models.FindImagesResultType, err error) {
|
||||
func (r *queryResolver) FindImages(ctx context.Context, imageFilter *models.ImageFilterType, imageIds []int, filter *models.FindFilterType) (ret *FindImagesResultType, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
qb := repo.Image()
|
||||
|
||||
|
|
@ -62,7 +62,7 @@ func (r *queryResolver) FindImages(ctx context.Context, imageFilter *models.Imag
|
|||
return err
|
||||
}
|
||||
|
||||
ret = &models.FindImagesResultType{
|
||||
ret = &FindImagesResultType{
|
||||
Count: result.Count,
|
||||
Images: images,
|
||||
Megapixels: result.Megapixels,
|
||||
|
|
|
|||
|
|
@ -23,14 +23,14 @@ func (r *queryResolver) FindMovie(ctx context.Context, id string) (ret *models.M
|
|||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindMovies(ctx context.Context, movieFilter *models.MovieFilterType, filter *models.FindFilterType) (ret *models.FindMoviesResultType, err error) {
|
||||
func (r *queryResolver) FindMovies(ctx context.Context, movieFilter *models.MovieFilterType, filter *models.FindFilterType) (ret *FindMoviesResultType, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
movies, total, err := repo.Movie().Query(movieFilter, filter)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ret = &models.FindMoviesResultType{
|
||||
ret = &FindMoviesResultType{
|
||||
Count: total,
|
||||
Movies: movies,
|
||||
}
|
||||
|
|
|
|||
|
|
@ -23,14 +23,14 @@ func (r *queryResolver) FindPerformer(ctx context.Context, id string) (ret *mode
|
|||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindPerformers(ctx context.Context, performerFilter *models.PerformerFilterType, filter *models.FindFilterType) (ret *models.FindPerformersResultType, err error) {
|
||||
func (r *queryResolver) FindPerformers(ctx context.Context, performerFilter *models.PerformerFilterType, filter *models.FindFilterType) (ret *FindPerformersResultType, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
performers, total, err := repo.Performer().Query(performerFilter, filter)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ret = &models.FindPerformersResultType{
|
||||
ret = &FindPerformersResultType{
|
||||
Count: total,
|
||||
Performers: performers,
|
||||
}
|
||||
|
|
|
|||
|
|
@ -36,7 +36,7 @@ func (r *queryResolver) FindScene(ctx context.Context, id *string, checksum *str
|
|||
return scene, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindSceneByHash(ctx context.Context, input models.SceneHashInput) (*models.Scene, error) {
|
||||
func (r *queryResolver) FindSceneByHash(ctx context.Context, input SceneHashInput) (*models.Scene, error) {
|
||||
var scene *models.Scene
|
||||
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
|
|
@ -64,7 +64,7 @@ func (r *queryResolver) FindSceneByHash(ctx context.Context, input models.SceneH
|
|||
return scene, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.SceneFilterType, sceneIDs []int, filter *models.FindFilterType) (ret *models.FindScenesResultType, err error) {
|
||||
func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.SceneFilterType, sceneIDs []int, filter *models.FindFilterType) (ret *FindScenesResultType, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
var scenes []*models.Scene
|
||||
var err error
|
||||
|
|
@ -101,7 +101,7 @@ func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.Scen
|
|||
return err
|
||||
}
|
||||
|
||||
ret = &models.FindScenesResultType{
|
||||
ret = &FindScenesResultType{
|
||||
Count: result.Count,
|
||||
Scenes: scenes,
|
||||
Duration: result.TotalDuration,
|
||||
|
|
@ -116,7 +116,7 @@ func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.Scen
|
|||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindScenesByPathRegex(ctx context.Context, filter *models.FindFilterType) (ret *models.FindScenesResultType, err error) {
|
||||
func (r *queryResolver) FindScenesByPathRegex(ctx context.Context, filter *models.FindFilterType) (ret *FindScenesResultType, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
|
||||
sceneFilter := &models.SceneFilterType{}
|
||||
|
|
@ -156,7 +156,7 @@ func (r *queryResolver) FindScenesByPathRegex(ctx context.Context, filter *model
|
|||
return err
|
||||
}
|
||||
|
||||
ret = &models.FindScenesResultType{
|
||||
ret = &FindScenesResultType{
|
||||
Count: result.Count,
|
||||
Scenes: scenes,
|
||||
Duration: result.TotalDuration,
|
||||
|
|
@ -171,7 +171,7 @@ func (r *queryResolver) FindScenesByPathRegex(ctx context.Context, filter *model
|
|||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) ParseSceneFilenames(ctx context.Context, filter *models.FindFilterType, config models.SceneParserInput) (ret *models.SceneParserResultType, err error) {
|
||||
func (r *queryResolver) ParseSceneFilenames(ctx context.Context, filter *models.FindFilterType, config manager.SceneParserInput) (ret *SceneParserResultType, err error) {
|
||||
parser := manager.NewSceneFilenameParser(filter, config)
|
||||
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
|
|
@ -181,7 +181,7 @@ func (r *queryResolver) ParseSceneFilenames(ctx context.Context, filter *models.
|
|||
return err
|
||||
}
|
||||
|
||||
ret = &models.SceneParserResultType{
|
||||
ret = &SceneParserResultType{
|
||||
Count: count,
|
||||
Results: result,
|
||||
}
|
||||
|
|
|
|||
|
|
@ -6,13 +6,13 @@ import (
|
|||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func (r *queryResolver) FindSceneMarkers(ctx context.Context, sceneMarkerFilter *models.SceneMarkerFilterType, filter *models.FindFilterType) (ret *models.FindSceneMarkersResultType, err error) {
|
||||
func (r *queryResolver) FindSceneMarkers(ctx context.Context, sceneMarkerFilter *models.SceneMarkerFilterType, filter *models.FindFilterType) (ret *FindSceneMarkersResultType, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
sceneMarkers, total, err := repo.SceneMarker().Query(sceneMarkerFilter, filter)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
ret = &models.FindSceneMarkersResultType{
|
||||
ret = &FindSceneMarkersResultType{
|
||||
Count: total,
|
||||
SceneMarkers: sceneMarkers,
|
||||
}
|
||||
|
|
|
|||
|
|
@ -24,14 +24,14 @@ func (r *queryResolver) FindStudio(ctx context.Context, id string) (ret *models.
|
|||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindStudios(ctx context.Context, studioFilter *models.StudioFilterType, filter *models.FindFilterType) (ret *models.FindStudiosResultType, err error) {
|
||||
func (r *queryResolver) FindStudios(ctx context.Context, studioFilter *models.StudioFilterType, filter *models.FindFilterType) (ret *FindStudiosResultType, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
studios, total, err := repo.Studio().Query(studioFilter, filter)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ret = &models.FindStudiosResultType{
|
||||
ret = &FindStudiosResultType{
|
||||
Count: total,
|
||||
Studios: studios,
|
||||
}
|
||||
|
|
|
|||
|
|
@ -23,14 +23,14 @@ func (r *queryResolver) FindTag(ctx context.Context, id string) (ret *models.Tag
|
|||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindTags(ctx context.Context, tagFilter *models.TagFilterType, filter *models.FindFilterType) (ret *models.FindTagsResultType, err error) {
|
||||
func (r *queryResolver) FindTags(ctx context.Context, tagFilter *models.TagFilterType, filter *models.FindFilterType) (ret *FindTagsResultType, err error) {
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
tags, total, err := repo.Tag().Query(tagFilter, filter)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ret = &models.FindTagsResultType{
|
||||
ret = &FindTagsResultType{
|
||||
Count: total,
|
||||
Tags: tags,
|
||||
}
|
||||
|
|
|
|||
|
|
@ -6,13 +6,12 @@ import (
|
|||
|
||||
"github.com/stashapp/stash/internal/manager"
|
||||
"github.com/stashapp/stash/pkg/job"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func (r *queryResolver) JobQueue(ctx context.Context) ([]*models.Job, error) {
|
||||
func (r *queryResolver) JobQueue(ctx context.Context) ([]*Job, error) {
|
||||
queue := manager.GetInstance().JobManager.GetQueue()
|
||||
|
||||
var ret []*models.Job
|
||||
var ret []*Job
|
||||
for _, j := range queue {
|
||||
ret = append(ret, jobToJobModel(j))
|
||||
}
|
||||
|
|
@ -20,7 +19,7 @@ func (r *queryResolver) JobQueue(ctx context.Context) ([]*models.Job, error) {
|
|||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) FindJob(ctx context.Context, input models.FindJobInput) (*models.Job, error) {
|
||||
func (r *queryResolver) FindJob(ctx context.Context, input FindJobInput) (*Job, error) {
|
||||
jobID, err := strconv.Atoi(input.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
|
@ -33,10 +32,10 @@ func (r *queryResolver) FindJob(ctx context.Context, input models.FindJobInput)
|
|||
return jobToJobModel(*j), nil
|
||||
}
|
||||
|
||||
func jobToJobModel(j job.Job) *models.Job {
|
||||
ret := &models.Job{
|
||||
func jobToJobModel(j job.Job) *Job {
|
||||
ret := &Job{
|
||||
ID: strconv.Itoa(j.ID),
|
||||
Status: models.JobStatus(j.Status),
|
||||
Status: JobStatus(j.Status),
|
||||
Description: j.Description,
|
||||
SubTasks: j.Details,
|
||||
StartTime: j.StartTime,
|
||||
|
|
|
|||
|
|
@ -4,16 +4,15 @@ import (
|
|||
"context"
|
||||
|
||||
"github.com/stashapp/stash/internal/manager"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func (r *queryResolver) Logs(ctx context.Context) ([]*models.LogEntry, error) {
|
||||
func (r *queryResolver) Logs(ctx context.Context) ([]*LogEntry, error) {
|
||||
logger := manager.GetInstance().Logger
|
||||
logCache := logger.GetLogCache()
|
||||
ret := make([]*models.LogEntry, len(logCache))
|
||||
ret := make([]*LogEntry, len(logCache))
|
||||
|
||||
for i, entry := range logCache {
|
||||
ret[i] = &models.LogEntry{
|
||||
ret[i] = &LogEntry{
|
||||
Time: entry.Time,
|
||||
Level: getLogLevel(entry.Type),
|
||||
Message: entry.Message,
|
||||
|
|
|
|||
|
|
@ -4,9 +4,8 @@ import (
|
|||
"context"
|
||||
|
||||
"github.com/stashapp/stash/internal/manager"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func (r *queryResolver) SystemStatus(ctx context.Context) (*models.SystemStatus, error) {
|
||||
func (r *queryResolver) SystemStatus(ctx context.Context) (*manager.SystemStatus, error) {
|
||||
return manager.GetInstance().GetSystemStatus(), nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,13 +4,13 @@ import (
|
|||
"context"
|
||||
|
||||
"github.com/stashapp/stash/internal/manager"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/plugin"
|
||||
)
|
||||
|
||||
func (r *queryResolver) Plugins(ctx context.Context) ([]*models.Plugin, error) {
|
||||
func (r *queryResolver) Plugins(ctx context.Context) ([]*plugin.Plugin, error) {
|
||||
return manager.GetInstance().PluginCache.ListPlugins(), nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) PluginTasks(ctx context.Context) ([]*models.PluginTask, error) {
|
||||
func (r *queryResolver) PluginTasks(ctx context.Context) ([]*plugin.PluginTask, error) {
|
||||
return manager.GetInstance().PluginCache.ListPluginTasks(), nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ import (
|
|||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func (r *queryResolver) SceneStreams(ctx context.Context, id *string) ([]*models.SceneStreamEndpoint, error) {
|
||||
func (r *queryResolver) SceneStreams(ctx context.Context, id *string) ([]*manager.SceneStreamEndpoint, error) {
|
||||
// find the scene
|
||||
var scene *models.Scene
|
||||
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
|
||||
|
|
|
|||
|
|
@ -17,13 +17,13 @@ import (
|
|||
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
|
||||
)
|
||||
|
||||
func (r *queryResolver) ScrapeURL(ctx context.Context, url string, ty models.ScrapeContentType) (models.ScrapedContent, error) {
|
||||
func (r *queryResolver) ScrapeURL(ctx context.Context, url string, ty scraper.ScrapeContentType) (scraper.ScrapedContent, error) {
|
||||
return r.scraperCache().ScrapeURL(ctx, url, ty)
|
||||
}
|
||||
|
||||
// deprecated
|
||||
func (r *queryResolver) ScrapeFreeonesPerformerList(ctx context.Context, query string) ([]string, error) {
|
||||
content, err := r.scraperCache().ScrapeName(ctx, scraper.FreeonesScraperID, query, models.ScrapeContentTypePerformer)
|
||||
content, err := r.scraperCache().ScrapeName(ctx, scraper.FreeonesScraperID, query, scraper.ScrapeContentTypePerformer)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
|
@ -44,24 +44,24 @@ func (r *queryResolver) ScrapeFreeonesPerformerList(ctx context.Context, query s
|
|||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) ListScrapers(ctx context.Context, types []models.ScrapeContentType) ([]*models.Scraper, error) {
|
||||
func (r *queryResolver) ListScrapers(ctx context.Context, types []scraper.ScrapeContentType) ([]*scraper.Scraper, error) {
|
||||
return r.scraperCache().ListScrapers(types), nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) ListPerformerScrapers(ctx context.Context) ([]*models.Scraper, error) {
|
||||
return r.scraperCache().ListScrapers([]models.ScrapeContentType{models.ScrapeContentTypePerformer}), nil
|
||||
func (r *queryResolver) ListPerformerScrapers(ctx context.Context) ([]*scraper.Scraper, error) {
|
||||
return r.scraperCache().ListScrapers([]scraper.ScrapeContentType{scraper.ScrapeContentTypePerformer}), nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) ListSceneScrapers(ctx context.Context) ([]*models.Scraper, error) {
|
||||
return r.scraperCache().ListScrapers([]models.ScrapeContentType{models.ScrapeContentTypeScene}), nil
|
||||
func (r *queryResolver) ListSceneScrapers(ctx context.Context) ([]*scraper.Scraper, error) {
|
||||
return r.scraperCache().ListScrapers([]scraper.ScrapeContentType{scraper.ScrapeContentTypeScene}), nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) ListGalleryScrapers(ctx context.Context) ([]*models.Scraper, error) {
|
||||
return r.scraperCache().ListScrapers([]models.ScrapeContentType{models.ScrapeContentTypeGallery}), nil
|
||||
func (r *queryResolver) ListGalleryScrapers(ctx context.Context) ([]*scraper.Scraper, error) {
|
||||
return r.scraperCache().ListScrapers([]scraper.ScrapeContentType{scraper.ScrapeContentTypeGallery}), nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) ListMovieScrapers(ctx context.Context) ([]*models.Scraper, error) {
|
||||
return r.scraperCache().ListScrapers([]models.ScrapeContentType{models.ScrapeContentTypeMovie}), nil
|
||||
func (r *queryResolver) ListMovieScrapers(ctx context.Context) ([]*scraper.Scraper, error) {
|
||||
return r.scraperCache().ListScrapers([]scraper.ScrapeContentType{scraper.ScrapeContentTypeMovie}), nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) ScrapePerformerList(ctx context.Context, scraperID string, query string) ([]*models.ScrapedPerformer, error) {
|
||||
|
|
@ -69,7 +69,7 @@ func (r *queryResolver) ScrapePerformerList(ctx context.Context, scraperID strin
|
|||
return nil, nil
|
||||
}
|
||||
|
||||
content, err := r.scraperCache().ScrapeName(ctx, scraperID, query, models.ScrapeContentTypePerformer)
|
||||
content, err := r.scraperCache().ScrapeName(ctx, scraperID, query, scraper.ScrapeContentTypePerformer)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
@ -77,7 +77,7 @@ func (r *queryResolver) ScrapePerformerList(ctx context.Context, scraperID strin
|
|||
return marshalScrapedPerformers(content)
|
||||
}
|
||||
|
||||
func (r *queryResolver) ScrapePerformer(ctx context.Context, scraperID string, scrapedPerformer models.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
|
||||
func (r *queryResolver) ScrapePerformer(ctx context.Context, scraperID string, scrapedPerformer scraper.ScrapedPerformerInput) (*models.ScrapedPerformer, error) {
|
||||
content, err := r.scraperCache().ScrapeFragment(ctx, scraperID, scraper.Input{Performer: &scrapedPerformer})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
|
@ -86,7 +86,7 @@ func (r *queryResolver) ScrapePerformer(ctx context.Context, scraperID string, s
|
|||
}
|
||||
|
||||
func (r *queryResolver) ScrapePerformerURL(ctx context.Context, url string) (*models.ScrapedPerformer, error) {
|
||||
content, err := r.scraperCache().ScrapeURL(ctx, url, models.ScrapeContentTypePerformer)
|
||||
content, err := r.scraperCache().ScrapeURL(ctx, url, scraper.ScrapeContentTypePerformer)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
@ -94,12 +94,12 @@ func (r *queryResolver) ScrapePerformerURL(ctx context.Context, url string) (*mo
|
|||
return marshalScrapedPerformer(content)
|
||||
}
|
||||
|
||||
func (r *queryResolver) ScrapeSceneQuery(ctx context.Context, scraperID string, query string) ([]*models.ScrapedScene, error) {
|
||||
func (r *queryResolver) ScrapeSceneQuery(ctx context.Context, scraperID string, query string) ([]*scraper.ScrapedScene, error) {
|
||||
if query == "" {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
content, err := r.scraperCache().ScrapeName(ctx, scraperID, query, models.ScrapeContentTypeScene)
|
||||
content, err := r.scraperCache().ScrapeName(ctx, scraperID, query, scraper.ScrapeContentTypeScene)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
@ -113,13 +113,13 @@ func (r *queryResolver) ScrapeSceneQuery(ctx context.Context, scraperID string,
|
|||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) ScrapeScene(ctx context.Context, scraperID string, scene models.SceneUpdateInput) (*models.ScrapedScene, error) {
|
||||
func (r *queryResolver) ScrapeScene(ctx context.Context, scraperID string, scene models.SceneUpdateInput) (*scraper.ScrapedScene, error) {
|
||||
id, err := strconv.Atoi(scene.ID)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("%w: scene.ID is not an integer: '%s'", ErrInput, scene.ID)
|
||||
}
|
||||
|
||||
content, err := r.scraperCache().ScrapeID(ctx, scraperID, id, models.ScrapeContentTypeScene)
|
||||
content, err := r.scraperCache().ScrapeID(ctx, scraperID, id, scraper.ScrapeContentTypeScene)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
@ -129,13 +129,13 @@ func (r *queryResolver) ScrapeScene(ctx context.Context, scraperID string, scene
|
|||
return nil, err
|
||||
}
|
||||
|
||||
filterSceneTags([]*models.ScrapedScene{ret})
|
||||
filterSceneTags([]*scraper.ScrapedScene{ret})
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
// filterSceneTags removes tags matching excluded tag patterns from the provided scraped scenes
|
||||
func filterSceneTags(scenes []*models.ScrapedScene) {
|
||||
func filterSceneTags(scenes []*scraper.ScrapedScene) {
|
||||
excludePatterns := manager.GetInstance().Config.GetScraperExcludeTagPatterns()
|
||||
var excludeRegexps []*regexp.Regexp
|
||||
|
||||
|
|
@ -179,8 +179,8 @@ func filterSceneTags(scenes []*models.ScrapedScene) {
|
|||
}
|
||||
}
|
||||
|
||||
func (r *queryResolver) ScrapeSceneURL(ctx context.Context, url string) (*models.ScrapedScene, error) {
|
||||
content, err := r.scraperCache().ScrapeURL(ctx, url, models.ScrapeContentTypeScene)
|
||||
func (r *queryResolver) ScrapeSceneURL(ctx context.Context, url string) (*scraper.ScrapedScene, error) {
|
||||
content, err := r.scraperCache().ScrapeURL(ctx, url, scraper.ScrapeContentTypeScene)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
@ -190,18 +190,18 @@ func (r *queryResolver) ScrapeSceneURL(ctx context.Context, url string) (*models
|
|||
return nil, err
|
||||
}
|
||||
|
||||
filterSceneTags([]*models.ScrapedScene{ret})
|
||||
filterSceneTags([]*scraper.ScrapedScene{ret})
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) ScrapeGallery(ctx context.Context, scraperID string, gallery models.GalleryUpdateInput) (*models.ScrapedGallery, error) {
|
||||
func (r *queryResolver) ScrapeGallery(ctx context.Context, scraperID string, gallery models.GalleryUpdateInput) (*scraper.ScrapedGallery, error) {
|
||||
id, err := strconv.Atoi(gallery.ID)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("%w: gallery id is not an integer: '%s'", ErrInput, gallery.ID)
|
||||
}
|
||||
|
||||
content, err := r.scraperCache().ScrapeID(ctx, scraperID, id, models.ScrapeContentTypeGallery)
|
||||
content, err := r.scraperCache().ScrapeID(ctx, scraperID, id, scraper.ScrapeContentTypeGallery)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
@ -209,8 +209,8 @@ func (r *queryResolver) ScrapeGallery(ctx context.Context, scraperID string, gal
|
|||
return marshalScrapedGallery(content)
|
||||
}
|
||||
|
||||
func (r *queryResolver) ScrapeGalleryURL(ctx context.Context, url string) (*models.ScrapedGallery, error) {
|
||||
content, err := r.scraperCache().ScrapeURL(ctx, url, models.ScrapeContentTypeGallery)
|
||||
func (r *queryResolver) ScrapeGalleryURL(ctx context.Context, url string) (*scraper.ScrapedGallery, error) {
|
||||
content, err := r.scraperCache().ScrapeURL(ctx, url, scraper.ScrapeContentTypeGallery)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
@ -219,7 +219,7 @@ func (r *queryResolver) ScrapeGalleryURL(ctx context.Context, url string) (*mode
|
|||
}
|
||||
|
||||
func (r *queryResolver) ScrapeMovieURL(ctx context.Context, url string) (*models.ScrapedMovie, error) {
|
||||
content, err := r.scraperCache().ScrapeURL(ctx, url, models.ScrapeContentTypeMovie)
|
||||
content, err := r.scraperCache().ScrapeURL(ctx, url, scraper.ScrapeContentTypeMovie)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
@ -237,8 +237,8 @@ func (r *queryResolver) getStashBoxClient(index int) (*stashbox.Client, error) {
|
|||
return stashbox.NewClient(*boxes[index], r.txnManager), nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSingleSceneInput) ([]*models.ScrapedScene, error) {
|
||||
var ret []*models.ScrapedScene
|
||||
func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source scraper.Source, input ScrapeSingleSceneInput) ([]*scraper.ScrapedScene, error) {
|
||||
var ret []*scraper.ScrapedScene
|
||||
|
||||
var sceneID int
|
||||
if input.SceneID != nil {
|
||||
|
|
@ -252,22 +252,22 @@ func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source models.Scr
|
|||
switch {
|
||||
case source.ScraperID != nil:
|
||||
var err error
|
||||
var c models.ScrapedContent
|
||||
var content []models.ScrapedContent
|
||||
var c scraper.ScrapedContent
|
||||
var content []scraper.ScrapedContent
|
||||
|
||||
switch {
|
||||
case input.SceneID != nil:
|
||||
c, err = r.scraperCache().ScrapeID(ctx, *source.ScraperID, sceneID, models.ScrapeContentTypeScene)
|
||||
c, err = r.scraperCache().ScrapeID(ctx, *source.ScraperID, sceneID, scraper.ScrapeContentTypeScene)
|
||||
if c != nil {
|
||||
content = []models.ScrapedContent{c}
|
||||
content = []scraper.ScrapedContent{c}
|
||||
}
|
||||
case input.SceneInput != nil:
|
||||
c, err = r.scraperCache().ScrapeFragment(ctx, *source.ScraperID, scraper.Input{Scene: input.SceneInput})
|
||||
if c != nil {
|
||||
content = []models.ScrapedContent{c}
|
||||
content = []scraper.ScrapedContent{c}
|
||||
}
|
||||
case input.Query != nil:
|
||||
content, err = r.scraperCache().ScrapeName(ctx, *source.ScraperID, *input.Query, models.ScrapeContentTypeScene)
|
||||
content, err = r.scraperCache().ScrapeName(ctx, *source.ScraperID, *input.Query, scraper.ScrapeContentTypeScene)
|
||||
default:
|
||||
err = fmt.Errorf("%w: scene_id, scene_input, or query must be set", ErrInput)
|
||||
}
|
||||
|
|
@ -307,7 +307,7 @@ func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source models.Scr
|
|||
return ret, nil
|
||||
}
|
||||
|
||||
func (r *queryResolver) ScrapeMultiScenes(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeMultiScenesInput) ([][]*models.ScrapedScene, error) {
|
||||
func (r *queryResolver) ScrapeMultiScenes(ctx context.Context, source scraper.Source, input ScrapeMultiScenesInput) ([][]*scraper.ScrapedScene, error) {
|
||||
if source.ScraperID != nil {
|
||||
return nil, ErrNotImplemented
|
||||
} else if source.StashBoxIndex != nil {
|
||||
|
|
@ -327,7 +327,7 @@ func (r *queryResolver) ScrapeMultiScenes(ctx context.Context, source models.Scr
|
|||
return nil, errors.New("scraper_id or stash_box_index must be set")
|
||||
}
|
||||
|
||||
func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSinglePerformerInput) ([]*models.ScrapedPerformer, error) {
|
||||
func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source scraper.Source, input ScrapeSinglePerformerInput) ([]*models.ScrapedPerformer, error) {
|
||||
if source.ScraperID != nil {
|
||||
if input.PerformerInput != nil {
|
||||
performer, err := r.scraperCache().ScrapeFragment(ctx, *source.ScraperID, scraper.Input{Performer: input.PerformerInput})
|
||||
|
|
@ -335,11 +335,11 @@ func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source models
|
|||
return nil, err
|
||||
}
|
||||
|
||||
return marshalScrapedPerformers([]models.ScrapedContent{performer})
|
||||
return marshalScrapedPerformers([]scraper.ScrapedContent{performer})
|
||||
}
|
||||
|
||||
if input.Query != nil {
|
||||
content, err := r.scraperCache().ScrapeName(ctx, *source.ScraperID, *input.Query, models.ScrapeContentTypePerformer)
|
||||
content, err := r.scraperCache().ScrapeName(ctx, *source.ScraperID, *input.Query, scraper.ScrapeContentTypePerformer)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
@ -354,7 +354,7 @@ func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source models
|
|||
return nil, err
|
||||
}
|
||||
|
||||
var ret []*models.StashBoxPerformerQueryResult
|
||||
var ret []*stashbox.StashBoxPerformerQueryResult
|
||||
switch {
|
||||
case input.PerformerID != nil:
|
||||
ret, err = client.FindStashBoxPerformersByNames(ctx, []string{*input.PerformerID})
|
||||
|
|
@ -378,7 +378,7 @@ func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source models
|
|||
return nil, errors.New("scraper_id or stash_box_index must be set")
|
||||
}
|
||||
|
||||
func (r *queryResolver) ScrapeMultiPerformers(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeMultiPerformersInput) ([][]*models.ScrapedPerformer, error) {
|
||||
func (r *queryResolver) ScrapeMultiPerformers(ctx context.Context, source scraper.Source, input ScrapeMultiPerformersInput) ([][]*models.ScrapedPerformer, error) {
|
||||
if source.ScraperID != nil {
|
||||
return nil, ErrNotImplemented
|
||||
} else if source.StashBoxIndex != nil {
|
||||
|
|
@ -393,7 +393,7 @@ func (r *queryResolver) ScrapeMultiPerformers(ctx context.Context, source models
|
|||
return nil, errors.New("scraper_id or stash_box_index must be set")
|
||||
}
|
||||
|
||||
func (r *queryResolver) ScrapeSingleGallery(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSingleGalleryInput) ([]*models.ScrapedGallery, error) {
|
||||
func (r *queryResolver) ScrapeSingleGallery(ctx context.Context, source scraper.Source, input ScrapeSingleGalleryInput) ([]*scraper.ScrapedGallery, error) {
|
||||
if source.StashBoxIndex != nil {
|
||||
return nil, ErrNotSupported
|
||||
}
|
||||
|
|
@ -402,7 +402,7 @@ func (r *queryResolver) ScrapeSingleGallery(ctx context.Context, source models.S
|
|||
return nil, fmt.Errorf("%w: scraper_id must be set", ErrInput)
|
||||
}
|
||||
|
||||
var c models.ScrapedContent
|
||||
var c scraper.ScrapedContent
|
||||
|
||||
switch {
|
||||
case input.GalleryID != nil:
|
||||
|
|
@ -410,22 +410,22 @@ func (r *queryResolver) ScrapeSingleGallery(ctx context.Context, source models.S
|
|||
if err != nil {
|
||||
return nil, fmt.Errorf("%w: gallery id is not an integer: '%s'", ErrInput, *input.GalleryID)
|
||||
}
|
||||
c, err = r.scraperCache().ScrapeID(ctx, *source.ScraperID, galleryID, models.ScrapeContentTypeGallery)
|
||||
c, err = r.scraperCache().ScrapeID(ctx, *source.ScraperID, galleryID, scraper.ScrapeContentTypeGallery)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return marshalScrapedGalleries([]models.ScrapedContent{c})
|
||||
return marshalScrapedGalleries([]scraper.ScrapedContent{c})
|
||||
case input.GalleryInput != nil:
|
||||
c, err := r.scraperCache().ScrapeFragment(ctx, *source.ScraperID, scraper.Input{Gallery: input.GalleryInput})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return marshalScrapedGalleries([]models.ScrapedContent{c})
|
||||
return marshalScrapedGalleries([]scraper.ScrapedContent{c})
|
||||
default:
|
||||
return nil, ErrNotImplemented
|
||||
}
|
||||
}
|
||||
|
||||
func (r *queryResolver) ScrapeSingleMovie(ctx context.Context, source models.ScraperSourceInput, input models.ScrapeSingleMovieInput) ([]*models.ScrapedMovie, error) {
|
||||
func (r *queryResolver) ScrapeSingleMovie(ctx context.Context, source scraper.Source, input ScrapeSingleMovieInput) ([]*models.ScrapedMovie, error) {
|
||||
return nil, ErrNotSupported
|
||||
}
|
||||
|
|
|
|||
|
|
@ -5,18 +5,17 @@ import (
|
|||
|
||||
"github.com/stashapp/stash/internal/manager"
|
||||
"github.com/stashapp/stash/pkg/job"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func makeJobStatusUpdate(t models.JobStatusUpdateType, j job.Job) *models.JobStatusUpdate {
|
||||
return &models.JobStatusUpdate{
|
||||
func makeJobStatusUpdate(t JobStatusUpdateType, j job.Job) *JobStatusUpdate {
|
||||
return &JobStatusUpdate{
|
||||
Type: t,
|
||||
Job: jobToJobModel(j),
|
||||
}
|
||||
}
|
||||
|
||||
func (r *subscriptionResolver) JobsSubscribe(ctx context.Context) (<-chan *models.JobStatusUpdate, error) {
|
||||
msg := make(chan *models.JobStatusUpdate, 100)
|
||||
func (r *subscriptionResolver) JobsSubscribe(ctx context.Context) (<-chan *JobStatusUpdate, error) {
|
||||
msg := make(chan *JobStatusUpdate, 100)
|
||||
|
||||
subscription := manager.GetInstance().JobManager.Subscribe(ctx)
|
||||
|
||||
|
|
@ -24,11 +23,11 @@ func (r *subscriptionResolver) JobsSubscribe(ctx context.Context) (<-chan *model
|
|||
for {
|
||||
select {
|
||||
case j := <-subscription.NewJob:
|
||||
msg <- makeJobStatusUpdate(models.JobStatusUpdateTypeAdd, j)
|
||||
msg <- makeJobStatusUpdate(JobStatusUpdateTypeAdd, j)
|
||||
case j := <-subscription.RemovedJob:
|
||||
msg <- makeJobStatusUpdate(models.JobStatusUpdateTypeRemove, j)
|
||||
msg <- makeJobStatusUpdate(JobStatusUpdateTypeRemove, j)
|
||||
case j := <-subscription.UpdatedJob:
|
||||
msg <- makeJobStatusUpdate(models.JobStatusUpdateTypeUpdate, j)
|
||||
msg <- makeJobStatusUpdate(JobStatusUpdateTypeUpdate, j)
|
||||
case <-ctx.Done():
|
||||
close(msg)
|
||||
return
|
||||
|
|
|
|||
|
|
@ -5,33 +5,32 @@ import (
|
|||
|
||||
"github.com/stashapp/stash/internal/log"
|
||||
"github.com/stashapp/stash/internal/manager"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
func getLogLevel(logType string) models.LogLevel {
|
||||
func getLogLevel(logType string) LogLevel {
|
||||
switch logType {
|
||||
case "progress":
|
||||
return models.LogLevelProgress
|
||||
return LogLevelProgress
|
||||
case "trace":
|
||||
return models.LogLevelTrace
|
||||
return LogLevelTrace
|
||||
case "debug":
|
||||
return models.LogLevelDebug
|
||||
return LogLevelDebug
|
||||
case "info":
|
||||
return models.LogLevelInfo
|
||||
return LogLevelInfo
|
||||
case "warn":
|
||||
return models.LogLevelWarning
|
||||
return LogLevelWarning
|
||||
case "error":
|
||||
return models.LogLevelError
|
||||
return LogLevelError
|
||||
default:
|
||||
return models.LogLevelDebug
|
||||
return LogLevelDebug
|
||||
}
|
||||
}
|
||||
|
||||
func logEntriesFromLogItems(logItems []log.LogItem) []*models.LogEntry {
|
||||
ret := make([]*models.LogEntry, len(logItems))
|
||||
func logEntriesFromLogItems(logItems []log.LogItem) []*LogEntry {
|
||||
ret := make([]*LogEntry, len(logItems))
|
||||
|
||||
for i, entry := range logItems {
|
||||
ret[i] = &models.LogEntry{
|
||||
ret[i] = &LogEntry{
|
||||
Time: entry.Time,
|
||||
Level: getLogLevel(entry.Type),
|
||||
Message: entry.Message,
|
||||
|
|
@ -41,8 +40,8 @@ func logEntriesFromLogItems(logItems []log.LogItem) []*models.LogEntry {
|
|||
return ret
|
||||
}
|
||||
|
||||
func (r *subscriptionResolver) LoggingSubscribe(ctx context.Context) (<-chan []*models.LogEntry, error) {
|
||||
ret := make(chan []*models.LogEntry, 100)
|
||||
func (r *subscriptionResolver) LoggingSubscribe(ctx context.Context) (<-chan []*LogEntry, error) {
|
||||
ret := make(chan []*LogEntry, 100)
|
||||
stop := make(chan int, 1)
|
||||
logger := manager.GetInstance().Logger
|
||||
logSub := logger.SubscribeToLog(stop)
|
||||
|
|
|
|||
|
|
@ -4,12 +4,13 @@ import (
|
|||
"fmt"
|
||||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/scraper"
|
||||
)
|
||||
|
||||
// marshalScrapedScenes converts ScrapedContent into ScrapedScene. If conversion fails, an
|
||||
// error is returned to the caller.
|
||||
func marshalScrapedScenes(content []models.ScrapedContent) ([]*models.ScrapedScene, error) {
|
||||
var ret []*models.ScrapedScene
|
||||
func marshalScrapedScenes(content []scraper.ScrapedContent) ([]*scraper.ScrapedScene, error) {
|
||||
var ret []*scraper.ScrapedScene
|
||||
for _, c := range content {
|
||||
if c == nil {
|
||||
// graphql schema requires scenes to be non-nil
|
||||
|
|
@ -17,9 +18,9 @@ func marshalScrapedScenes(content []models.ScrapedContent) ([]*models.ScrapedSce
|
|||
}
|
||||
|
||||
switch s := c.(type) {
|
||||
case *models.ScrapedScene:
|
||||
case *scraper.ScrapedScene:
|
||||
ret = append(ret, s)
|
||||
case models.ScrapedScene:
|
||||
case scraper.ScrapedScene:
|
||||
ret = append(ret, &s)
|
||||
default:
|
||||
return nil, fmt.Errorf("%w: cannot turn ScrapedContent into ScrapedScene", models.ErrConversion)
|
||||
|
|
@ -31,7 +32,7 @@ func marshalScrapedScenes(content []models.ScrapedContent) ([]*models.ScrapedSce
|
|||
|
||||
// marshalScrapedPerformers converts ScrapedContent into ScrapedPerformer. If conversion
|
||||
// fails, an error is returned to the caller.
|
||||
func marshalScrapedPerformers(content []models.ScrapedContent) ([]*models.ScrapedPerformer, error) {
|
||||
func marshalScrapedPerformers(content []scraper.ScrapedContent) ([]*models.ScrapedPerformer, error) {
|
||||
var ret []*models.ScrapedPerformer
|
||||
for _, c := range content {
|
||||
if c == nil {
|
||||
|
|
@ -54,8 +55,8 @@ func marshalScrapedPerformers(content []models.ScrapedContent) ([]*models.Scrape
|
|||
|
||||
// marshalScrapedGalleries converts ScrapedContent into ScrapedGallery. If
|
||||
// conversion fails, an error is returned.
|
||||
func marshalScrapedGalleries(content []models.ScrapedContent) ([]*models.ScrapedGallery, error) {
|
||||
var ret []*models.ScrapedGallery
|
||||
func marshalScrapedGalleries(content []scraper.ScrapedContent) ([]*scraper.ScrapedGallery, error) {
|
||||
var ret []*scraper.ScrapedGallery
|
||||
for _, c := range content {
|
||||
if c == nil {
|
||||
// graphql schema requires galleries to be non-nil
|
||||
|
|
@ -63,9 +64,9 @@ func marshalScrapedGalleries(content []models.ScrapedContent) ([]*models.Scraped
|
|||
}
|
||||
|
||||
switch g := c.(type) {
|
||||
case *models.ScrapedGallery:
|
||||
case *scraper.ScrapedGallery:
|
||||
ret = append(ret, g)
|
||||
case models.ScrapedGallery:
|
||||
case scraper.ScrapedGallery:
|
||||
ret = append(ret, &g)
|
||||
default:
|
||||
return nil, fmt.Errorf("%w: cannot turn ScrapedContent into ScrapedGallery", models.ErrConversion)
|
||||
|
|
@ -77,7 +78,7 @@ func marshalScrapedGalleries(content []models.ScrapedContent) ([]*models.Scraped
|
|||
|
||||
// marshalScrapedMovies converts ScrapedContent into ScrapedMovie. If conversion
|
||||
// fails, an error is returned.
|
||||
func marshalScrapedMovies(content []models.ScrapedContent) ([]*models.ScrapedMovie, error) {
|
||||
func marshalScrapedMovies(content []scraper.ScrapedContent) ([]*models.ScrapedMovie, error) {
|
||||
var ret []*models.ScrapedMovie
|
||||
for _, c := range content {
|
||||
if c == nil {
|
||||
|
|
@ -99,8 +100,8 @@ func marshalScrapedMovies(content []models.ScrapedContent) ([]*models.ScrapedMov
|
|||
}
|
||||
|
||||
// marshalScrapedPerformer will marshal a single performer
|
||||
func marshalScrapedPerformer(content models.ScrapedContent) (*models.ScrapedPerformer, error) {
|
||||
p, err := marshalScrapedPerformers([]models.ScrapedContent{content})
|
||||
func marshalScrapedPerformer(content scraper.ScrapedContent) (*models.ScrapedPerformer, error) {
|
||||
p, err := marshalScrapedPerformers([]scraper.ScrapedContent{content})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
@ -109,8 +110,8 @@ func marshalScrapedPerformer(content models.ScrapedContent) (*models.ScrapedPerf
|
|||
}
|
||||
|
||||
// marshalScrapedScene will marshal a single scraped scene
|
||||
func marshalScrapedScene(content models.ScrapedContent) (*models.ScrapedScene, error) {
|
||||
s, err := marshalScrapedScenes([]models.ScrapedContent{content})
|
||||
func marshalScrapedScene(content scraper.ScrapedContent) (*scraper.ScrapedScene, error) {
|
||||
s, err := marshalScrapedScenes([]scraper.ScrapedContent{content})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
@ -119,8 +120,8 @@ func marshalScrapedScene(content models.ScrapedContent) (*models.ScrapedScene, e
|
|||
}
|
||||
|
||||
// marshalScrapedGallery will marshal a single scraped gallery
|
||||
func marshalScrapedGallery(content models.ScrapedContent) (*models.ScrapedGallery, error) {
|
||||
g, err := marshalScrapedGalleries([]models.ScrapedContent{content})
|
||||
func marshalScrapedGallery(content scraper.ScrapedContent) (*scraper.ScrapedGallery, error) {
|
||||
g, err := marshalScrapedGalleries([]scraper.ScrapedContent{content})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
@ -129,8 +130,8 @@ func marshalScrapedGallery(content models.ScrapedContent) (*models.ScrapedGaller
|
|||
}
|
||||
|
||||
// marshalScrapedMovie will marshal a single scraped movie
|
||||
func marshalScrapedMovie(content models.ScrapedContent) (*models.ScrapedMovie, error) {
|
||||
m, err := marshalScrapedMovies([]models.ScrapedContent{content})
|
||||
func marshalScrapedMovie(content scraper.ScrapedContent) (*models.ScrapedMovie, error) {
|
||||
m, err := marshalScrapedMovies([]scraper.ScrapedContent{content})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
|
|||
|
|
@ -30,7 +30,6 @@ import (
|
|||
"github.com/stashapp/stash/internal/manager/config"
|
||||
"github.com/stashapp/stash/pkg/fsutil"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/ui"
|
||||
)
|
||||
|
||||
|
|
@ -81,7 +80,7 @@ func Start() error {
|
|||
hookExecutor: pluginCache,
|
||||
}
|
||||
|
||||
gqlSrv := gqlHandler.New(models.NewExecutableSchema(models.Config{Resolvers: resolver}))
|
||||
gqlSrv := gqlHandler.New(NewExecutableSchema(Config{Resolvers: resolver}))
|
||||
gqlSrv.SetRecoverFunc(recoverFunc)
|
||||
gqlSrv.AddTransport(gqlTransport.Websocket{
|
||||
Upgrader: websocket.Upgrader{
|
||||
|
|
|
|||
|
|
@ -12,6 +12,20 @@ import (
|
|||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
|
||||
type Status struct {
|
||||
Running bool `json:"running"`
|
||||
// If not currently running, time until it will be started. If running, time until it will be stopped
|
||||
Until *time.Time `json:"until"`
|
||||
RecentIPAddresses []string `json:"recentIPAddresses"`
|
||||
AllowedIPAddresses []*Dlnaip `json:"allowedIPAddresses"`
|
||||
}
|
||||
|
||||
type Dlnaip struct {
|
||||
IPAddress string `json:"ipAddress"`
|
||||
// Time until IP will be no longer allowed/disallowed
|
||||
Until *time.Time `json:"until"`
|
||||
}
|
||||
|
||||
type dmsConfig struct {
|
||||
Path string
|
||||
IfNames []string
|
||||
|
|
@ -273,11 +287,11 @@ func (s *Service) IsRunning() bool {
|
|||
return s.running
|
||||
}
|
||||
|
||||
func (s *Service) Status() *models.DLNAStatus {
|
||||
func (s *Service) Status() *Status {
|
||||
s.mutex.Lock()
|
||||
defer s.mutex.Unlock()
|
||||
|
||||
ret := &models.DLNAStatus{
|
||||
ret := &Status{
|
||||
Running: s.running,
|
||||
RecentIPAddresses: s.ipWhitelistMgr.getRecent(),
|
||||
AllowedIPAddresses: s.ipWhitelistMgr.getTempAllowed(),
|
||||
|
|
|
|||
|
|
@ -4,7 +4,6 @@ import (
|
|||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
|
||||
)
|
||||
|
||||
|
|
@ -59,11 +58,11 @@ func (m *ipWhitelistManager) getRecent() []string {
|
|||
return m.recentIPAddresses
|
||||
}
|
||||
|
||||
func (m *ipWhitelistManager) getTempAllowed() []*models.Dlnaip {
|
||||
func (m *ipWhitelistManager) getTempAllowed() []*Dlnaip {
|
||||
m.mutex.Lock()
|
||||
defer m.mutex.Unlock()
|
||||
|
||||
var ret []*models.Dlnaip
|
||||
var ret []*Dlnaip
|
||||
|
||||
now := time.Now()
|
||||
removeExpired := false
|
||||
|
|
@ -73,7 +72,7 @@ func (m *ipWhitelistManager) getTempAllowed() []*models.Dlnaip {
|
|||
continue
|
||||
}
|
||||
|
||||
ret = append(ret, &models.Dlnaip{
|
||||
ret = append(ret, &Dlnaip{
|
||||
IPAddress: a.pattern,
|
||||
Until: a.until,
|
||||
})
|
||||
|
|
|
|||
|
|
@ -8,11 +8,12 @@ import (
|
|||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/scene"
|
||||
"github.com/stashapp/stash/pkg/scraper"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
type SceneScraper interface {
|
||||
ScrapeScene(ctx context.Context, sceneID int) (*models.ScrapedScene, error)
|
||||
ScrapeScene(ctx context.Context, sceneID int) (*scraper.ScrapedScene, error)
|
||||
}
|
||||
|
||||
type SceneUpdatePostHookExecutor interface {
|
||||
|
|
@ -21,13 +22,13 @@ type SceneUpdatePostHookExecutor interface {
|
|||
|
||||
type ScraperSource struct {
|
||||
Name string
|
||||
Options *models.IdentifyMetadataOptionsInput
|
||||
Options *MetadataOptions
|
||||
Scraper SceneScraper
|
||||
RemoteSite string
|
||||
}
|
||||
|
||||
type SceneIdentifier struct {
|
||||
DefaultOptions *models.IdentifyMetadataOptionsInput
|
||||
DefaultOptions *MetadataOptions
|
||||
Sources []ScraperSource
|
||||
ScreenshotSetter scene.ScreenshotSetter
|
||||
SceneUpdatePostHookExecutor SceneUpdatePostHookExecutor
|
||||
|
|
@ -53,7 +54,7 @@ func (t *SceneIdentifier) Identify(ctx context.Context, txnManager models.Transa
|
|||
}
|
||||
|
||||
type scrapeResult struct {
|
||||
result *models.ScrapedScene
|
||||
result *scraper.ScrapedScene
|
||||
source ScraperSource
|
||||
}
|
||||
|
||||
|
|
@ -84,7 +85,7 @@ func (t *SceneIdentifier) getSceneUpdater(ctx context.Context, s *models.Scene,
|
|||
ID: s.ID,
|
||||
}
|
||||
|
||||
options := []models.IdentifyMetadataOptionsInput{}
|
||||
options := []MetadataOptions{}
|
||||
if result.source.Options != nil {
|
||||
options = append(options, *result.source.Options)
|
||||
}
|
||||
|
|
@ -208,9 +209,9 @@ func (t *SceneIdentifier) modifyScene(ctx context.Context, txnManager models.Tra
|
|||
return nil
|
||||
}
|
||||
|
||||
func getFieldOptions(options []models.IdentifyMetadataOptionsInput) map[string]*models.IdentifyFieldOptionsInput {
|
||||
func getFieldOptions(options []MetadataOptions) map[string]*FieldOptions {
|
||||
// prefer source-specific field strategies, then the defaults
|
||||
ret := make(map[string]*models.IdentifyFieldOptionsInput)
|
||||
ret := make(map[string]*FieldOptions)
|
||||
for _, oo := range options {
|
||||
for _, f := range oo.FieldOptions {
|
||||
if _, found := ret[f.Field]; !found {
|
||||
|
|
@ -222,7 +223,7 @@ func getFieldOptions(options []models.IdentifyMetadataOptionsInput) map[string]*
|
|||
return ret
|
||||
}
|
||||
|
||||
func getScenePartial(scene *models.Scene, scraped *models.ScrapedScene, fieldOptions map[string]*models.IdentifyFieldOptionsInput, setOrganized bool) models.ScenePartial {
|
||||
func getScenePartial(scene *models.Scene, scraped *scraper.ScrapedScene, fieldOptions map[string]*FieldOptions, setOrganized bool) models.ScenePartial {
|
||||
partial := models.ScenePartial{
|
||||
ID: scene.ID,
|
||||
}
|
||||
|
|
@ -259,17 +260,17 @@ func getScenePartial(scene *models.Scene, scraped *models.ScrapedScene, fieldOpt
|
|||
return partial
|
||||
}
|
||||
|
||||
func shouldSetSingleValueField(strategy *models.IdentifyFieldOptionsInput, hasExistingValue bool) bool {
|
||||
func shouldSetSingleValueField(strategy *FieldOptions, hasExistingValue bool) bool {
|
||||
// if unset then default to MERGE
|
||||
fs := models.IdentifyFieldStrategyMerge
|
||||
fs := FieldStrategyMerge
|
||||
|
||||
if strategy != nil && strategy.Strategy.IsValid() {
|
||||
fs = strategy.Strategy
|
||||
}
|
||||
|
||||
if fs == models.IdentifyFieldStrategyIgnore {
|
||||
if fs == FieldStrategyIgnore {
|
||||
return false
|
||||
}
|
||||
|
||||
return !hasExistingValue || fs == models.IdentifyFieldStrategyOverwrite
|
||||
return !hasExistingValue || fs == FieldStrategyOverwrite
|
||||
}
|
||||
|
|
|
|||
|
|
@ -8,16 +8,17 @@ import (
|
|||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/models/mocks"
|
||||
"github.com/stashapp/stash/pkg/scraper"
|
||||
"github.com/stashapp/stash/pkg/sliceutil/intslice"
|
||||
"github.com/stretchr/testify/mock"
|
||||
)
|
||||
|
||||
type mockSceneScraper struct {
|
||||
errIDs []int
|
||||
results map[int]*models.ScrapedScene
|
||||
results map[int]*scraper.ScrapedScene
|
||||
}
|
||||
|
||||
func (s mockSceneScraper) ScrapeScene(ctx context.Context, sceneID int) (*models.ScrapedScene, error) {
|
||||
func (s mockSceneScraper) ScrapeScene(ctx context.Context, sceneID int) (*scraper.ScrapedScene, error) {
|
||||
if intslice.IntInclude(s.errIDs, sceneID) {
|
||||
return nil, errors.New("scrape scene error")
|
||||
}
|
||||
|
|
@ -42,12 +43,12 @@ func TestSceneIdentifier_Identify(t *testing.T) {
|
|||
|
||||
var scrapedTitle = "scrapedTitle"
|
||||
|
||||
defaultOptions := &models.IdentifyMetadataOptionsInput{}
|
||||
defaultOptions := &MetadataOptions{}
|
||||
sources := []ScraperSource{
|
||||
{
|
||||
Scraper: mockSceneScraper{
|
||||
errIDs: []int{errID1},
|
||||
results: map[int]*models.ScrapedScene{
|
||||
results: map[int]*scraper.ScrapedScene{
|
||||
found1ID: {
|
||||
Title: &scrapedTitle,
|
||||
},
|
||||
|
|
@ -57,7 +58,7 @@ func TestSceneIdentifier_Identify(t *testing.T) {
|
|||
{
|
||||
Scraper: mockSceneScraper{
|
||||
errIDs: []int{errID2},
|
||||
results: map[int]*models.ScrapedScene{
|
||||
results: map[int]*scraper.ScrapedScene{
|
||||
found2ID: {
|
||||
Title: &scrapedTitle,
|
||||
},
|
||||
|
|
@ -150,7 +151,7 @@ func TestSceneIdentifier_modifyScene(t *testing.T) {
|
|||
args{
|
||||
&models.Scene{},
|
||||
&scrapeResult{
|
||||
result: &models.ScrapedScene{},
|
||||
result: &scraper.ScrapedScene{},
|
||||
},
|
||||
},
|
||||
false,
|
||||
|
|
@ -173,55 +174,55 @@ func Test_getFieldOptions(t *testing.T) {
|
|||
)
|
||||
|
||||
type args struct {
|
||||
options []models.IdentifyMetadataOptionsInput
|
||||
options []MetadataOptions
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
args args
|
||||
want map[string]*models.IdentifyFieldOptionsInput
|
||||
want map[string]*FieldOptions
|
||||
}{
|
||||
{
|
||||
"simple",
|
||||
args{
|
||||
[]models.IdentifyMetadataOptionsInput{
|
||||
[]MetadataOptions{
|
||||
{
|
||||
FieldOptions: []*models.IdentifyFieldOptionsInput{
|
||||
FieldOptions: []*FieldOptions{
|
||||
{
|
||||
Field: inFirst,
|
||||
Strategy: models.IdentifyFieldStrategyIgnore,
|
||||
Strategy: FieldStrategyIgnore,
|
||||
},
|
||||
{
|
||||
Field: inBoth,
|
||||
Strategy: models.IdentifyFieldStrategyIgnore,
|
||||
Strategy: FieldStrategyIgnore,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
FieldOptions: []*models.IdentifyFieldOptionsInput{
|
||||
FieldOptions: []*FieldOptions{
|
||||
{
|
||||
Field: inSecond,
|
||||
Strategy: models.IdentifyFieldStrategyMerge,
|
||||
Strategy: FieldStrategyMerge,
|
||||
},
|
||||
{
|
||||
Field: inBoth,
|
||||
Strategy: models.IdentifyFieldStrategyMerge,
|
||||
Strategy: FieldStrategyMerge,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
map[string]*models.IdentifyFieldOptionsInput{
|
||||
map[string]*FieldOptions{
|
||||
inFirst: {
|
||||
Field: inFirst,
|
||||
Strategy: models.IdentifyFieldStrategyIgnore,
|
||||
Strategy: FieldStrategyIgnore,
|
||||
},
|
||||
inSecond: {
|
||||
Field: inSecond,
|
||||
Strategy: models.IdentifyFieldStrategyMerge,
|
||||
Strategy: FieldStrategyMerge,
|
||||
},
|
||||
inBoth: {
|
||||
Field: inBoth,
|
||||
Strategy: models.IdentifyFieldStrategyIgnore,
|
||||
Strategy: FieldStrategyIgnore,
|
||||
},
|
||||
},
|
||||
},
|
||||
|
|
@ -275,22 +276,22 @@ func Test_getScenePartial(t *testing.T) {
|
|||
URL: models.NullStringPtr(scrapedURL),
|
||||
}
|
||||
|
||||
scrapedScene := &models.ScrapedScene{
|
||||
scrapedScene := &scraper.ScrapedScene{
|
||||
Title: &scrapedTitle,
|
||||
Date: &scrapedDate,
|
||||
Details: &scrapedDetails,
|
||||
URL: &scrapedURL,
|
||||
}
|
||||
|
||||
scrapedUnchangedScene := &models.ScrapedScene{
|
||||
scrapedUnchangedScene := &scraper.ScrapedScene{
|
||||
Title: &originalTitle,
|
||||
Date: &originalDate,
|
||||
Details: &originalDetails,
|
||||
URL: &originalURL,
|
||||
}
|
||||
|
||||
makeFieldOptions := func(input *models.IdentifyFieldOptionsInput) map[string]*models.IdentifyFieldOptionsInput {
|
||||
return map[string]*models.IdentifyFieldOptionsInput{
|
||||
makeFieldOptions := func(input *FieldOptions) map[string]*FieldOptions {
|
||||
return map[string]*FieldOptions{
|
||||
"title": input,
|
||||
"date": input,
|
||||
"details": input,
|
||||
|
|
@ -298,22 +299,22 @@ func Test_getScenePartial(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
overwriteAll := makeFieldOptions(&models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyOverwrite,
|
||||
overwriteAll := makeFieldOptions(&FieldOptions{
|
||||
Strategy: FieldStrategyOverwrite,
|
||||
})
|
||||
ignoreAll := makeFieldOptions(&models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyIgnore,
|
||||
ignoreAll := makeFieldOptions(&FieldOptions{
|
||||
Strategy: FieldStrategyIgnore,
|
||||
})
|
||||
mergeAll := makeFieldOptions(&models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyMerge,
|
||||
mergeAll := makeFieldOptions(&FieldOptions{
|
||||
Strategy: FieldStrategyMerge,
|
||||
})
|
||||
|
||||
setOrganised := true
|
||||
|
||||
type args struct {
|
||||
scene *models.Scene
|
||||
scraped *models.ScrapedScene
|
||||
fieldOptions map[string]*models.IdentifyFieldOptionsInput
|
||||
scraped *scraper.ScrapedScene
|
||||
fieldOptions map[string]*FieldOptions
|
||||
setOrganized bool
|
||||
}
|
||||
tests := []struct {
|
||||
|
|
@ -407,7 +408,7 @@ func Test_shouldSetSingleValueField(t *testing.T) {
|
|||
const invalid = "invalid"
|
||||
|
||||
type args struct {
|
||||
strategy *models.IdentifyFieldOptionsInput
|
||||
strategy *FieldOptions
|
||||
hasExistingValue bool
|
||||
}
|
||||
tests := []struct {
|
||||
|
|
@ -418,8 +419,8 @@ func Test_shouldSetSingleValueField(t *testing.T) {
|
|||
{
|
||||
"ignore",
|
||||
args{
|
||||
&models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyIgnore,
|
||||
&FieldOptions{
|
||||
Strategy: FieldStrategyIgnore,
|
||||
},
|
||||
false,
|
||||
},
|
||||
|
|
@ -428,8 +429,8 @@ func Test_shouldSetSingleValueField(t *testing.T) {
|
|||
{
|
||||
"merge existing",
|
||||
args{
|
||||
&models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyMerge,
|
||||
&FieldOptions{
|
||||
Strategy: FieldStrategyMerge,
|
||||
},
|
||||
true,
|
||||
},
|
||||
|
|
@ -438,8 +439,8 @@ func Test_shouldSetSingleValueField(t *testing.T) {
|
|||
{
|
||||
"merge absent",
|
||||
args{
|
||||
&models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyMerge,
|
||||
&FieldOptions{
|
||||
Strategy: FieldStrategyMerge,
|
||||
},
|
||||
false,
|
||||
},
|
||||
|
|
@ -448,8 +449,8 @@ func Test_shouldSetSingleValueField(t *testing.T) {
|
|||
{
|
||||
"overwrite",
|
||||
args{
|
||||
&models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyOverwrite,
|
||||
&FieldOptions{
|
||||
Strategy: FieldStrategyOverwrite,
|
||||
},
|
||||
true,
|
||||
},
|
||||
|
|
@ -458,7 +459,7 @@ func Test_shouldSetSingleValueField(t *testing.T) {
|
|||
{
|
||||
"nil (merge) existing",
|
||||
args{
|
||||
&models.IdentifyFieldOptionsInput{},
|
||||
&FieldOptions{},
|
||||
true,
|
||||
},
|
||||
false,
|
||||
|
|
@ -466,7 +467,7 @@ func Test_shouldSetSingleValueField(t *testing.T) {
|
|||
{
|
||||
"nil (merge) absent",
|
||||
args{
|
||||
&models.IdentifyFieldOptionsInput{},
|
||||
&FieldOptions{},
|
||||
false,
|
||||
},
|
||||
true,
|
||||
|
|
@ -474,7 +475,7 @@ func Test_shouldSetSingleValueField(t *testing.T) {
|
|||
{
|
||||
"invalid (merge) existing",
|
||||
args{
|
||||
&models.IdentifyFieldOptionsInput{
|
||||
&FieldOptions{
|
||||
Strategy: invalid,
|
||||
},
|
||||
true,
|
||||
|
|
@ -484,7 +485,7 @@ func Test_shouldSetSingleValueField(t *testing.T) {
|
|||
{
|
||||
"invalid (merge) absent",
|
||||
args{
|
||||
&models.IdentifyFieldOptionsInput{
|
||||
&FieldOptions{
|
||||
Strategy: invalid,
|
||||
},
|
||||
false,
|
||||
|
|
|
|||
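Editor's note (not part of the commit): the Test_getFieldOptions cases above exercise a precedence rule — when several option sets name the same field, the first (source-specific) entry wins over later (default) ones. A minimal, self-contained sketch of that rule, using hypothetical names rather than the project's types, is:

package main

import "fmt"

// FieldOptions is a simplified stand-in for the project's field options type.
type FieldOptions struct {
	Field    string
	Strategy string
}

// mergeFieldOptions keeps the first strategy seen for each field, so earlier
// (source-specific) option sets take precedence over later (default) ones.
func mergeFieldOptions(optionSets ...[]FieldOptions) map[string]FieldOptions {
	ret := make(map[string]FieldOptions)
	for _, set := range optionSets {
		for _, f := range set {
			if _, found := ret[f.Field]; !found {
				ret[f.Field] = f
			}
		}
	}
	return ret
}

func main() {
	sourceSpecific := []FieldOptions{{Field: "title", Strategy: "IGNORE"}}
	defaults := []FieldOptions{{Field: "title", Strategy: "MERGE"}, {Field: "date", Strategy: "OVERWRITE"}}
	// title resolves to IGNORE (source-specific wins), date to OVERWRITE (only in defaults)
	fmt.Println(mergeFieldOptions(sourceSpecific, defaults))
}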
92
internal/identify/options.go
Normal file
92
internal/identify/options.go
Normal file
|
|
@ -0,0 +1,92 @@
|
|||
package identify
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"strconv"
|
||||
|
||||
"github.com/stashapp/stash/pkg/scraper"
|
||||
)
|
||||
|
||||
type Source struct {
|
||||
Source *scraper.Source `json:"source"`
|
||||
// Options defined for a source override the defaults
|
||||
Options *MetadataOptions `json:"options"`
|
||||
}
|
||||
|
||||
type Options struct {
|
||||
// An ordered list of sources to identify items with. Only the first source that finds a match is used.
|
||||
Sources []*Source `json:"sources"`
|
||||
// Options defined here override the configured defaults
|
||||
Options *MetadataOptions `json:"options"`
|
||||
// scene ids to identify
|
||||
SceneIDs []string `json:"sceneIDs"`
|
||||
// paths of scenes to identify - ignored if scene ids are set
|
||||
Paths []string `json:"paths"`
|
||||
}
|
||||
|
||||
type MetadataOptions struct {
|
||||
// any fields missing from here are defaulted to MERGE and createMissing false
|
||||
FieldOptions []*FieldOptions `json:"fieldOptions"`
|
||||
// defaults to true if not provided
|
||||
SetCoverImage *bool `json:"setCoverImage"`
|
||||
SetOrganized *bool `json:"setOrganized"`
|
||||
// defaults to true if not provided
|
||||
IncludeMalePerformers *bool `json:"includeMalePerformers"`
|
||||
}
|
||||
|
||||
type FieldOptions struct {
|
||||
Field string `json:"field"`
|
||||
Strategy FieldStrategy `json:"strategy"`
|
||||
// creates missing objects if needed - only applicable for performers, tags and studios
|
||||
CreateMissing *bool `json:"createMissing"`
|
||||
}
|
||||
|
||||
type FieldStrategy string
|
||||
|
||||
const (
|
||||
// Never sets the field value
|
||||
FieldStrategyIgnore FieldStrategy = "IGNORE"
|
||||
// For multi-value fields, merge with existing.
|
||||
// For single-value fields, ignore if already set
|
||||
FieldStrategyMerge FieldStrategy = "MERGE"
|
||||
// Always replaces the value if a value is found.
|
||||
// For multi-value fields, any existing values are removed and replaced with the
|
||||
// scraped values.
|
||||
FieldStrategyOverwrite FieldStrategy = "OVERWRITE"
|
||||
)
|
||||
|
||||
var AllFieldStrategy = []FieldStrategy{
|
||||
FieldStrategyIgnore,
|
||||
FieldStrategyMerge,
|
||||
FieldStrategyOverwrite,
|
||||
}
|
||||
|
||||
func (e FieldStrategy) IsValid() bool {
|
||||
switch e {
|
||||
case FieldStrategyIgnore, FieldStrategyMerge, FieldStrategyOverwrite:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (e FieldStrategy) String() string {
|
||||
return string(e)
|
||||
}
|
||||
|
||||
func (e *FieldStrategy) UnmarshalGQL(v interface{}) error {
|
||||
str, ok := v.(string)
|
||||
if !ok {
|
||||
return fmt.Errorf("enums must be strings")
|
||||
}
|
||||
|
||||
*e = FieldStrategy(str)
|
||||
if !e.IsValid() {
|
||||
return fmt.Errorf("%s is not a valid IdentifyFieldStrategy", str)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e FieldStrategy) MarshalGQL(w io.Writer) {
|
||||
fmt.Fprint(w, strconv.Quote(e.String()))
|
||||
}
|
||||
|
|
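A minimal usage sketch for the options above. The helper function, the scraper ID value and the chosen field settings are illustrative assumptions, not part of the package; only the types and constants come from options.go, and fields that are omitted fall back to MERGE with createMissing=false as documented on MetadataOptions.

package example // illustrative only

import (
	"github.com/stashapp/stash/internal/identify"
	"github.com/stashapp/stash/pkg/scraper"
)

func defaultIdentifyOptions() identify.Options {
	scraperID := "example-scraper" // placeholder scraper ID
	createMissing := true
	setOrganized := false

	return identify.Options{
		// Sources are tried in order; the first one that returns a match wins.
		Sources: []*identify.Source{
			{Source: &scraper.Source{ScraperID: &scraperID}},
		},
		Options: &identify.MetadataOptions{
			SetOrganized: &setOrganized,
			FieldOptions: []*identify.FieldOptions{
				{Field: "performers", Strategy: identify.FieldStrategyMerge},
				{Field: "tags", Strategy: identify.FieldStrategyOverwrite, CreateMissing: &createMissing},
			},
		},
	}
}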
@ -18,7 +18,7 @@ type sceneRelationships struct {
|
|||
repo models.Repository
|
||||
scene *models.Scene
|
||||
result *scrapeResult
|
||||
fieldOptions map[string]*models.IdentifyFieldOptionsInput
|
||||
fieldOptions map[string]*FieldOptions
|
||||
}
|
||||
|
||||
func (g sceneRelationships) studio() (*int64, error) {
|
||||
|
|
@ -61,7 +61,7 @@ func (g sceneRelationships) performers(ignoreMale bool) ([]int, error) {
|
|||
}
|
||||
|
||||
createMissing := fieldStrategy != nil && utils.IsTrue(fieldStrategy.CreateMissing)
|
||||
strategy := models.IdentifyFieldStrategyMerge
|
||||
strategy := FieldStrategyMerge
|
||||
if fieldStrategy != nil {
|
||||
strategy = fieldStrategy.Strategy
|
||||
}
|
||||
|
|
@ -75,7 +75,7 @@ func (g sceneRelationships) performers(ignoreMale bool) ([]int, error) {
|
|||
return nil, fmt.Errorf("error getting scene performers: %w", err)
|
||||
}
|
||||
|
||||
if strategy == models.IdentifyFieldStrategyMerge {
|
||||
if strategy == FieldStrategyMerge {
|
||||
// add to existing
|
||||
performerIDs = originalPerformerIDs
|
||||
}
|
||||
|
|
@ -115,7 +115,7 @@ func (g sceneRelationships) tags() ([]int, error) {
|
|||
}
|
||||
|
||||
createMissing := fieldStrategy != nil && utils.IsTrue(fieldStrategy.CreateMissing)
|
||||
strategy := models.IdentifyFieldStrategyMerge
|
||||
strategy := FieldStrategyMerge
|
||||
if fieldStrategy != nil {
|
||||
strategy = fieldStrategy.Strategy
|
||||
}
|
||||
|
|
@ -126,7 +126,7 @@ func (g sceneRelationships) tags() ([]int, error) {
|
|||
return nil, fmt.Errorf("error getting scene tags: %w", err)
|
||||
}
|
||||
|
||||
if strategy == models.IdentifyFieldStrategyMerge {
|
||||
if strategy == FieldStrategyMerge {
|
||||
// add to existing
|
||||
tagIDs = originalTagIDs
|
||||
}
|
||||
|
|
@ -176,7 +176,7 @@ func (g sceneRelationships) stashIDs() ([]models.StashID, error) {
|
|||
return nil, nil
|
||||
}
|
||||
|
||||
strategy := models.IdentifyFieldStrategyMerge
|
||||
strategy := FieldStrategyMerge
|
||||
if fieldStrategy != nil {
|
||||
strategy = fieldStrategy.Strategy
|
||||
}
|
||||
|
|
@ -193,7 +193,7 @@ func (g sceneRelationships) stashIDs() ([]models.StashID, error) {
|
|||
originalStashIDs = append(originalStashIDs, *stashID)
|
||||
}
|
||||
|
||||
if strategy == models.IdentifyFieldStrategyMerge {
|
||||
if strategy == FieldStrategyMerge {
|
||||
// add to existing
|
||||
stashIDs = originalStashIDs
|
||||
}
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ import (
|
|||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/models/mocks"
|
||||
"github.com/stashapp/stash/pkg/scraper"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
"github.com/stretchr/testify/mock"
|
||||
)
|
||||
|
|
@ -19,8 +20,8 @@ func Test_sceneRelationships_studio(t *testing.T) {
|
|||
invalidStoredID := "invalidStoredID"
|
||||
createMissing := true
|
||||
|
||||
defaultOptions := &models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyMerge,
|
||||
defaultOptions := &FieldOptions{
|
||||
Strategy: FieldStrategyMerge,
|
||||
}
|
||||
|
||||
repo := mocks.NewTransactionManager()
|
||||
|
|
@ -30,13 +31,13 @@ func Test_sceneRelationships_studio(t *testing.T) {
|
|||
|
||||
tr := sceneRelationships{
|
||||
repo: repo,
|
||||
fieldOptions: make(map[string]*models.IdentifyFieldOptionsInput),
|
||||
fieldOptions: make(map[string]*FieldOptions),
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
scene *models.Scene
|
||||
fieldOptions *models.IdentifyFieldOptionsInput
|
||||
fieldOptions *FieldOptions
|
||||
result *models.ScrapedStudio
|
||||
want *int64
|
||||
wantErr bool
|
||||
|
|
@ -52,8 +53,8 @@ func Test_sceneRelationships_studio(t *testing.T) {
|
|||
{
|
||||
"ignore",
|
||||
&models.Scene{},
|
||||
&models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyIgnore,
|
||||
&FieldOptions{
|
||||
Strategy: FieldStrategyIgnore,
|
||||
},
|
||||
&models.ScrapedStudio{
|
||||
StoredID: &validStoredID,
|
||||
|
|
@ -104,8 +105,8 @@ func Test_sceneRelationships_studio(t *testing.T) {
|
|||
{
|
||||
"create missing",
|
||||
&models.Scene{},
|
||||
&models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyMerge,
|
||||
&FieldOptions{
|
||||
Strategy: FieldStrategyMerge,
|
||||
CreateMissing: &createMissing,
|
||||
},
|
||||
&models.ScrapedStudio{},
|
||||
|
|
@ -118,7 +119,7 @@ func Test_sceneRelationships_studio(t *testing.T) {
|
|||
tr.scene = tt.scene
|
||||
tr.fieldOptions["studio"] = tt.fieldOptions
|
||||
tr.result = &scrapeResult{
|
||||
result: &models.ScrapedScene{
|
||||
result: &scraper.ScrapedScene{
|
||||
Studio: tt.result,
|
||||
},
|
||||
}
|
||||
|
|
@ -151,8 +152,8 @@ func Test_sceneRelationships_performers(t *testing.T) {
|
|||
female := models.GenderEnumFemale.String()
|
||||
male := models.GenderEnumMale.String()
|
||||
|
||||
defaultOptions := &models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyMerge,
|
||||
defaultOptions := &FieldOptions{
|
||||
Strategy: FieldStrategyMerge,
|
||||
}
|
||||
|
||||
repo := mocks.NewTransactionManager()
|
||||
|
|
@ -162,13 +163,13 @@ func Test_sceneRelationships_performers(t *testing.T) {
|
|||
|
||||
tr := sceneRelationships{
|
||||
repo: repo,
|
||||
fieldOptions: make(map[string]*models.IdentifyFieldOptionsInput),
|
||||
fieldOptions: make(map[string]*FieldOptions),
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
sceneID int
|
||||
fieldOptions *models.IdentifyFieldOptionsInput
|
||||
fieldOptions *FieldOptions
|
||||
scraped []*models.ScrapedPerformer
|
||||
ignoreMale bool
|
||||
want []int
|
||||
|
|
@ -177,8 +178,8 @@ func Test_sceneRelationships_performers(t *testing.T) {
|
|||
{
|
||||
"ignore",
|
||||
sceneID,
|
||||
&models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyIgnore,
|
||||
&FieldOptions{
|
||||
Strategy: FieldStrategyIgnore,
|
||||
},
|
||||
[]*models.ScrapedPerformer{
|
||||
{
|
||||
|
|
@ -255,8 +256,8 @@ func Test_sceneRelationships_performers(t *testing.T) {
|
|||
{
|
||||
"overwrite",
|
||||
sceneWithPerformerID,
|
||||
&models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyOverwrite,
|
||||
&FieldOptions{
|
||||
Strategy: FieldStrategyOverwrite,
|
||||
},
|
||||
[]*models.ScrapedPerformer{
|
||||
{
|
||||
|
|
@ -271,8 +272,8 @@ func Test_sceneRelationships_performers(t *testing.T) {
|
|||
{
|
||||
"ignore male (not male)",
|
||||
sceneWithPerformerID,
|
||||
&models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyOverwrite,
|
||||
&FieldOptions{
|
||||
Strategy: FieldStrategyOverwrite,
|
||||
},
|
||||
[]*models.ScrapedPerformer{
|
||||
{
|
||||
|
|
@ -288,8 +289,8 @@ func Test_sceneRelationships_performers(t *testing.T) {
|
|||
{
|
||||
"error getting tag ID",
|
||||
sceneID,
|
||||
&models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyOverwrite,
|
||||
&FieldOptions{
|
||||
Strategy: FieldStrategyOverwrite,
|
||||
CreateMissing: &createMissing,
|
||||
},
|
||||
[]*models.ScrapedPerformer{
|
||||
|
|
@ -310,7 +311,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
|
|||
}
|
||||
tr.fieldOptions["performers"] = tt.fieldOptions
|
||||
tr.result = &scrapeResult{
|
||||
result: &models.ScrapedScene{
|
||||
result: &scraper.ScrapedScene{
|
||||
Performers: tt.scraped,
|
||||
},
|
||||
}
|
||||
|
|
@ -342,8 +343,8 @@ func Test_sceneRelationships_tags(t *testing.T) {
|
|||
validName := "validName"
|
||||
invalidName := "invalidName"
|
||||
|
||||
defaultOptions := &models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyMerge,
|
||||
defaultOptions := &FieldOptions{
|
||||
Strategy: FieldStrategyMerge,
|
||||
}
|
||||
|
||||
repo := mocks.NewTransactionManager()
|
||||
|
|
@ -362,13 +363,13 @@ func Test_sceneRelationships_tags(t *testing.T) {
|
|||
|
||||
tr := sceneRelationships{
|
||||
repo: repo,
|
||||
fieldOptions: make(map[string]*models.IdentifyFieldOptionsInput),
|
||||
fieldOptions: make(map[string]*FieldOptions),
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
sceneID int
|
||||
fieldOptions *models.IdentifyFieldOptionsInput
|
||||
fieldOptions *FieldOptions
|
||||
scraped []*models.ScrapedTag
|
||||
want []int
|
||||
wantErr bool
|
||||
|
|
@ -376,8 +377,8 @@ func Test_sceneRelationships_tags(t *testing.T) {
|
|||
{
|
||||
"ignore",
|
||||
sceneID,
|
||||
&models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyIgnore,
|
||||
&FieldOptions{
|
||||
Strategy: FieldStrategyIgnore,
|
||||
},
|
||||
[]*models.ScrapedTag{
|
||||
{
|
||||
|
|
@ -434,8 +435,8 @@ func Test_sceneRelationships_tags(t *testing.T) {
|
|||
{
|
||||
"overwrite",
|
||||
sceneWithTagID,
|
||||
&models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyOverwrite,
|
||||
&FieldOptions{
|
||||
Strategy: FieldStrategyOverwrite,
|
||||
},
|
||||
[]*models.ScrapedTag{
|
||||
{
|
||||
|
|
@ -449,8 +450,8 @@ func Test_sceneRelationships_tags(t *testing.T) {
|
|||
{
|
||||
"error getting tag ID",
|
||||
sceneID,
|
||||
&models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyOverwrite,
|
||||
&FieldOptions{
|
||||
Strategy: FieldStrategyOverwrite,
|
||||
},
|
||||
[]*models.ScrapedTag{
|
||||
{
|
||||
|
|
@ -464,8 +465,8 @@ func Test_sceneRelationships_tags(t *testing.T) {
|
|||
{
|
||||
"create missing",
|
||||
sceneID,
|
||||
&models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyOverwrite,
|
||||
&FieldOptions{
|
||||
Strategy: FieldStrategyOverwrite,
|
||||
CreateMissing: &createMissing,
|
||||
},
|
||||
[]*models.ScrapedTag{
|
||||
|
|
@ -479,8 +480,8 @@ func Test_sceneRelationships_tags(t *testing.T) {
|
|||
{
|
||||
"error creating",
|
||||
sceneID,
|
||||
&models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyOverwrite,
|
||||
&FieldOptions{
|
||||
Strategy: FieldStrategyOverwrite,
|
||||
CreateMissing: &createMissing,
|
||||
},
|
||||
[]*models.ScrapedTag{
|
||||
|
|
@ -499,7 +500,7 @@ func Test_sceneRelationships_tags(t *testing.T) {
|
|||
}
|
||||
tr.fieldOptions["tags"] = tt.fieldOptions
|
||||
tr.result = &scrapeResult{
|
||||
result: &models.ScrapedScene{
|
||||
result: &scraper.ScrapedScene{
|
||||
Tags: tt.scraped,
|
||||
},
|
||||
}
|
||||
|
|
@ -529,8 +530,8 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
|
|||
remoteSiteID := "remoteSiteID"
|
||||
newRemoteSiteID := "newRemoteSiteID"
|
||||
|
||||
defaultOptions := &models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyMerge,
|
||||
defaultOptions := &FieldOptions{
|
||||
Strategy: FieldStrategyMerge,
|
||||
}
|
||||
|
||||
repo := mocks.NewTransactionManager()
|
||||
|
|
@ -545,13 +546,13 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
|
|||
|
||||
tr := sceneRelationships{
|
||||
repo: repo,
|
||||
fieldOptions: make(map[string]*models.IdentifyFieldOptionsInput),
|
||||
fieldOptions: make(map[string]*FieldOptions),
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
sceneID int
|
||||
fieldOptions *models.IdentifyFieldOptionsInput
|
||||
fieldOptions *FieldOptions
|
||||
endpoint string
|
||||
remoteSiteID *string
|
||||
want []models.StashID
|
||||
|
|
@ -560,8 +561,8 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
|
|||
{
|
||||
"ignore",
|
||||
sceneID,
|
||||
&models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyIgnore,
|
||||
&FieldOptions{
|
||||
Strategy: FieldStrategyIgnore,
|
||||
},
|
||||
newEndpoint,
|
||||
&remoteSiteID,
|
||||
|
|
@ -639,8 +640,8 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
|
|||
{
|
||||
"overwrite",
|
||||
sceneWithStashID,
|
||||
&models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyOverwrite,
|
||||
&FieldOptions{
|
||||
Strategy: FieldStrategyOverwrite,
|
||||
},
|
||||
newEndpoint,
|
||||
&newRemoteSiteID,
|
||||
|
|
@ -655,8 +656,8 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
|
|||
{
|
||||
"overwrite same",
|
||||
sceneWithStashID,
|
||||
&models.IdentifyFieldOptionsInput{
|
||||
Strategy: models.IdentifyFieldStrategyOverwrite,
|
||||
&FieldOptions{
|
||||
Strategy: FieldStrategyOverwrite,
|
||||
},
|
||||
existingEndpoint,
|
||||
&remoteSiteID,
|
||||
|
|
@ -674,7 +675,7 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
|
|||
source: ScraperSource{
|
||||
RemoteSite: tt.endpoint,
|
||||
},
|
||||
result: &models.ScrapedScene{
|
||||
result: &scraper.ScrapedScene{
|
||||
RemoteSiteID: tt.remoteSiteID,
|
||||
},
|
||||
}
|
||||
|
|
@ -712,7 +713,7 @@ func Test_sceneRelationships_cover(t *testing.T) {
|
|||
|
||||
tr := sceneRelationships{
|
||||
repo: repo,
|
||||
fieldOptions: make(map[string]*models.IdentifyFieldOptionsInput),
|
||||
fieldOptions: make(map[string]*FieldOptions),
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
|
|
@ -764,7 +765,7 @@ func Test_sceneRelationships_cover(t *testing.T) {
|
|||
ID: tt.sceneID,
|
||||
}
|
||||
tr.result = &scrapeResult{
|
||||
result: &models.ScrapedScene{
|
||||
result: &scraper.ScrapedScene{
|
||||
Image: tt.image,
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -15,6 +15,7 @@ import (
|
|||
|
||||
"github.com/spf13/viper"
|
||||
|
||||
"github.com/stashapp/stash/internal/identify"
|
||||
"github.com/stashapp/stash/pkg/fsutil"
|
||||
"github.com/stashapp/stash/pkg/hash"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
|
|
@ -147,10 +148,10 @@ const (
|
|||
// Image lightbox options
|
||||
legacyImageLightboxSlideshowDelay = "slideshow_delay"
|
||||
ImageLightboxSlideshowDelay = "image_lightbox.slideshow_delay"
|
||||
ImageLightboxDisplayMode = "image_lightbox.display_mode"
|
||||
ImageLightboxDisplayModeKey = "image_lightbox.display_mode"
|
||||
ImageLightboxScaleUp = "image_lightbox.scale_up"
|
||||
ImageLightboxResetZoomOnNav = "image_lightbox.reset_zoom_on_nav"
|
||||
ImageLightboxScrollMode = "image_lightbox.scroll_mode"
|
||||
ImageLightboxScrollModeKey = "image_lightbox.scroll_mode"
|
||||
ImageLightboxScrollAttemptsBeforeChange = "image_lightbox.scroll_attempts_before_change"
|
||||
|
||||
UI = "ui"
|
||||
|
|
@@ -460,14 +461,27 @@ func (i *Instance) getStringMapString(key string) map[string]string {
 	return i.viper(key).GetStringMapString(key)
 }

+type StashConfig struct {
+	Path         string `json:"path"`
+	ExcludeVideo bool   `json:"excludeVideo"`
+	ExcludeImage bool   `json:"excludeImage"`
+}
+
+// Stash configuration details
+type StashConfigInput struct {
+	Path         string `json:"path"`
+	ExcludeVideo bool   `json:"excludeVideo"`
+	ExcludeImage bool   `json:"excludeImage"`
+}
+
 // GetStathPaths returns the configured stash library paths.
 // Works opposite to the usual case - it will return the override
 // value only if the main value is not set.
-func (i *Instance) GetStashPaths() []*models.StashConfig {
+func (i *Instance) GetStashPaths() []*StashConfig {
 	i.RLock()
 	defer i.RUnlock()

-	var ret []*models.StashConfig
+	var ret []*StashConfig

 	v := i.main
 	if !v.IsSet(Stash) {
@@ -479,7 +493,7 @@ func (i *Instance) GetStashPaths() []*models.StashConfig {
 	ss := v.GetStringSlice(Stash)
 	ret = nil
 	for _, path := range ss {
-		toAdd := &models.StashConfig{
+		toAdd := &StashConfig{
 			Path: path,
 		}
 		ret = append(ret, toAdd)
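A rough sketch of how a caller might use the StashConfig entries returned by GetStashPaths to skip excluded libraries; the helper function and its name are illustrative, not part of the config package.

package example // illustrative only

import "github.com/stashapp/stash/internal/manager/config"

// scanTargets filters the configured stash paths down to those that
// should be scanned for video files.
func scanTargets(c *config.Instance) []string {
	var ret []string
	for _, s := range c.GetStashPaths() {
		if s.ExcludeVideo {
			continue // this library only holds images
		}
		ret = append(ret, s.Path)
	}
	return ret
}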
@ -610,8 +624,8 @@ func (i *Instance) GetScraperExcludeTagPatterns() []string {
|
|||
return i.getStringSlice(ScraperExcludeTagPatterns)
|
||||
}
|
||||
|
||||
func (i *Instance) GetStashBoxes() models.StashBoxes {
|
||||
var boxes models.StashBoxes
|
||||
func (i *Instance) GetStashBoxes() []*models.StashBox {
|
||||
var boxes []*models.StashBox
|
||||
if err := i.unmarshalKey(StashBoxes, &boxes); err != nil {
|
||||
logger.Warnf("error in unmarshalkey: %v", err)
|
||||
}
|
||||
|
|
@@ -797,7 +811,13 @@ func (i *Instance) ValidateCredentials(username string, password string) bool {

 var stashBoxRe = regexp.MustCompile("^http.*graphql$")

-func (i *Instance) ValidateStashBoxes(boxes []*models.StashBoxInput) error {
+type StashBoxInput struct {
+	Endpoint string `json:"endpoint"`
+	APIKey   string `json:"api_key"`
+	Name     string `json:"name"`
+}
+
+func (i *Instance) ValidateStashBoxes(boxes []*StashBoxInput) error {
 	isMulti := len(boxes) > 1

 	for _, box := range boxes {
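A standalone sketch of the endpoint check implied by stashBoxRe above: stash-box endpoints must be http(s) URLs ending in "graphql". This mirrors the pattern only; it is not the body of ValidateStashBoxes, which is not shown in this hunk.

package example // illustrative only

import (
	"fmt"
	"regexp"
)

// endpointRe mirrors the stashBoxRe pattern above.
var endpointRe = regexp.MustCompile("^http.*graphql$")

func checkStashBoxEndpoint(endpoint string) error {
	if !endpointRe.MatchString(endpoint) {
		return fmt.Errorf("invalid stash-box endpoint %q: must be an http(s) graphql URL", endpoint)
	}
	return nil
}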
@ -933,18 +953,18 @@ func (i *Instance) getSlideshowDelay() int {
|
|||
return ret
|
||||
}
|
||||
|
||||
func (i *Instance) GetImageLightboxOptions() models.ConfigImageLightboxResult {
|
||||
func (i *Instance) GetImageLightboxOptions() ConfigImageLightboxResult {
|
||||
i.RLock()
|
||||
defer i.RUnlock()
|
||||
|
||||
delay := i.getSlideshowDelay()
|
||||
|
||||
ret := models.ConfigImageLightboxResult{
|
||||
ret := ConfigImageLightboxResult{
|
||||
SlideshowDelay: &delay,
|
||||
}
|
||||
|
||||
if v := i.viperWith(ImageLightboxDisplayMode); v != nil {
|
||||
mode := models.ImageLightboxDisplayMode(v.GetString(ImageLightboxDisplayMode))
|
||||
if v := i.viperWith(ImageLightboxDisplayModeKey); v != nil {
|
||||
mode := ImageLightboxDisplayMode(v.GetString(ImageLightboxDisplayModeKey))
|
||||
ret.DisplayMode = &mode
|
||||
}
|
||||
if v := i.viperWith(ImageLightboxScaleUp); v != nil {
|
||||
|
|
@ -955,8 +975,8 @@ func (i *Instance) GetImageLightboxOptions() models.ConfigImageLightboxResult {
|
|||
value := v.GetBool(ImageLightboxResetZoomOnNav)
|
||||
ret.ResetZoomOnNav = &value
|
||||
}
|
||||
if v := i.viperWith(ImageLightboxScrollMode); v != nil {
|
||||
mode := models.ImageLightboxScrollMode(v.GetString(ImageLightboxScrollMode))
|
||||
if v := i.viperWith(ImageLightboxScrollModeKey); v != nil {
|
||||
mode := ImageLightboxScrollMode(v.GetString(ImageLightboxScrollModeKey))
|
||||
ret.ScrollMode = &mode
|
||||
}
|
||||
if v := i.viperWith(ImageLightboxScrollAttemptsBeforeChange); v != nil {
|
||||
|
|
@ -966,8 +986,8 @@ func (i *Instance) GetImageLightboxOptions() models.ConfigImageLightboxResult {
|
|||
return ret
|
||||
}
|
||||
|
||||
func (i *Instance) GetDisableDropdownCreate() *models.ConfigDisableDropdownCreate {
|
||||
return &models.ConfigDisableDropdownCreate{
|
||||
func (i *Instance) GetDisableDropdownCreate() *ConfigDisableDropdownCreate {
|
||||
return &ConfigDisableDropdownCreate{
|
||||
Performer: i.getBool(DisableDropdownCreatePerformer),
|
||||
Studio: i.getBool(DisableDropdownCreateStudio),
|
||||
Tag: i.getBool(DisableDropdownCreateTag),
|
||||
|
|
@ -1056,13 +1076,13 @@ func (i *Instance) GetDeleteGeneratedDefault() bool {
|
|||
// GetDefaultIdentifySettings returns the default Identify task settings.
|
||||
// Returns nil if the settings could not be unmarshalled, or if it
|
||||
// has not been set.
|
||||
func (i *Instance) GetDefaultIdentifySettings() *models.IdentifyMetadataTaskOptions {
|
||||
func (i *Instance) GetDefaultIdentifySettings() *identify.Options {
|
||||
i.RLock()
|
||||
defer i.RUnlock()
|
||||
v := i.viper(DefaultIdentifySettings)
|
||||
|
||||
if v.IsSet(DefaultIdentifySettings) {
|
||||
var ret models.IdentifyMetadataTaskOptions
|
||||
var ret identify.Options
|
||||
if err := v.UnmarshalKey(DefaultIdentifySettings, &ret); err != nil {
|
||||
return nil
|
||||
}
|
||||
|
|
@ -1075,13 +1095,13 @@ func (i *Instance) GetDefaultIdentifySettings() *models.IdentifyMetadataTaskOpti
|
|||
// GetDefaultScanSettings returns the default Scan task settings.
|
||||
// Returns nil if the settings could not be unmarshalled, or if it
|
||||
// has not been set.
|
||||
func (i *Instance) GetDefaultScanSettings() *models.ScanMetadataOptions {
|
||||
func (i *Instance) GetDefaultScanSettings() *ScanMetadataOptions {
|
||||
i.RLock()
|
||||
defer i.RUnlock()
|
||||
v := i.viper(DefaultScanSettings)
|
||||
|
||||
if v.IsSet(DefaultScanSettings) {
|
||||
var ret models.ScanMetadataOptions
|
||||
var ret ScanMetadataOptions
|
||||
if err := v.UnmarshalKey(DefaultScanSettings, &ret); err != nil {
|
||||
return nil
|
||||
}
|
||||
|
|
@ -1094,13 +1114,13 @@ func (i *Instance) GetDefaultScanSettings() *models.ScanMetadataOptions {
|
|||
// GetDefaultAutoTagSettings returns the default Scan task settings.
|
||||
// Returns nil if the settings could not be unmarshalled, or if it
|
||||
// has not been set.
|
||||
func (i *Instance) GetDefaultAutoTagSettings() *models.AutoTagMetadataOptions {
|
||||
func (i *Instance) GetDefaultAutoTagSettings() *AutoTagMetadataOptions {
|
||||
i.RLock()
|
||||
defer i.RUnlock()
|
||||
v := i.viper(DefaultAutoTagSettings)
|
||||
|
||||
if v.IsSet(DefaultAutoTagSettings) {
|
||||
var ret models.AutoTagMetadataOptions
|
||||
var ret AutoTagMetadataOptions
|
||||
if err := v.UnmarshalKey(DefaultAutoTagSettings, &ret); err != nil {
|
||||
return nil
|
||||
}
|
||||
|
|
|
|||
internal/manager/config/tasks.go (new file, 27 lines)
@@ -0,0 +1,27 @@
package config

type ScanMetadataOptions struct {
	// Set name, date, details from metadata (if present)
	UseFileMetadata bool `json:"useFileMetadata"`
	// Strip file extension from title
	StripFileExtension bool `json:"stripFileExtension"`
	// Generate previews during scan
	ScanGeneratePreviews bool `json:"scanGeneratePreviews"`
	// Generate image previews during scan
	ScanGenerateImagePreviews bool `json:"scanGenerateImagePreviews"`
	// Generate sprites during scan
	ScanGenerateSprites bool `json:"scanGenerateSprites"`
	// Generate phashes during scan
	ScanGeneratePhashes bool `json:"scanGeneratePhashes"`
	// Generate image thumbnails during scan
	ScanGenerateThumbnails bool `json:"scanGenerateThumbnails"`
}

type AutoTagMetadataOptions struct {
	// IDs of performers to tag files with, or "*" for all
	Performers []string `json:"performers"`
	// IDs of studios to tag files with, or "*" for all
	Studios []string `json:"studios"`
	// IDs of tags to tag files with, or "*" for all
	Tags []string `json:"tags"`
}
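A sketch of decoding scan defaults with viper.UnmarshalKey, the same mechanism GetDefaultScanSettings uses earlier in config.go. The local struct, YAML layout and the "defaults.scan" key are assumptions for illustration; the real key is the DefaultScanSettings constant, whose value is not shown here.

package example // illustrative only

import (
	"strings"

	"github.com/spf13/viper"
)

type scanDefaults struct {
	UseFileMetadata     bool `mapstructure:"useFileMetadata"`
	ScanGeneratePhashes bool `mapstructure:"scanGeneratePhashes"`
}

func decodeScanDefaults() (*scanDefaults, error) {
	// Assumed YAML layout for illustration only.
	const yml = `
defaults:
  scan:
    useFileMetadata: true
    scanGeneratePhashes: true
`
	v := viper.New()
	v.SetConfigType("yaml")
	if err := v.ReadConfig(strings.NewReader(yml)); err != nil {
		return nil, err
	}

	var ret scanDefaults
	if err := v.UnmarshalKey("defaults.scan", &ret); err != nil {
		return nil, err
	}
	return &ret, nil
}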
internal/manager/config/ui.go (new file, 106 lines)
@@ -0,0 +1,106 @@
package config

import (
	"fmt"
	"io"
	"strconv"
)

type ConfigImageLightboxResult struct {
	SlideshowDelay             *int                      `json:"slideshowDelay"`
	DisplayMode                *ImageLightboxDisplayMode `json:"displayMode"`
	ScaleUp                    *bool                     `json:"scaleUp"`
	ResetZoomOnNav             *bool                     `json:"resetZoomOnNav"`
	ScrollMode                 *ImageLightboxScrollMode  `json:"scrollMode"`
	ScrollAttemptsBeforeChange int                       `json:"scrollAttemptsBeforeChange"`
}

type ImageLightboxDisplayMode string

const (
	ImageLightboxDisplayModeOriginal ImageLightboxDisplayMode = "ORIGINAL"
	ImageLightboxDisplayModeFitXy    ImageLightboxDisplayMode = "FIT_XY"
	ImageLightboxDisplayModeFitX     ImageLightboxDisplayMode = "FIT_X"
)

var AllImageLightboxDisplayMode = []ImageLightboxDisplayMode{
	ImageLightboxDisplayModeOriginal,
	ImageLightboxDisplayModeFitXy,
	ImageLightboxDisplayModeFitX,
}

func (e ImageLightboxDisplayMode) IsValid() bool {
	switch e {
	case ImageLightboxDisplayModeOriginal, ImageLightboxDisplayModeFitXy, ImageLightboxDisplayModeFitX:
		return true
	}
	return false
}

func (e ImageLightboxDisplayMode) String() string {
	return string(e)
}

func (e *ImageLightboxDisplayMode) UnmarshalGQL(v interface{}) error {
	str, ok := v.(string)
	if !ok {
		return fmt.Errorf("enums must be strings")
	}

	*e = ImageLightboxDisplayMode(str)
	if !e.IsValid() {
		return fmt.Errorf("%s is not a valid ImageLightboxDisplayMode", str)
	}
	return nil
}

func (e ImageLightboxDisplayMode) MarshalGQL(w io.Writer) {
	fmt.Fprint(w, strconv.Quote(e.String()))
}

type ImageLightboxScrollMode string

const (
	ImageLightboxScrollModeZoom ImageLightboxScrollMode = "ZOOM"
	ImageLightboxScrollModePanY ImageLightboxScrollMode = "PAN_Y"
)

var AllImageLightboxScrollMode = []ImageLightboxScrollMode{
	ImageLightboxScrollModeZoom,
	ImageLightboxScrollModePanY,
}

func (e ImageLightboxScrollMode) IsValid() bool {
	switch e {
	case ImageLightboxScrollModeZoom, ImageLightboxScrollModePanY:
		return true
	}
	return false
}

func (e ImageLightboxScrollMode) String() string {
	return string(e)
}

func (e *ImageLightboxScrollMode) UnmarshalGQL(v interface{}) error {
	str, ok := v.(string)
	if !ok {
		return fmt.Errorf("enums must be strings")
	}

	*e = ImageLightboxScrollMode(str)
	if !e.IsValid() {
		return fmt.Errorf("%s is not a valid ImageLightboxScrollMode", str)
	}
	return nil
}

func (e ImageLightboxScrollMode) MarshalGQL(w io.Writer) {
	fmt.Fprint(w, strconv.Quote(e.String()))
}

type ConfigDisableDropdownCreate struct {
	Performer bool `json:"performer"`
	Tag       bool `json:"tag"`
	Studio    bool `json:"studio"`
}
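A small sketch of the gqlgen marshaller contract these enums implement: UnmarshalGQL accepts and validates the raw string value, MarshalGQL writes the value back as a quoted string. The main function and the literal values are illustrative only.

package main // illustrative only

import (
	"fmt"
	"os"

	"github.com/stashapp/stash/internal/manager/config"
)

func main() {
	// UnmarshalGQL validates the raw GraphQL value...
	var mode config.ImageLightboxDisplayMode
	if err := mode.UnmarshalGQL("FIT_XY"); err != nil {
		fmt.Println("unexpected:", err)
		return
	}

	// ...and MarshalGQL writes it back as a quoted string for the response.
	mode.MarshalGQL(os.Stdout) // prints "FIT_XY"
	fmt.Println()

	// Invalid values are rejected rather than silently accepted.
	var bad config.ImageLightboxScrollMode
	fmt.Println(bad.UnmarshalGQL("DIAGONAL")) // error: not a valid ImageLightboxScrollMode
}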
@@ -16,6 +16,32 @@ import (
 	"github.com/stashapp/stash/pkg/tag"
 )

+type SceneParserInput struct {
+	IgnoreWords          []string `json:"ignoreWords"`
+	WhitespaceCharacters *string  `json:"whitespaceCharacters"`
+	CapitalizeTitle      *bool    `json:"capitalizeTitle"`
+	IgnoreOrganized      *bool    `json:"ignoreOrganized"`
+}
+
+type SceneParserResult struct {
+	Scene        *models.Scene   `json:"scene"`
+	Title        *string         `json:"title"`
+	Details      *string         `json:"details"`
+	URL          *string         `json:"url"`
+	Date         *string         `json:"date"`
+	Rating       *int            `json:"rating"`
+	StudioID     *string         `json:"studio_id"`
+	GalleryIds   []string        `json:"gallery_ids"`
+	PerformerIds []string        `json:"performer_ids"`
+	Movies       []*SceneMovieID `json:"movies"`
+	TagIds       []string        `json:"tag_ids"`
+}
+
+type SceneMovieID struct {
+	MovieID    string  `json:"movie_id"`
+	SceneIndex *string `json:"scene_index"`
+}
+
 type parserField struct {
 	field      string
 	fieldRegex *regexp.Regexp
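A minimal sketch of wiring SceneParserInput into NewSceneFilenameParser, whose signature appears in the hunks below. The package name, the ignore-word list and the whitespace characters are assumptions; the sketch is written as if it lived in the same package as the parser.

package scene // assumed package name for the parser shown in this file

import "github.com/stashapp/stash/pkg/models"

func buildParser(q string) *SceneFilenameParser {
	ws := "._"
	capitalize := true

	filter := &models.FindFilterType{Q: &q}
	input := SceneParserInput{
		IgnoreWords:          []string{"1080p", "x264"}, // placeholder ignore words
		WhitespaceCharacters: &ws,
		CapitalizeTitle:      &capitalize,
	}
	return NewSceneFilenameParser(filter, input)
}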
@ -402,7 +428,7 @@ func (m parseMapper) parse(scene *models.Scene) *sceneHolder {
|
|||
|
||||
type SceneFilenameParser struct {
|
||||
Pattern string
|
||||
ParserInput models.SceneParserInput
|
||||
ParserInput SceneParserInput
|
||||
Filter *models.FindFilterType
|
||||
whitespaceRE *regexp.Regexp
|
||||
performerCache map[string]*models.Performer
|
||||
|
|
@ -411,7 +437,7 @@ type SceneFilenameParser struct {
|
|||
tagCache map[string]*models.Tag
|
||||
}
|
||||
|
||||
func NewSceneFilenameParser(filter *models.FindFilterType, config models.SceneParserInput) *SceneFilenameParser {
|
||||
func NewSceneFilenameParser(filter *models.FindFilterType, config SceneParserInput) *SceneFilenameParser {
|
||||
p := &SceneFilenameParser{
|
||||
Pattern: *filter.Q,
|
||||
ParserInput: config,
|
||||
|
|
@ -444,7 +470,7 @@ func (p *SceneFilenameParser) initWhiteSpaceRegex() {
|
|||
}
|
||||
}
|
||||
|
||||
func (p *SceneFilenameParser) Parse(repo models.ReaderRepository) ([]*models.SceneParserResult, int, error) {
|
||||
func (p *SceneFilenameParser) Parse(repo models.ReaderRepository) ([]*SceneParserResult, int, error) {
|
||||
// perform the query to find the scenes
|
||||
mapper, err := newParseMapper(p.Pattern, p.ParserInput.IgnoreWords)
|
||||
|
||||
|
|
@ -476,13 +502,13 @@ func (p *SceneFilenameParser) Parse(repo models.ReaderRepository) ([]*models.Sce
|
|||
return ret, total, nil
|
||||
}
|
||||
|
||||
func (p *SceneFilenameParser) parseScenes(repo models.ReaderRepository, scenes []*models.Scene, mapper *parseMapper) []*models.SceneParserResult {
|
||||
var ret []*models.SceneParserResult
|
||||
func (p *SceneFilenameParser) parseScenes(repo models.ReaderRepository, scenes []*models.Scene, mapper *parseMapper) []*SceneParserResult {
|
||||
var ret []*SceneParserResult
|
||||
for _, scene := range scenes {
|
||||
sceneHolder := mapper.parse(scene)
|
||||
|
||||
if sceneHolder != nil {
|
||||
r := &models.SceneParserResult{
|
||||
r := &SceneParserResult{
|
||||
Scene: scene,
|
||||
}
|
||||
p.setParserResult(repo, *sceneHolder, r)
|
||||
|
|
@ -589,7 +615,7 @@ func (p *SceneFilenameParser) queryTag(qb models.TagReader, tagName string) *mod
|
|||
return ret
|
||||
}
|
||||
|
||||
func (p *SceneFilenameParser) setPerformers(qb models.PerformerReader, h sceneHolder, result *models.SceneParserResult) {
|
||||
func (p *SceneFilenameParser) setPerformers(qb models.PerformerReader, h sceneHolder, result *SceneParserResult) {
|
||||
// query for each performer
|
||||
performersSet := make(map[int]bool)
|
||||
for _, performerName := range h.performers {
|
||||
|
|
@ -605,7 +631,7 @@ func (p *SceneFilenameParser) setPerformers(qb models.PerformerReader, h sceneHo
|
|||
}
|
||||
}
|
||||
|
||||
func (p *SceneFilenameParser) setTags(qb models.TagReader, h sceneHolder, result *models.SceneParserResult) {
|
||||
func (p *SceneFilenameParser) setTags(qb models.TagReader, h sceneHolder, result *SceneParserResult) {
|
||||
// query for each performer
|
||||
tagsSet := make(map[int]bool)
|
||||
for _, tagName := range h.tags {
|
||||
|
|
@ -621,7 +647,7 @@ func (p *SceneFilenameParser) setTags(qb models.TagReader, h sceneHolder, result
|
|||
}
|
||||
}
|
||||
|
||||
func (p *SceneFilenameParser) setStudio(qb models.StudioReader, h sceneHolder, result *models.SceneParserResult) {
|
||||
func (p *SceneFilenameParser) setStudio(qb models.StudioReader, h sceneHolder, result *SceneParserResult) {
|
||||
// query for each performer
|
||||
if h.studio != "" {
|
||||
studio := p.queryStudio(qb, h.studio)
|
||||
|
|
@ -632,7 +658,7 @@ func (p *SceneFilenameParser) setStudio(qb models.StudioReader, h sceneHolder, r
|
|||
}
|
||||
}
|
||||
|
||||
func (p *SceneFilenameParser) setMovies(qb models.MovieReader, h sceneHolder, result *models.SceneParserResult) {
|
||||
func (p *SceneFilenameParser) setMovies(qb models.MovieReader, h sceneHolder, result *SceneParserResult) {
|
||||
// query for each movie
|
||||
moviesSet := make(map[int]bool)
|
||||
for _, movieName := range h.movies {
|
||||
|
|
@ -640,7 +666,7 @@ func (p *SceneFilenameParser) setMovies(qb models.MovieReader, h sceneHolder, re
|
|||
movie := p.queryMovie(qb, movieName)
|
||||
if movie != nil {
|
||||
if _, found := moviesSet[movie.ID]; !found {
|
||||
result.Movies = append(result.Movies, &models.SceneMovieID{
|
||||
result.Movies = append(result.Movies, &SceneMovieID{
|
||||
MovieID: strconv.Itoa(movie.ID),
|
||||
})
|
||||
moviesSet[movie.ID] = true
|
||||
|
|
@ -650,7 +676,7 @@ func (p *SceneFilenameParser) setMovies(qb models.MovieReader, h sceneHolder, re
|
|||
}
|
||||
}
|
||||
|
||||
func (p *SceneFilenameParser) setParserResult(repo models.ReaderRepository, h sceneHolder, result *models.SceneParserResult) {
|
||||
func (p *SceneFilenameParser) setParserResult(repo models.ReaderRepository, h sceneHolder, result *SceneParserResult) {
|
||||
if h.result.Title.Valid {
|
||||
title := h.result.Title.String
|
||||
title = p.replaceWhitespaceCharacters(title)
|
||||
|
|
|
|||
|
|
@@ -2,11 +2,55 @@ package manager

 import (
 	"fmt"
+	"io"
+	"strconv"

 	"github.com/stashapp/stash/pkg/logger"
 	"github.com/stashapp/stash/pkg/models"
 )

+type ImportDuplicateEnum string
+
+const (
+	ImportDuplicateEnumIgnore    ImportDuplicateEnum = "IGNORE"
+	ImportDuplicateEnumOverwrite ImportDuplicateEnum = "OVERWRITE"
+	ImportDuplicateEnumFail      ImportDuplicateEnum = "FAIL"
+)
+
+var AllImportDuplicateEnum = []ImportDuplicateEnum{
+	ImportDuplicateEnumIgnore,
+	ImportDuplicateEnumOverwrite,
+	ImportDuplicateEnumFail,
+}
+
+func (e ImportDuplicateEnum) IsValid() bool {
+	switch e {
+	case ImportDuplicateEnumIgnore, ImportDuplicateEnumOverwrite, ImportDuplicateEnumFail:
+		return true
+	}
+	return false
+}
+
+func (e ImportDuplicateEnum) String() string {
+	return string(e)
+}
+
+func (e *ImportDuplicateEnum) UnmarshalGQL(v interface{}) error {
+	str, ok := v.(string)
+	if !ok {
+		return fmt.Errorf("enums must be strings")
+	}
+
+	*e = ImportDuplicateEnum(str)
+	if !e.IsValid() {
+		return fmt.Errorf("%s is not a valid ImportDuplicateEnum", str)
+	}
+	return nil
+}
+
+func (e ImportDuplicateEnum) MarshalGQL(w io.Writer) {
+	fmt.Fprint(w, strconv.Quote(e.String()))
+}
+
 type importer interface {
 	PreImport() error
 	PostImport(id int) error
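A sketch of the duplicate-handling decision that performImport applies in the next hunk: FAIL aborts, IGNORE skips, OVERWRITE updates in place. The helper function is illustrative; note that internal/... packages are only importable from within the stash module itself.

package example // illustrative only

import (
	"fmt"

	"github.com/stashapp/stash/internal/manager"
)

// resolveDuplicate mirrors the branch in performImport below.
func resolveDuplicate(behaviour manager.ImportDuplicateEnum, name string) (skip bool, err error) {
	switch behaviour {
	case manager.ImportDuplicateEnumFail:
		return false, fmt.Errorf("existing object with name '%s'", name)
	case manager.ImportDuplicateEnumIgnore:
		return true, nil
	default: // OVERWRITE falls through to updating the existing object
		return false, nil
	}
}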
@ -16,7 +60,7 @@ type importer interface {
|
|||
Update(id int) error
|
||||
}
|
||||
|
||||
func performImport(i importer, duplicateBehaviour models.ImportDuplicateEnum) error {
|
||||
func performImport(i importer, duplicateBehaviour ImportDuplicateEnum) error {
|
||||
if err := i.PreImport(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
@ -31,9 +75,9 @@ func performImport(i importer, duplicateBehaviour models.ImportDuplicateEnum) er
|
|||
var id int
|
||||
|
||||
if existing != nil {
|
||||
if duplicateBehaviour == models.ImportDuplicateEnumFail {
|
||||
if duplicateBehaviour == ImportDuplicateEnumFail {
|
||||
return fmt.Errorf("existing object with name '%s'", name)
|
||||
} else if duplicateBehaviour == models.ImportDuplicateEnumIgnore {
|
||||
} else if duplicateBehaviour == ImportDuplicateEnumIgnore {
|
||||
logger.Info("Skipping existing object")
|
||||
return nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,9 +4,11 @@ import (
|
|||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime/pprof"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
|
@@ -30,6 +32,67 @@ import (
 	"github.com/stashapp/stash/ui"
 )

+type SystemStatus struct {
+	DatabaseSchema *int             `json:"databaseSchema"`
+	DatabasePath   *string          `json:"databasePath"`
+	ConfigPath     *string          `json:"configPath"`
+	AppSchema      int              `json:"appSchema"`
+	Status         SystemStatusEnum `json:"status"`
+}
+
+type SystemStatusEnum string
+
+const (
+	SystemStatusEnumSetup          SystemStatusEnum = "SETUP"
+	SystemStatusEnumNeedsMigration SystemStatusEnum = "NEEDS_MIGRATION"
+	SystemStatusEnumOk             SystemStatusEnum = "OK"
+)
+
+var AllSystemStatusEnum = []SystemStatusEnum{
+	SystemStatusEnumSetup,
+	SystemStatusEnumNeedsMigration,
+	SystemStatusEnumOk,
+}
+
+func (e SystemStatusEnum) IsValid() bool {
+	switch e {
+	case SystemStatusEnumSetup, SystemStatusEnumNeedsMigration, SystemStatusEnumOk:
+		return true
+	}
+	return false
+}
+
+func (e SystemStatusEnum) String() string {
+	return string(e)
+}
+
+func (e *SystemStatusEnum) UnmarshalGQL(v interface{}) error {
+	str, ok := v.(string)
+	if !ok {
+		return fmt.Errorf("enums must be strings")
+	}
+
+	*e = SystemStatusEnum(str)
+	if !e.IsValid() {
+		return fmt.Errorf("%s is not a valid SystemStatusEnum", str)
+	}
+	return nil
+}
+
+func (e SystemStatusEnum) MarshalGQL(w io.Writer) {
+	fmt.Fprint(w, strconv.Quote(e.String()))
+}
+
+type SetupInput struct {
+	// Empty to indicate $HOME/.stash/config.yml default
+	ConfigLocation string                     `json:"configLocation"`
+	Stashes        []*config.StashConfigInput `json:"stashes"`
+	// Empty to indicate default
+	DatabaseFile string `json:"databaseFile"`
+	// Empty to indicate default
+	GeneratedLocation string `json:"generatedLocation"`
+}
+
 type Manager struct {
 	Config *config.Instance
 	Logger *log.Logger
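A compact sketch of the status decision that Manager.GetSystemStatus makes further down in this diff: a new system reports SETUP, a database schema behind the app schema reports NEEDS_MIGRATION, otherwise OK. The helper function is illustrative, and the internal/... import only works inside the stash module.

package example // illustrative only

import "github.com/stashapp/stash/internal/manager"

func systemStatus(isNewSystem bool, dbSchema, appSchema int) manager.SystemStatusEnum {
	switch {
	case isNewSystem:
		return manager.SystemStatusEnumSetup
	case dbSchema < appSchema:
		return manager.SystemStatusEnumNeedsMigration
	default:
		return manager.SystemStatusEnumOk
	}
}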
@ -354,7 +417,7 @@ func (s *Manager) RefreshScraperCache() {
|
|||
s.ScraperCache = s.initScraperCache()
|
||||
}
|
||||
|
||||
func setSetupDefaults(input *models.SetupInput) {
|
||||
func setSetupDefaults(input *SetupInput) {
|
||||
if input.ConfigLocation == "" {
|
||||
input.ConfigLocation = filepath.Join(fsutil.GetHomeDirectory(), ".stash", "config.yml")
|
||||
}
|
||||
|
|
@ -369,7 +432,7 @@ func setSetupDefaults(input *models.SetupInput) {
|
|||
}
|
||||
}
|
||||
|
||||
func (s *Manager) Setup(ctx context.Context, input models.SetupInput) error {
|
||||
func (s *Manager) Setup(ctx context.Context, input SetupInput) error {
|
||||
setSetupDefaults(&input)
|
||||
c := s.Config
|
||||
|
||||
|
|
@ -433,7 +496,11 @@ func (s *Manager) validateFFMPEG() error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func (s *Manager) Migrate(ctx context.Context, input models.MigrateInput) error {
|
||||
type MigrateInput struct {
|
||||
BackupPath string `json:"backupPath"`
|
||||
}
|
||||
|
||||
func (s *Manager) Migrate(ctx context.Context, input MigrateInput) error {
|
||||
// always backup so that we can roll back to the previous version if
|
||||
// migration fails
|
||||
backupPath := input.BackupPath
|
||||
|
|
@ -473,20 +540,20 @@ func (s *Manager) Migrate(ctx context.Context, input models.MigrateInput) error
|
|||
return nil
|
||||
}
|
||||
|
||||
func (s *Manager) GetSystemStatus() *models.SystemStatus {
|
||||
status := models.SystemStatusEnumOk
|
||||
func (s *Manager) GetSystemStatus() *SystemStatus {
|
||||
status := SystemStatusEnumOk
|
||||
dbSchema := int(database.Version())
|
||||
dbPath := database.DatabasePath()
|
||||
appSchema := int(database.AppSchemaVersion())
|
||||
configFile := s.Config.GetConfigFile()
|
||||
|
||||
if s.Config.IsNewSystem() {
|
||||
status = models.SystemStatusEnumSetup
|
||||
status = SystemStatusEnumSetup
|
||||
} else if dbSchema < appSchema {
|
||||
status = models.SystemStatusEnumNeedsMigration
|
||||
status = SystemStatusEnumNeedsMigration
|
||||
}
|
||||
|
||||
return &models.SystemStatus{
|
||||
return &SystemStatus{
|
||||
DatabaseSchema: &dbSchema,
|
||||
DatabasePath: &dbPath,
|
||||
AppSchema: appSchema,
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ import (
|
|||
"fmt"
|
||||
"strconv"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/stashapp/stash/internal/manager/config"
|
||||
"github.com/stashapp/stash/pkg/fsutil"
|
||||
|
|
@ -34,12 +35,12 @@ func isImage(pathname string) bool {
|
|||
return fsutil.MatchExtension(pathname, imgExt)
|
||||
}
|
||||
|
||||
func getScanPaths(inputPaths []string) []*models.StashConfig {
|
||||
func getScanPaths(inputPaths []string) []*config.StashConfig {
|
||||
if len(inputPaths) == 0 {
|
||||
return config.GetInstance().GetStashPaths()
|
||||
}
|
||||
|
||||
var ret []*models.StashConfig
|
||||
var ret []*config.StashConfig
|
||||
for _, p := range inputPaths {
|
||||
s := getStashFromDirPath(p)
|
||||
if s == nil {
|
||||
|
|
@@ -62,7 +63,22 @@ func (s *Manager) ScanSubscribe(ctx context.Context) <-chan bool {
 	return s.scanSubs.subscribe(ctx)
 }

-func (s *Manager) Scan(ctx context.Context, input models.ScanMetadataInput) (int, error) {
+type ScanMetadataInput struct {
+	Paths []string `json:"paths"`
+
+	config.ScanMetadataOptions
+
+	// Filter options for the scan
+	Filter *ScanMetaDataFilterInput `json:"filter"`
+}
+
+// Filter options for meta data scannning
+type ScanMetaDataFilterInput struct {
+	// If set, files with a modification time before this time point are ignored by the scan
+	MinModTime *time.Time `json:"minModTime"`
+}
+
+func (s *Manager) Scan(ctx context.Context, input ScanMetadataInput) (int, error) {
 	if err := s.validateFFMPEG(); err != nil {
 		return 0, err
 	}
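A sketch of calling Manager.Scan with the new input type, restricting the scan to one path and to files modified in the last day. The path, the cutoff and the chosen scan options are placeholders; internal/... imports only resolve inside the stash module.

package example // illustrative only

import (
	"context"
	"time"

	"github.com/stashapp/stash/internal/manager"
	"github.com/stashapp/stash/internal/manager/config"
)

func rescanRecent(ctx context.Context, m *manager.Manager) (int, error) {
	cutoff := time.Now().Add(-24 * time.Hour)
	return m.Scan(ctx, manager.ScanMetadataInput{
		Paths: []string{"/media/new"}, // placeholder library path
		ScanMetadataOptions: config.ScanMetadataOptions{
			ScanGeneratePreviews: true,
			ScanGeneratePhashes:  true,
		},
		Filter: &manager.ScanMetaDataFilterInput{MinModTime: &cutoff},
	})
}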
@ -88,7 +104,7 @@ func (s *Manager) Import(ctx context.Context) (int, error) {
|
|||
txnManager: s.TxnManager,
|
||||
BaseDir: metadataPath,
|
||||
Reset: true,
|
||||
DuplicateBehaviour: models.ImportDuplicateEnumFail,
|
||||
DuplicateBehaviour: ImportDuplicateEnumFail,
|
||||
MissingRefBehaviour: models.ImportMissingRefEnumFail,
|
||||
fileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(),
|
||||
}
|
||||
|
|
@ -131,7 +147,7 @@ func (s *Manager) RunSingleTask(ctx context.Context, t Task) int {
|
|||
return s.JobManager.Add(ctx, t.GetDescription(), j)
|
||||
}
|
||||
|
||||
func (s *Manager) Generate(ctx context.Context, input models.GenerateMetadataInput) (int, error) {
|
||||
func (s *Manager) Generate(ctx context.Context, input GenerateMetadataInput) (int, error) {
|
||||
if err := s.validateFFMPEG(); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
|
@ -193,7 +209,18 @@ func (s *Manager) generateScreenshot(ctx context.Context, sceneId string, at *fl
|
|||
return s.JobManager.Add(ctx, fmt.Sprintf("Generating screenshot for scene id %s", sceneId), j)
|
||||
}
|
||||
|
||||
func (s *Manager) AutoTag(ctx context.Context, input models.AutoTagMetadataInput) int {
|
||||
type AutoTagMetadataInput struct {
|
||||
// Paths to tag, null for all files
|
||||
Paths []string `json:"paths"`
|
||||
// IDs of performers to tag files with, or "*" for all
|
||||
Performers []string `json:"performers"`
|
||||
// IDs of studios to tag files with, or "*" for all
|
||||
Studios []string `json:"studios"`
|
||||
// IDs of tags to tag files with, or "*" for all
|
||||
Tags []string `json:"tags"`
|
||||
}
|
||||
|
||||
func (s *Manager) AutoTag(ctx context.Context, input AutoTagMetadataInput) int {
|
||||
j := autoTagJob{
|
||||
txnManager: s.TxnManager,
|
||||
input: input,
|
||||
|
|
@ -202,7 +229,13 @@ func (s *Manager) AutoTag(ctx context.Context, input models.AutoTagMetadataInput
|
|||
return s.JobManager.Add(ctx, "Auto-tagging...", &j)
|
||||
}
|
||||
|
||||
func (s *Manager) Clean(ctx context.Context, input models.CleanMetadataInput) int {
|
||||
type CleanMetadataInput struct {
|
||||
Paths []string `json:"paths"`
|
||||
// Do a dry run. Don't delete any files
|
||||
DryRun bool `json:"dryRun"`
|
||||
}
|
||||
|
||||
func (s *Manager) Clean(ctx context.Context, input CleanMetadataInput) int {
|
||||
j := cleanJob{
|
||||
txnManager: s.TxnManager,
|
||||
input: input,
|
||||
|
|
@ -260,7 +293,21 @@ func (s *Manager) MigrateHash(ctx context.Context) int {
|
|||
return s.JobManager.Add(ctx, "Migrating scene hashes...", j)
|
||||
}
|
||||
|
||||
func (s *Manager) StashBoxBatchPerformerTag(ctx context.Context, input models.StashBoxBatchPerformerTagInput) int {
|
||||
// If neither performer_ids nor performer_names are set, tag all performers
|
||||
type StashBoxBatchPerformerTagInput struct {
|
||||
// Stash endpoint to use for the performer tagging
|
||||
Endpoint int `json:"endpoint"`
|
||||
// Fields to exclude when executing the performer tagging
|
||||
ExcludeFields []string `json:"exclude_fields"`
|
||||
// Refresh performers already tagged by StashBox if true. Only tag performers with no StashBox tagging if false
|
||||
Refresh bool `json:"refresh"`
|
||||
// If set, only tag these performer ids
|
||||
PerformerIds []string `json:"performer_ids"`
|
||||
// If set, only tag these performer names
|
||||
PerformerNames []string `json:"performer_names"`
|
||||
}
|
||||
|
||||
func (s *Manager) StashBoxBatchPerformerTag(ctx context.Context, input StashBoxBatchPerformerTagInput) int {
|
||||
j := job.MakeJobExec(func(ctx context.Context, progress *job.Progress) {
|
||||
logger.Infof("Initiating stash-box batch performer tag")
|
||||
|
||||
|
|
|
|||
|
|
@ -50,20 +50,26 @@ func includeSceneStreamPath(scene *models.Scene, streamingResolution models.Stre
|
|||
return int64(maxStreamingResolution.GetMinResolution()) >= minResolution
|
||||
}
|
||||
|
||||
func makeStreamEndpoint(streamURL string, streamingResolution models.StreamingResolutionEnum, mimeType, label string) *models.SceneStreamEndpoint {
|
||||
return &models.SceneStreamEndpoint{
|
||||
type SceneStreamEndpoint struct {
|
||||
URL string `json:"url"`
|
||||
MimeType *string `json:"mime_type"`
|
||||
Label *string `json:"label"`
|
||||
}
|
||||
|
||||
func makeStreamEndpoint(streamURL string, streamingResolution models.StreamingResolutionEnum, mimeType, label string) *SceneStreamEndpoint {
|
||||
return &SceneStreamEndpoint{
|
||||
URL: fmt.Sprintf("%s?resolution=%s", streamURL, streamingResolution.String()),
|
||||
MimeType: &mimeType,
|
||||
Label: &label,
|
||||
}
|
||||
}
|
||||
|
||||
func GetSceneStreamPaths(scene *models.Scene, directStreamURL string, maxStreamingTranscodeSize models.StreamingResolutionEnum) ([]*models.SceneStreamEndpoint, error) {
|
||||
func GetSceneStreamPaths(scene *models.Scene, directStreamURL string, maxStreamingTranscodeSize models.StreamingResolutionEnum) ([]*SceneStreamEndpoint, error) {
|
||||
if scene == nil {
|
||||
return nil, fmt.Errorf("nil scene")
|
||||
}
|
||||
|
||||
var ret []*models.SceneStreamEndpoint
|
||||
var ret []*SceneStreamEndpoint
|
||||
mimeWebm := ffmpeg.MimeWebm
|
||||
mimeHLS := ffmpeg.MimeHLS
|
||||
mimeMp4 := ffmpeg.MimeMp4
|
||||
|
|
@ -82,7 +88,7 @@ func GetSceneStreamPaths(scene *models.Scene, directStreamURL string, maxStreami
|
|||
|
||||
if HasTranscode(scene, config.GetInstance().GetVideoFileNamingAlgorithm()) || ffmpeg.IsValidAudioForContainer(audioCodec, container) {
|
||||
label := "Direct stream"
|
||||
ret = append(ret, &models.SceneStreamEndpoint{
|
||||
ret = append(ret, &SceneStreamEndpoint{
|
||||
URL: directStreamURL,
|
||||
MimeType: &mimeMp4,
|
||||
Label: &label,
|
||||
|
|
@ -92,7 +98,7 @@ func GetSceneStreamPaths(scene *models.Scene, directStreamURL string, maxStreami
|
|||
// only add mkv stream endpoint if the scene container is an mkv already
|
||||
if container == ffmpeg.Matroska {
|
||||
label := "mkv"
|
||||
ret = append(ret, &models.SceneStreamEndpoint{
|
||||
ret = append(ret, &SceneStreamEndpoint{
|
||||
URL: directStreamURL + ".mkv",
|
||||
// set mkv to mp4 to trick the client, since many clients won't try mkv
|
||||
MimeType: &mimeMp4,
|
||||
|
|
@ -115,8 +121,8 @@ func GetSceneStreamPaths(scene *models.Scene, directStreamURL string, maxStreami
|
|||
mp4LabelStandard := "MP4 Standard (480p)" // "STANDARD"
|
||||
mp4LabelLow := "MP4 Low (240p)" // "LOW"
|
||||
|
||||
var webmStreams []*models.SceneStreamEndpoint
|
||||
var mp4Streams []*models.SceneStreamEndpoint
|
||||
var webmStreams []*SceneStreamEndpoint
|
||||
var mp4Streams []*SceneStreamEndpoint
|
||||
|
||||
webmURL := directStreamURL + ".webm"
|
||||
mp4URL := directStreamURL + ".mp4"
|
||||
|
|
@ -149,7 +155,7 @@ func GetSceneStreamPaths(scene *models.Scene, directStreamURL string, maxStreami
|
|||
ret = append(ret, webmStreams...)
|
||||
ret = append(ret, mp4Streams...)
|
||||
|
||||
defaultStreams := []*models.SceneStreamEndpoint{
|
||||
defaultStreams := []*SceneStreamEndpoint{
|
||||
{
|
||||
URL: directStreamURL + ".webm",
|
||||
MimeType: &mimeWebm,
|
||||
|
|
@ -159,7 +165,7 @@ func GetSceneStreamPaths(scene *models.Scene, directStreamURL string, maxStreami
|
|||
|
||||
ret = append(ret, defaultStreams...)
|
||||
|
||||
hls := models.SceneStreamEndpoint{
|
||||
hls := SceneStreamEndpoint{
|
||||
URL: directStreamURL + ".m3u8",
|
||||
MimeType: &mimeHLS,
|
||||
Label: &labelHLS,
|
||||
|
|
|
|||
|
|
@ -20,7 +20,7 @@ import (
|
|||
|
||||
type autoTagJob struct {
|
||||
txnManager models.TransactionManager
|
||||
input models.AutoTagMetadataInput
|
||||
input AutoTagMetadataInput
|
||||
|
||||
cache match.Cache
|
||||
}
|
||||
|
|
@ -40,7 +40,7 @@ func (j *autoTagJob) Execute(ctx context.Context, progress *job.Progress) {
|
|||
logger.Infof("Finished autotag after %s", time.Since(begin).String())
|
||||
}
|
||||
|
||||
func (j *autoTagJob) isFileBasedAutoTag(input models.AutoTagMetadataInput) bool {
|
||||
func (j *autoTagJob) isFileBasedAutoTag(input AutoTagMetadataInput) bool {
|
||||
const wildcard = "*"
|
||||
performerIds := input.Performers
|
||||
studioIds := input.Studios
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ import (
|
|||
|
||||
type cleanJob struct {
|
||||
txnManager models.TransactionManager
|
||||
input models.CleanMetadataInput
|
||||
input CleanMetadataInput
|
||||
scanSubs *subscriptionManager
|
||||
}
|
||||
|
||||
|
|
@ -488,7 +488,7 @@ func (j *cleanJob) deleteImage(ctx context.Context, imageID int) {
|
|||
}, nil)
|
||||
}
|
||||
|
||||
func getStashFromPath(pathToCheck string) *models.StashConfig {
|
||||
func getStashFromPath(pathToCheck string) *config.StashConfig {
|
||||
for _, s := range config.GetInstance().GetStashPaths() {
|
||||
if fsutil.IsPathInDir(s.Path, filepath.Dir(pathToCheck)) {
|
||||
return s
|
||||
|
|
@ -497,7 +497,7 @@ func getStashFromPath(pathToCheck string) *models.StashConfig {
|
|||
return nil
|
||||
}
|
||||
|
||||
func getStashFromDirPath(pathToCheck string) *models.StashConfig {
|
||||
func getStashFromDirPath(pathToCheck string) *config.StashConfig {
|
||||
for _, s := range config.GetInstance().GetStashPaths() {
|
||||
if fsutil.IsPathInDir(s.Path, pathToCheck) {
|
||||
return s
|
||||
|
|
|
|||
|
|
@ -54,12 +54,28 @@ type ExportTask struct {
|
|||
DownloadHash string
|
||||
}
|
||||
|
||||
type ExportObjectTypeInput struct {
|
||||
Ids []string `json:"ids"`
|
||||
All *bool `json:"all"`
|
||||
}
|
||||
|
||||
type ExportObjectsInput struct {
|
||||
Scenes *ExportObjectTypeInput `json:"scenes"`
|
||||
Images *ExportObjectTypeInput `json:"images"`
|
||||
Studios *ExportObjectTypeInput `json:"studios"`
|
||||
Performers *ExportObjectTypeInput `json:"performers"`
|
||||
Tags *ExportObjectTypeInput `json:"tags"`
|
||||
Movies *ExportObjectTypeInput `json:"movies"`
|
||||
Galleries *ExportObjectTypeInput `json:"galleries"`
|
||||
IncludeDependencies *bool `json:"includeDependencies"`
|
||||
}
|
||||
|
||||
type exportSpec struct {
|
||||
IDs []int
|
||||
all bool
|
||||
}
|
||||
|
||||
func newExportSpec(input *models.ExportObjectTypeInput) *exportSpec {
|
||||
func newExportSpec(input *ExportObjectTypeInput) *exportSpec {
|
||||
if input == nil {
|
||||
return &exportSpec{}
|
||||
}
|
||||
|
|
@ -77,7 +93,7 @@ func newExportSpec(input *models.ExportObjectTypeInput) *exportSpec {
|
|||
return ret
|
||||
}
|
||||
|
||||
func CreateExportTask(a models.HashAlgorithm, input models.ExportObjectsInput) *ExportTask {
|
||||
func CreateExportTask(a models.HashAlgorithm, input ExportObjectsInput) *ExportTask {
|
||||
includeDeps := false
|
||||
if input.IncludeDependencies != nil {
|
||||
includeDeps = *input.IncludeDependencies
|
||||
|
|
|
|||
|
|
@ -17,11 +17,45 @@ import (
|
|||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
type GenerateMetadataInput struct {
|
||||
Sprites *bool `json:"sprites"`
|
||||
Previews *bool `json:"previews"`
|
||||
ImagePreviews *bool `json:"imagePreviews"`
|
||||
PreviewOptions *GeneratePreviewOptionsInput `json:"previewOptions"`
|
||||
Markers *bool `json:"markers"`
|
||||
MarkerImagePreviews *bool `json:"markerImagePreviews"`
|
||||
MarkerScreenshots *bool `json:"markerScreenshots"`
|
||||
Transcodes *bool `json:"transcodes"`
|
||||
// Generate transcodes even if not required
|
||||
ForceTranscodes *bool `json:"forceTranscodes"`
|
||||
Phashes *bool `json:"phashes"`
|
||||
InteractiveHeatmapsSpeeds *bool `json:"interactiveHeatmapsSpeeds"`
|
||||
// scene ids to generate for
|
||||
SceneIDs []string `json:"sceneIDs"`
|
||||
// marker ids to generate for
|
||||
MarkerIDs []string `json:"markerIDs"`
|
||||
// overwrite existing media
|
||||
Overwrite *bool `json:"overwrite"`
|
||||
}
|
||||
|
||||
type GeneratePreviewOptionsInput struct {
|
||||
// Number of segments in a preview file
|
||||
PreviewSegments *int `json:"previewSegments"`
|
||||
// Preview segment duration, in seconds
|
||||
PreviewSegmentDuration *float64 `json:"previewSegmentDuration"`
|
||||
// Duration of start of video to exclude when generating previews
|
||||
PreviewExcludeStart *string `json:"previewExcludeStart"`
|
||||
// Duration of end of video to exclude when generating previews
|
||||
PreviewExcludeEnd *string `json:"previewExcludeEnd"`
|
||||
// Preset when generating preview
|
||||
PreviewPreset *models.PreviewPreset `json:"previewPreset"`
|
||||
}
|
||||
|
||||
const generateQueueSize = 200000
|
||||
|
||||
type GenerateJob struct {
|
||||
txnManager models.TransactionManager
|
||||
input models.GenerateMetadataInput
|
||||
input GenerateMetadataInput
|
||||
|
||||
overwrite bool
|
||||
fileNamingAlgo models.HashAlgorithm
|
||||
|
|
@ -194,7 +228,7 @@ func (j *GenerateJob) queueTasks(ctx context.Context, g *generate.Generator, que
|
|||
return totals
|
||||
}
|
||||
|
||||
func getGeneratePreviewOptions(optionsInput models.GeneratePreviewOptionsInput) generate.PreviewOptions {
|
||||
func getGeneratePreviewOptions(optionsInput GeneratePreviewOptionsInput) generate.PreviewOptions {
|
||||
config := config.GetInstance()
|
||||
|
||||
ret := generate.PreviewOptions{
|
||||
|
|
@ -246,7 +280,7 @@ func (j *GenerateJob) queueSceneJobs(ctx context.Context, g *generate.Generator,
|
|||
|
||||
generatePreviewOptions := j.input.PreviewOptions
|
||||
if generatePreviewOptions == nil {
|
||||
generatePreviewOptions = &models.GeneratePreviewOptionsInput{}
|
||||
generatePreviewOptions = &GeneratePreviewOptionsInput{}
|
||||
}
|
||||
options := getGeneratePreviewOptions(*generatePreviewOptions)
|
||||
|
||||
|
|
|
|||
|
|
@@ -4,6 +4,7 @@ import (
	"context"
	"errors"
	"fmt"
	"strings"

	"github.com/stashapp/stash/internal/identify"
	"github.com/stashapp/stash/pkg/job"

@@ -20,13 +21,13 @@ var ErrInput = errors.New("invalid request input")
type IdentifyJob struct {
	txnManager       models.TransactionManager
	postHookExecutor identify.SceneUpdatePostHookExecutor
-	input            models.IdentifyMetadataInput
+	input            identify.Options

-	stashBoxes models.StashBoxes
+	stashBoxes []*models.StashBox
	progress   *job.Progress
}

-func CreateIdentifyJob(input models.IdentifyMetadataInput) *IdentifyJob {
+func CreateIdentifyJob(input identify.Options) *IdentifyJob {
	return &IdentifyJob{
		txnManager:       instance.TxnManager,
		postHookExecutor: instance.PluginCache,
@@ -192,7 +193,7 @@ func (j *IdentifyJob) getSources() ([]identify.ScraperSource, error) {
	return ret, nil
}

-func (j *IdentifyJob) getStashBox(src *models.ScraperSourceInput) (*models.StashBox, error) {
+func (j *IdentifyJob) getStashBox(src *scraper.Source) (*models.StashBox, error) {
	if src.ScraperID != nil {
		return nil, nil
	}

@@ -202,7 +203,38 @@ func (j *IdentifyJob) getStashBox(src *models.ScraperSourceInput) (*models.Stash
		return nil, fmt.Errorf("%w: stash_box_index or stash_box_endpoint or scraper_id must be set", ErrInput)
	}

-	return j.stashBoxes.ResolveStashBox(*src)
+	return resolveStashBox(j.stashBoxes, *src)
}

+func resolveStashBox(sb []*models.StashBox, source scraper.Source) (*models.StashBox, error) {
+	if source.StashBoxIndex != nil {
+		index := source.StashBoxIndex
+		if *index < 0 || *index >= len(sb) {
+			return nil, fmt.Errorf("%w: invalid stash_box_index: %d", models.ErrScraperSource, index)
+		}
+
+		return sb[*index], nil
+	}
+
+	if source.StashBoxEndpoint != nil {
+		var ret *models.StashBox
+		endpoint := *source.StashBoxEndpoint
+		for _, b := range sb {
+			if strings.EqualFold(endpoint, b.Endpoint) {
+				ret = b
+			}
+		}
+
+		if ret == nil {
+			return nil, fmt.Errorf(`%w: stash-box with endpoint "%s"`, models.ErrNotFound, endpoint)
+		}
+
+		return ret, nil
+	}
+
+	// neither stash-box inputs were provided, so assume it is a scraper
+
+	return nil, nil
+}
+
type stashboxSource struct {
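resolveStashBox replaces the removed models.StashBoxes.ResolveStashBox helper. A minimal usage sketch from inside the manager package; the scraper.Source field names are taken from the diff above, and the endpoint value is a placeholder:

package manager

import (
	"fmt"

	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/scraper"
)

// exampleResolveStashBox resolves a configured stash-box by endpoint.
func exampleResolveStashBox() {
	boxes := []*models.StashBox{
		{Name: "primary", Endpoint: "https://stashbox.example/graphql"},
	}

	// Endpoint matching is case-insensitive (strings.EqualFold above).
	endpoint := "https://STASHBOX.example/graphql"
	box, err := resolveStashBox(boxes, scraper.Source{StashBoxEndpoint: &endpoint})
	if err != nil {
		fmt.Println("no stash-box matched:", err)
		return
	}
	fmt.Println("resolved:", box.Name)
}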
@@ -210,7 +242,7 @@ type stashboxSource struct {
	endpoint string
}

-func (s stashboxSource) ScrapeScene(ctx context.Context, sceneID int) (*models.ScrapedScene, error) {
+func (s stashboxSource) ScrapeScene(ctx context.Context, sceneID int) (*scraper.ScrapedScene, error) {
	results, err := s.FindStashBoxSceneByFingerprints(ctx, sceneID)
	if err != nil {
		return nil, fmt.Errorf("error querying stash-box using scene ID %d: %w", sceneID, err)

@@ -232,8 +264,8 @@ type scraperSource struct {
	scraperID string
}

-func (s scraperSource) ScrapeScene(ctx context.Context, sceneID int) (*models.ScrapedScene, error) {
-	content, err := s.cache.ScrapeID(ctx, s.scraperID, sceneID, models.ScrapeContentTypeScene)
+func (s scraperSource) ScrapeScene(ctx context.Context, sceneID int) (*scraper.ScrapedScene, error) {
+	content, err := s.cache.ScrapeID(ctx, s.scraperID, sceneID, scraper.ScrapeContentTypeScene)
	if err != nil {
		return nil, err
	}

@@ -243,7 +275,7 @@ func (s scraperSource) ScrapeScene(ctx context.Context, sceneID int) (*models.Sc
		return nil, nil
	}

-	if scene, ok := content.(models.ScrapedScene); ok {
+	if scene, ok := content.(scraper.ScrapedScene); ok {
		return &scene, nil
	}
@@ -11,6 +11,7 @@ import (
	"path/filepath"
	"time"

	"github.com/99designs/gqlgen/graphql"
	"github.com/stashapp/stash/internal/manager/config"
	"github.com/stashapp/stash/pkg/database"
	"github.com/stashapp/stash/pkg/fsutil"

@@ -35,7 +36,7 @@ type ImportTask struct {
	BaseDir             string
	TmpZip              string
	Reset               bool
-	DuplicateBehaviour  models.ImportDuplicateEnum
+	DuplicateBehaviour  ImportDuplicateEnum
	MissingRefBehaviour models.ImportMissingRefEnum

	mappings *jsonschema.Mappings

@@ -43,7 +44,13 @@ type ImportTask struct {
	fileNamingAlgorithm models.HashAlgorithm
}

-func CreateImportTask(a models.HashAlgorithm, input models.ImportObjectsInput) (*ImportTask, error) {
+type ImportObjectsInput struct {
+	File                graphql.Upload              `json:"file"`
+	DuplicateBehaviour  ImportDuplicateEnum         `json:"duplicateBehaviour"`
+	MissingRefBehaviour models.ImportMissingRefEnum `json:"missingRefBehaviour"`
+}
+
+func CreateImportTask(a models.HashAlgorithm, input ImportObjectsInput) (*ImportTask, error) {
	baseDir, err := instance.Paths.Generated.TempDir("import")
	if err != nil {
		logger.Errorf("error creating temporary directory for import: %s", err.Error())

@@ -101,7 +108,7 @@ func (t *ImportTask) Start(ctx context.Context) {

	// set default behaviour if not provided
	if !t.DuplicateBehaviour.IsValid() {
-		t.DuplicateBehaviour = models.ImportDuplicateEnumFail
+		t.DuplicateBehaviour = ImportDuplicateEnumFail
	}
	if !t.MissingRefBehaviour.IsValid() {
		t.MissingRefBehaviour = models.ImportMissingRefEnumFail
@@ -6,10 +6,10 @@ import (

	"github.com/stashapp/stash/pkg/job"
	"github.com/stashapp/stash/pkg/logger"
-	"github.com/stashapp/stash/pkg/models"
+	"github.com/stashapp/stash/pkg/plugin"
)

-func (s *Manager) RunPluginTask(ctx context.Context, pluginID string, taskName string, args []*models.PluginArgInput) int {
+func (s *Manager) RunPluginTask(ctx context.Context, pluginID string, taskName string, args []*plugin.PluginArgInput) int {
	j := job.MakeJobExec(func(jobCtx context.Context, progress *job.Progress) {
		pluginProgress := make(chan float64)
		task, err := s.PluginCache.CreateTask(ctx, pluginID, taskName, args, pluginProgress)
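Plugin argument types now come from pkg/plugin instead of pkg/models. A minimal sketch of queueing a plugin task with one string argument; the plugin ID and task name are hypothetical:

package manager

import (
	"context"

	"github.com/stashapp/stash/pkg/plugin"
)

// examplePluginTask queues a hypothetical plugin task and returns its job ID.
func examplePluginTask(ctx context.Context, s *Manager) int {
	mode := "quick"
	args := []*plugin.PluginArgInput{
		{Key: "mode", Value: &plugin.PluginValueInput{Str: &mode}},
	}
	return s.RunPluginTask(ctx, "examplePlugin", "Example Task", args)
}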
@@ -25,7 +25,7 @@ const scanQueueSize = 200000

type ScanJob struct {
	txnManager    models.TransactionManager
-	input         models.ScanMetadataInput
+	input         ScanMetadataInput
	subscriptions *subscriptionManager
}

@@ -88,15 +88,15 @@ func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) {
		task := ScanTask{
			TxnManager:           j.txnManager,
			file:                 file.FSFile(f.path, f.info),
-			UseFileMetadata:      utils.IsTrue(input.UseFileMetadata),
-			StripFileExtension:   utils.IsTrue(input.StripFileExtension),
+			UseFileMetadata:      input.UseFileMetadata,
+			StripFileExtension:   input.StripFileExtension,
			fileNamingAlgorithm:  fileNamingAlgo,
			calculateMD5:         calculateMD5,
-			GeneratePreview:      utils.IsTrue(input.ScanGeneratePreviews),
-			GenerateImagePreview: utils.IsTrue(input.ScanGenerateImagePreviews),
-			GenerateSprite:       utils.IsTrue(input.ScanGenerateSprites),
-			GeneratePhash:        utils.IsTrue(input.ScanGeneratePhashes),
-			GenerateThumbnails:   utils.IsTrue(input.ScanGenerateThumbnails),
+			GeneratePreview:      input.ScanGeneratePreviews,
+			GenerateImagePreview: input.ScanGenerateImagePreviews,
+			GenerateSprite:       input.ScanGenerateSprites,
+			GeneratePhash:        input.ScanGeneratePhashes,
+			GenerateThumbnails:   input.ScanGenerateThumbnails,
			progress:             progress,
			CaseSensitiveFs:      f.caseSensitiveFs,
			mutexManager:         mutexManager,
@@ -145,7 +145,7 @@ func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) {
	j.subscriptions.notify()
}

-func (j *ScanJob) queueFiles(ctx context.Context, paths []*models.StashConfig, scanQueue chan<- scanFile, parallelTasks int) (total int, newFiles int) {
+func (j *ScanJob) queueFiles(ctx context.Context, paths []*config.StashConfig, scanQueue chan<- scanFile, parallelTasks int) (total int, newFiles int) {
	defer close(scanQueue)

	var minModTime time.Time

@@ -322,7 +322,7 @@ func (t *ScanTask) Start(ctx context.Context) {
		iwg.Add()

		go t.progress.ExecuteTask(fmt.Sprintf("Generating preview for %s", path), func() {
-			options := getGeneratePreviewOptions(models.GeneratePreviewOptionsInput{})
+			options := getGeneratePreviewOptions(GeneratePreviewOptionsInput{})
			const overwrite = false

			g := &generate.Generator{

@@ -349,7 +349,7 @@ func (t *ScanTask) Start(ctx context.Context) {
	iwg.Wait()
}

-func walkFilesToScan(s *models.StashConfig, f filepath.WalkFunc) error {
+func walkFilesToScan(s *config.StashConfig, f filepath.WalkFunc) error {
	config := config.GetInstance()
	vidExt := config.GetVideoExtensions()
	imgExt := config.GetImageExtensions()
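walkFilesToScan now takes the config package's StashConfig. A minimal sketch from inside the manager package; the Path field on config.StashConfig is an assumption, as it is not shown in this diff:

package manager

import (
	"fmt"
	"os"

	"github.com/stashapp/stash/internal/manager/config"
)

// exampleWalk lists the files that a scan of one stash path would visit.
func exampleWalk() error {
	stash := &config.StashConfig{Path: "/media/videos"} // Path field assumed
	return walkFilesToScan(stash, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		fmt.Println("would scan:", path)
		return nil
	})
}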
@ -1,46 +0,0 @@
|
|||
package models
|
||||
|
||||
type ResolutionRange struct {
|
||||
min, max int
|
||||
}
|
||||
|
||||
var resolutionRanges = map[ResolutionEnum]ResolutionRange{
|
||||
ResolutionEnumVeryLow: {144, 239},
|
||||
ResolutionEnumLow: {240, 359},
|
||||
ResolutionEnumR360p: {360, 479},
|
||||
ResolutionEnumStandard: {480, 539},
|
||||
ResolutionEnumWebHd: {540, 719},
|
||||
ResolutionEnumStandardHd: {720, 1079},
|
||||
ResolutionEnumFullHd: {1080, 1439},
|
||||
ResolutionEnumQuadHd: {1440, 1919},
|
||||
ResolutionEnumVrHd: {1920, 2159},
|
||||
ResolutionEnumFourK: {2160, 2879},
|
||||
ResolutionEnumFiveK: {2880, 3383},
|
||||
ResolutionEnumSixK: {3384, 4319},
|
||||
ResolutionEnumEightK: {4320, 8639},
|
||||
}
|
||||
|
||||
// GetMaxResolution returns the maximum width or height that media must be
|
||||
// to qualify as this resolution.
|
||||
func (r *ResolutionEnum) GetMaxResolution() int {
|
||||
return resolutionRanges[*r].max
|
||||
}
|
||||
|
||||
// GetMinResolution returns the minimum width or height that media must be
|
||||
// to qualify as this resolution.
|
||||
func (r ResolutionEnum) GetMinResolution() int {
|
||||
return resolutionRanges[r].min
|
||||
}
|
||||
|
||||
var streamingResolutionMax = map[StreamingResolutionEnum]int{
|
||||
StreamingResolutionEnumLow: resolutionRanges[ResolutionEnumLow].min,
|
||||
StreamingResolutionEnumStandard: resolutionRanges[ResolutionEnumStandard].min,
|
||||
StreamingResolutionEnumStandardHd: resolutionRanges[ResolutionEnumStandardHd].min,
|
||||
StreamingResolutionEnumFullHd: resolutionRanges[ResolutionEnumFullHd].min,
|
||||
StreamingResolutionEnumFourK: resolutionRanges[ResolutionEnumFourK].min,
|
||||
StreamingResolutionEnumOriginal: 0,
|
||||
}
|
||||
|
||||
func (r StreamingResolutionEnum) GetMaxResolution() int {
|
||||
return streamingResolutionMax[r]
|
||||
}
|
||||
108  pkg/models/filter.go  Normal file
|
|
@ -0,0 +1,108 @@
|
|||
package models
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type CriterionModifier string
|
||||
|
||||
const (
|
||||
// =
|
||||
CriterionModifierEquals CriterionModifier = "EQUALS"
|
||||
// !=
|
||||
CriterionModifierNotEquals CriterionModifier = "NOT_EQUALS"
|
||||
// >
|
||||
CriterionModifierGreaterThan CriterionModifier = "GREATER_THAN"
|
||||
// <
|
||||
CriterionModifierLessThan CriterionModifier = "LESS_THAN"
|
||||
// IS NULL
|
||||
CriterionModifierIsNull CriterionModifier = "IS_NULL"
|
||||
// IS NOT NULL
|
||||
CriterionModifierNotNull CriterionModifier = "NOT_NULL"
|
||||
// INCLUDES ALL
|
||||
CriterionModifierIncludesAll CriterionModifier = "INCLUDES_ALL"
|
||||
CriterionModifierIncludes CriterionModifier = "INCLUDES"
|
||||
CriterionModifierExcludes CriterionModifier = "EXCLUDES"
|
||||
// MATCHES REGEX
|
||||
CriterionModifierMatchesRegex CriterionModifier = "MATCHES_REGEX"
|
||||
// NOT MATCHES REGEX
|
||||
CriterionModifierNotMatchesRegex CriterionModifier = "NOT_MATCHES_REGEX"
|
||||
// >= AND <=
|
||||
CriterionModifierBetween CriterionModifier = "BETWEEN"
|
||||
// < OR >
|
||||
CriterionModifierNotBetween CriterionModifier = "NOT_BETWEEN"
|
||||
)
|
||||
|
||||
var AllCriterionModifier = []CriterionModifier{
|
||||
CriterionModifierEquals,
|
||||
CriterionModifierNotEquals,
|
||||
CriterionModifierGreaterThan,
|
||||
CriterionModifierLessThan,
|
||||
CriterionModifierIsNull,
|
||||
CriterionModifierNotNull,
|
||||
CriterionModifierIncludesAll,
|
||||
CriterionModifierIncludes,
|
||||
CriterionModifierExcludes,
|
||||
CriterionModifierMatchesRegex,
|
||||
CriterionModifierNotMatchesRegex,
|
||||
CriterionModifierBetween,
|
||||
CriterionModifierNotBetween,
|
||||
}
|
||||
|
||||
func (e CriterionModifier) IsValid() bool {
|
||||
switch e {
|
||||
case CriterionModifierEquals, CriterionModifierNotEquals, CriterionModifierGreaterThan, CriterionModifierLessThan, CriterionModifierIsNull, CriterionModifierNotNull, CriterionModifierIncludesAll, CriterionModifierIncludes, CriterionModifierExcludes, CriterionModifierMatchesRegex, CriterionModifierNotMatchesRegex, CriterionModifierBetween, CriterionModifierNotBetween:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (e CriterionModifier) String() string {
|
||||
return string(e)
|
||||
}
|
||||
|
||||
func (e *CriterionModifier) UnmarshalGQL(v interface{}) error {
|
||||
str, ok := v.(string)
|
||||
if !ok {
|
||||
return fmt.Errorf("enums must be strings")
|
||||
}
|
||||
|
||||
*e = CriterionModifier(str)
|
||||
if !e.IsValid() {
|
||||
return fmt.Errorf("%s is not a valid CriterionModifier", str)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e CriterionModifier) MarshalGQL(w io.Writer) {
|
||||
fmt.Fprint(w, strconv.Quote(e.String()))
|
||||
}
|
||||
|
||||
type StringCriterionInput struct {
|
||||
Value string `json:"value"`
|
||||
Modifier CriterionModifier `json:"modifier"`
|
||||
}
|
||||
|
||||
type IntCriterionInput struct {
|
||||
Value int `json:"value"`
|
||||
Value2 *int `json:"value2"`
|
||||
Modifier CriterionModifier `json:"modifier"`
|
||||
}
|
||||
|
||||
type ResolutionCriterionInput struct {
|
||||
Value ResolutionEnum `json:"value"`
|
||||
Modifier CriterionModifier `json:"modifier"`
|
||||
}
|
||||
|
||||
type HierarchicalMultiCriterionInput struct {
|
||||
Value []string `json:"value"`
|
||||
Modifier CriterionModifier `json:"modifier"`
|
||||
Depth *int `json:"depth"`
|
||||
}
|
||||
|
||||
type MultiCriterionInput struct {
|
||||
Value []string `json:"value"`
|
||||
Modifier CriterionModifier `json:"modifier"`
|
||||
}
|
||||
|
|
@ -1,9 +1,65 @@
|
|||
package models
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// PerPageAll is the value used for perPage to indicate all results should be
|
||||
// returned.
|
||||
const PerPageAll = -1
|
||||
|
||||
type SortDirectionEnum string
|
||||
|
||||
const (
|
||||
SortDirectionEnumAsc SortDirectionEnum = "ASC"
|
||||
SortDirectionEnumDesc SortDirectionEnum = "DESC"
|
||||
)
|
||||
|
||||
var AllSortDirectionEnum = []SortDirectionEnum{
|
||||
SortDirectionEnumAsc,
|
||||
SortDirectionEnumDesc,
|
||||
}
|
||||
|
||||
func (e SortDirectionEnum) IsValid() bool {
|
||||
switch e {
|
||||
case SortDirectionEnumAsc, SortDirectionEnumDesc:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (e SortDirectionEnum) String() string {
|
||||
return string(e)
|
||||
}
|
||||
|
||||
func (e *SortDirectionEnum) UnmarshalGQL(v interface{}) error {
|
||||
str, ok := v.(string)
|
||||
if !ok {
|
||||
return fmt.Errorf("enums must be strings")
|
||||
}
|
||||
|
||||
*e = SortDirectionEnum(str)
|
||||
if !e.IsValid() {
|
||||
return fmt.Errorf("%s is not a valid SortDirectionEnum", str)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e SortDirectionEnum) MarshalGQL(w io.Writer) {
|
||||
fmt.Fprint(w, strconv.Quote(e.String()))
|
||||
}
|
||||
|
||||
type FindFilterType struct {
|
||||
Q *string `json:"q"`
|
||||
Page *int `json:"page"`
|
||||
// use per_page = -1 to indicate all results. Defaults to 25.
|
||||
PerPage *int `json:"per_page"`
|
||||
Sort *string `json:"sort"`
|
||||
Direction *SortDirectionEnum `json:"direction"`
|
||||
}
|
||||
|
||||
func (ff FindFilterType) GetSort(defaultSort string) string {
|
||||
var sort string
|
||||
if ff.Sort == nil {
|
||||
|
|
@ -1,5 +1,71 @@
|
|||
package models
|
||||
|
||||
type GalleryFilterType struct {
|
||||
And *GalleryFilterType `json:"AND"`
|
||||
Or *GalleryFilterType `json:"OR"`
|
||||
Not *GalleryFilterType `json:"NOT"`
|
||||
Title *StringCriterionInput `json:"title"`
|
||||
Details *StringCriterionInput `json:"details"`
|
||||
// Filter by file checksum
|
||||
Checksum *StringCriterionInput `json:"checksum"`
|
||||
// Filter by path
|
||||
Path *StringCriterionInput `json:"path"`
|
||||
// Filter to only include galleries missing this property
|
||||
IsMissing *string `json:"is_missing"`
|
||||
// Filter to include/exclude galleries that were created from zip
|
||||
IsZip *bool `json:"is_zip"`
|
||||
// Filter by rating
|
||||
Rating *IntCriterionInput `json:"rating"`
|
||||
// Filter by organized
|
||||
Organized *bool `json:"organized"`
|
||||
// Filter by average image resolution
|
||||
AverageResolution *ResolutionCriterionInput `json:"average_resolution"`
|
||||
// Filter to only include galleries with this studio
|
||||
Studios *HierarchicalMultiCriterionInput `json:"studios"`
|
||||
// Filter to only include galleries with these tags
|
||||
Tags *HierarchicalMultiCriterionInput `json:"tags"`
|
||||
// Filter by tag count
|
||||
TagCount *IntCriterionInput `json:"tag_count"`
|
||||
// Filter to only include galleries with performers with these tags
|
||||
PerformerTags *HierarchicalMultiCriterionInput `json:"performer_tags"`
|
||||
// Filter to only include galleries with these performers
|
||||
Performers *MultiCriterionInput `json:"performers"`
|
||||
// Filter by performer count
|
||||
PerformerCount *IntCriterionInput `json:"performer_count"`
|
||||
// Filter galleries that have performers that have been favorited
|
||||
PerformerFavorite *bool `json:"performer_favorite"`
|
||||
// Filter galleries by performer age at time of gallery
|
||||
PerformerAge *IntCriterionInput `json:"performer_age"`
|
||||
// Filter by number of images in this gallery
|
||||
ImageCount *IntCriterionInput `json:"image_count"`
|
||||
// Filter by url
|
||||
URL *StringCriterionInput `json:"url"`
|
||||
}
|
||||
|
||||
type GalleryUpdateInput struct {
|
||||
ClientMutationID *string `json:"clientMutationId"`
|
||||
ID string `json:"id"`
|
||||
Title *string `json:"title"`
|
||||
URL *string `json:"url"`
|
||||
Date *string `json:"date"`
|
||||
Details *string `json:"details"`
|
||||
Rating *int `json:"rating"`
|
||||
Organized *bool `json:"organized"`
|
||||
SceneIds []string `json:"scene_ids"`
|
||||
StudioID *string `json:"studio_id"`
|
||||
TagIds []string `json:"tag_ids"`
|
||||
PerformerIds []string `json:"performer_ids"`
|
||||
}
|
||||
|
||||
type GalleryDestroyInput struct {
|
||||
Ids []string `json:"ids"`
|
||||
// If true, then the zip file will be deleted if the gallery is zip-file-based.
|
||||
// If gallery is folder-based, then any files not associated with other
|
||||
// galleries will be deleted, along with the folder, if it is not empty.
|
||||
DeleteFile *bool `json:"delete_file"`
|
||||
DeleteGenerated *bool `json:"delete_generated"`
|
||||
}
|
||||
|
||||
type GalleryReader interface {
|
||||
Find(id int) (*Gallery, error)
|
||||
FindMany(ids []int) ([]*Gallery, error)
|
||||
|
|
|
|||
91  pkg/models/generate.go  Normal file
|
|
@ -0,0 +1,91 @@
|
|||
package models
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type GenerateMetadataOptions struct {
|
||||
Sprites *bool `json:"sprites"`
|
||||
Previews *bool `json:"previews"`
|
||||
ImagePreviews *bool `json:"imagePreviews"`
|
||||
PreviewOptions *GeneratePreviewOptions `json:"previewOptions"`
|
||||
Markers *bool `json:"markers"`
|
||||
MarkerImagePreviews *bool `json:"markerImagePreviews"`
|
||||
MarkerScreenshots *bool `json:"markerScreenshots"`
|
||||
Transcodes *bool `json:"transcodes"`
|
||||
Phashes *bool `json:"phashes"`
|
||||
InteractiveHeatmapsSpeeds *bool `json:"interactiveHeatmapsSpeeds"`
|
||||
}
|
||||
|
||||
type GeneratePreviewOptions struct {
|
||||
// Number of segments in a preview file
|
||||
PreviewSegments *int `json:"previewSegments"`
|
||||
// Preview segment duration, in seconds
|
||||
PreviewSegmentDuration *float64 `json:"previewSegmentDuration"`
|
||||
// Duration of start of video to exclude when generating previews
|
||||
PreviewExcludeStart *string `json:"previewExcludeStart"`
|
||||
// Duration of end of video to exclude when generating previews
|
||||
PreviewExcludeEnd *string `json:"previewExcludeEnd"`
|
||||
// Preset when generating preview
|
||||
PreviewPreset *PreviewPreset `json:"previewPreset"`
|
||||
}
|
||||
|
||||
type PreviewPreset string
|
||||
|
||||
const (
|
||||
// X264_ULTRAFAST
|
||||
PreviewPresetUltrafast PreviewPreset = "ultrafast"
|
||||
// X264_VERYFAST
|
||||
PreviewPresetVeryfast PreviewPreset = "veryfast"
|
||||
// X264_FAST
|
||||
PreviewPresetFast PreviewPreset = "fast"
|
||||
// X264_MEDIUM
|
||||
PreviewPresetMedium PreviewPreset = "medium"
|
||||
// X264_SLOW
|
||||
PreviewPresetSlow PreviewPreset = "slow"
|
||||
// X264_SLOWER
|
||||
PreviewPresetSlower PreviewPreset = "slower"
|
||||
// X264_VERYSLOW
|
||||
PreviewPresetVeryslow PreviewPreset = "veryslow"
|
||||
)
|
||||
|
||||
var AllPreviewPreset = []PreviewPreset{
|
||||
PreviewPresetUltrafast,
|
||||
PreviewPresetVeryfast,
|
||||
PreviewPresetFast,
|
||||
PreviewPresetMedium,
|
||||
PreviewPresetSlow,
|
||||
PreviewPresetSlower,
|
||||
PreviewPresetVeryslow,
|
||||
}
|
||||
|
||||
func (e PreviewPreset) IsValid() bool {
|
||||
switch e {
|
||||
case PreviewPresetUltrafast, PreviewPresetVeryfast, PreviewPresetFast, PreviewPresetMedium, PreviewPresetSlow, PreviewPresetSlower, PreviewPresetVeryslow:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (e PreviewPreset) String() string {
|
||||
return string(e)
|
||||
}
|
||||
|
||||
func (e *PreviewPreset) UnmarshalGQL(v interface{}) error {
|
||||
str, ok := v.(string)
|
||||
if !ok {
|
||||
return fmt.Errorf("enums must be strings")
|
||||
}
|
||||
|
||||
*e = PreviewPreset(str)
|
||||
if !e.IsValid() {
|
||||
return fmt.Errorf("%s is not a valid PreviewPreset", str)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e PreviewPreset) MarshalGQL(w io.Writer) {
|
||||
fmt.Fprint(w, strconv.Quote(e.String()))
|
||||
}
|
||||
|
|
@ -1,5 +1,54 @@
|
|||
package models
|
||||
|
||||
type ImageFilterType struct {
|
||||
And *ImageFilterType `json:"AND"`
|
||||
Or *ImageFilterType `json:"OR"`
|
||||
Not *ImageFilterType `json:"NOT"`
|
||||
Title *StringCriterionInput `json:"title"`
|
||||
// Filter by file checksum
|
||||
Checksum *StringCriterionInput `json:"checksum"`
|
||||
// Filter by path
|
||||
Path *StringCriterionInput `json:"path"`
|
||||
// Filter by rating
|
||||
Rating *IntCriterionInput `json:"rating"`
|
||||
// Filter by organized
|
||||
Organized *bool `json:"organized"`
|
||||
// Filter by o-counter
|
||||
OCounter *IntCriterionInput `json:"o_counter"`
|
||||
// Filter by resolution
|
||||
Resolution *ResolutionCriterionInput `json:"resolution"`
|
||||
// Filter to only include images missing this property
|
||||
IsMissing *string `json:"is_missing"`
|
||||
// Filter to only include images with this studio
|
||||
Studios *HierarchicalMultiCriterionInput `json:"studios"`
|
||||
// Filter to only include images with these tags
|
||||
Tags *HierarchicalMultiCriterionInput `json:"tags"`
|
||||
// Filter by tag count
|
||||
TagCount *IntCriterionInput `json:"tag_count"`
|
||||
// Filter to only include images with performers with these tags
|
||||
PerformerTags *HierarchicalMultiCriterionInput `json:"performer_tags"`
|
||||
// Filter to only include images with these performers
|
||||
Performers *MultiCriterionInput `json:"performers"`
|
||||
// Filter by performer count
|
||||
PerformerCount *IntCriterionInput `json:"performer_count"`
|
||||
// Filter images that have performers that have been favorited
|
||||
PerformerFavorite *bool `json:"performer_favorite"`
|
||||
// Filter to only include images with these galleries
|
||||
Galleries *MultiCriterionInput `json:"galleries"`
|
||||
}
|
||||
|
||||
type ImageDestroyInput struct {
|
||||
ID string `json:"id"`
|
||||
DeleteFile *bool `json:"delete_file"`
|
||||
DeleteGenerated *bool `json:"delete_generated"`
|
||||
}
|
||||
|
||||
type ImagesDestroyInput struct {
|
||||
Ids []string `json:"ids"`
|
||||
DeleteFile *bool `json:"delete_file"`
|
||||
DeleteGenerated *bool `json:"delete_generated"`
|
||||
}
|
||||
|
||||
type ImageQueryOptions struct {
|
||||
QueryOptions
|
||||
ImageFilter *ImageFilterType
|
||||
|
|
|
|||
50  pkg/models/import.go  Normal file
|
|
@ -0,0 +1,50 @@
|
|||
package models
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type ImportMissingRefEnum string
|
||||
|
||||
const (
|
||||
ImportMissingRefEnumIgnore ImportMissingRefEnum = "IGNORE"
|
||||
ImportMissingRefEnumFail ImportMissingRefEnum = "FAIL"
|
||||
ImportMissingRefEnumCreate ImportMissingRefEnum = "CREATE"
|
||||
)
|
||||
|
||||
var AllImportMissingRefEnum = []ImportMissingRefEnum{
|
||||
ImportMissingRefEnumIgnore,
|
||||
ImportMissingRefEnumFail,
|
||||
ImportMissingRefEnumCreate,
|
||||
}
|
||||
|
||||
func (e ImportMissingRefEnum) IsValid() bool {
|
||||
switch e {
|
||||
case ImportMissingRefEnumIgnore, ImportMissingRefEnumFail, ImportMissingRefEnumCreate:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (e ImportMissingRefEnum) String() string {
|
||||
return string(e)
|
||||
}
|
||||
|
||||
func (e *ImportMissingRefEnum) UnmarshalGQL(v interface{}) error {
|
||||
str, ok := v.(string)
|
||||
if !ok {
|
||||
return fmt.Errorf("enums must be strings")
|
||||
}
|
||||
|
||||
*e = ImportMissingRefEnum(str)
|
||||
if !e.IsValid() {
|
||||
return fmt.Errorf("%s is not a valid ImportMissingRefEnum", str)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e ImportMissingRefEnum) MarshalGQL(w io.Writer) {
|
||||
fmt.Fprint(w, strconv.Quote(e.String()))
|
||||
}
|
||||
|
|
@ -1,6 +1,53 @@
|
|||
package models
|
||||
|
||||
import "time"
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"strconv"
|
||||
"time"
|
||||
)
|
||||
|
||||
type HashAlgorithm string
|
||||
|
||||
const (
|
||||
HashAlgorithmMd5 HashAlgorithm = "MD5"
|
||||
// oshash
|
||||
HashAlgorithmOshash HashAlgorithm = "OSHASH"
|
||||
)
|
||||
|
||||
var AllHashAlgorithm = []HashAlgorithm{
|
||||
HashAlgorithmMd5,
|
||||
HashAlgorithmOshash,
|
||||
}
|
||||
|
||||
func (e HashAlgorithm) IsValid() bool {
|
||||
switch e {
|
||||
case HashAlgorithmMd5, HashAlgorithmOshash:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (e HashAlgorithm) String() string {
|
||||
return string(e)
|
||||
}
|
||||
|
||||
func (e *HashAlgorithm) UnmarshalGQL(v interface{}) error {
|
||||
str, ok := v.(string)
|
||||
if !ok {
|
||||
return fmt.Errorf("enums must be strings")
|
||||
}
|
||||
|
||||
*e = HashAlgorithm(str)
|
||||
if !e.IsValid() {
|
||||
return fmt.Errorf("%s is not a valid HashAlgorithm", str)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e HashAlgorithm) MarshalGQL(w io.Writer) {
|
||||
fmt.Fprint(w, strconv.Quote(e.String()))
|
||||
}
|
||||
|
||||
type File struct {
|
||||
Checksum string `db:"checksum" json:"checksum"`
|
||||
|
|
|
|||
|
|
@ -1,5 +1,64 @@
|
|||
package models
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type FilterMode string
|
||||
|
||||
const (
|
||||
FilterModeScenes FilterMode = "SCENES"
|
||||
FilterModePerformers FilterMode = "PERFORMERS"
|
||||
FilterModeStudios FilterMode = "STUDIOS"
|
||||
FilterModeGalleries FilterMode = "GALLERIES"
|
||||
FilterModeSceneMarkers FilterMode = "SCENE_MARKERS"
|
||||
FilterModeMovies FilterMode = "MOVIES"
|
||||
FilterModeTags FilterMode = "TAGS"
|
||||
FilterModeImages FilterMode = "IMAGES"
|
||||
)
|
||||
|
||||
var AllFilterMode = []FilterMode{
|
||||
FilterModeScenes,
|
||||
FilterModePerformers,
|
||||
FilterModeStudios,
|
||||
FilterModeGalleries,
|
||||
FilterModeSceneMarkers,
|
||||
FilterModeMovies,
|
||||
FilterModeTags,
|
||||
FilterModeImages,
|
||||
}
|
||||
|
||||
func (e FilterMode) IsValid() bool {
|
||||
switch e {
|
||||
case FilterModeScenes, FilterModePerformers, FilterModeStudios, FilterModeGalleries, FilterModeSceneMarkers, FilterModeMovies, FilterModeTags, FilterModeImages:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (e FilterMode) String() string {
|
||||
return string(e)
|
||||
}
|
||||
|
||||
func (e *FilterMode) UnmarshalGQL(v interface{}) error {
|
||||
str, ok := v.(string)
|
||||
if !ok {
|
||||
return fmt.Errorf("enums must be strings")
|
||||
}
|
||||
|
||||
*e = FilterMode(str)
|
||||
if !e.IsValid() {
|
||||
return fmt.Errorf("%s is not a valid FilterMode", str)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e FilterMode) MarshalGQL(w io.Writer) {
|
||||
fmt.Fprint(w, strconv.Quote(e.String()))
|
||||
}
|
||||
|
||||
type SavedFilter struct {
|
||||
ID int `db:"id" json:"id"`
|
||||
Mode FilterMode `db:"mode" json:"mode"`
|
||||
|
|
|
|||
|
|
@ -122,6 +122,30 @@ type ScenePartial struct {
|
|||
InteractiveSpeed *sql.NullInt64 `db:"interactive_speed" json:"interactive_speed"`
|
||||
}
|
||||
|
||||
type SceneMovieInput struct {
|
||||
MovieID string `json:"movie_id"`
|
||||
SceneIndex *int `json:"scene_index"`
|
||||
}
|
||||
|
||||
type SceneUpdateInput struct {
|
||||
ClientMutationID *string `json:"clientMutationId"`
|
||||
ID string `json:"id"`
|
||||
Title *string `json:"title"`
|
||||
Details *string `json:"details"`
|
||||
URL *string `json:"url"`
|
||||
Date *string `json:"date"`
|
||||
Rating *int `json:"rating"`
|
||||
Organized *bool `json:"organized"`
|
||||
StudioID *string `json:"studio_id"`
|
||||
GalleryIds []string `json:"gallery_ids"`
|
||||
PerformerIds []string `json:"performer_ids"`
|
||||
Movies []*SceneMovieInput `json:"movies"`
|
||||
TagIds []string `json:"tag_ids"`
|
||||
// This should be a URL or a base64 encoded data URL
|
||||
CoverImage *string `json:"cover_image"`
|
||||
StashIds []*StashIDInput `json:"stash_ids"`
|
||||
}
|
||||
|
||||
// UpdateInput constructs a SceneUpdateInput using the populated fields in the ScenePartial object.
|
||||
func (s ScenePartial) UpdateInput() SceneUpdateInput {
|
||||
boolPtrCopy := func(v *bool) *bool {
|
||||
|
|
|
|||
|
|
@ -4,6 +4,78 @@ import (
|
|||
"database/sql"
|
||||
)
|
||||
|
||||
type ScrapedStudio struct {
|
||||
// Set if studio matched
|
||||
StoredID *string `json:"stored_id"`
|
||||
Name string `json:"name"`
|
||||
URL *string `json:"url"`
|
||||
Image *string `json:"image"`
|
||||
RemoteSiteID *string `json:"remote_site_id"`
|
||||
}
|
||||
|
||||
func (ScrapedStudio) IsScrapedContent() {}
|
||||
|
||||
// A performer from a scraping operation...
|
||||
type ScrapedPerformer struct {
|
||||
// Set if performer matched
|
||||
StoredID *string `json:"stored_id"`
|
||||
Name *string `json:"name"`
|
||||
Gender *string `json:"gender"`
|
||||
URL *string `json:"url"`
|
||||
Twitter *string `json:"twitter"`
|
||||
Instagram *string `json:"instagram"`
|
||||
Birthdate *string `json:"birthdate"`
|
||||
Ethnicity *string `json:"ethnicity"`
|
||||
Country *string `json:"country"`
|
||||
EyeColor *string `json:"eye_color"`
|
||||
Height *string `json:"height"`
|
||||
Measurements *string `json:"measurements"`
|
||||
FakeTits *string `json:"fake_tits"`
|
||||
CareerLength *string `json:"career_length"`
|
||||
Tattoos *string `json:"tattoos"`
|
||||
Piercings *string `json:"piercings"`
|
||||
Aliases *string `json:"aliases"`
|
||||
Tags []*ScrapedTag `json:"tags"`
|
||||
// This should be a base64 encoded data URL
|
||||
Image *string `json:"image"`
|
||||
Images []string `json:"images"`
|
||||
Details *string `json:"details"`
|
||||
DeathDate *string `json:"death_date"`
|
||||
HairColor *string `json:"hair_color"`
|
||||
Weight *string `json:"weight"`
|
||||
RemoteSiteID *string `json:"remote_site_id"`
|
||||
}
|
||||
|
||||
func (ScrapedPerformer) IsScrapedContent() {}
|
||||
|
||||
type ScrapedTag struct {
|
||||
// Set if tag matched
|
||||
StoredID *string `json:"stored_id"`
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
func (ScrapedTag) IsScrapedContent() {}
|
||||
|
||||
// A movie from a scraping operation...
|
||||
type ScrapedMovie struct {
|
||||
StoredID *string `json:"stored_id"`
|
||||
Name *string `json:"name"`
|
||||
Aliases *string `json:"aliases"`
|
||||
Duration *string `json:"duration"`
|
||||
Date *string `json:"date"`
|
||||
Rating *string `json:"rating"`
|
||||
Director *string `json:"director"`
|
||||
URL *string `json:"url"`
|
||||
Synopsis *string `json:"synopsis"`
|
||||
Studio *ScrapedStudio `json:"studio"`
|
||||
// This should be a base64 encoded data URL
|
||||
FrontImage *string `json:"front_image"`
|
||||
// This should be a base64 encoded data URL
|
||||
BackImage *string `json:"back_image"`
|
||||
}
|
||||
|
||||
func (ScrapedMovie) IsScrapedContent() {}
|
||||
|
||||
type ScrapedItem struct {
|
||||
ID int `db:"id" json:"id"`
|
||||
Title sql.NullString `db:"title" json:"title"`
|
||||
|
|
|
|||
|
|
@ -1,5 +1,23 @@
|
|||
package models
|
||||
|
||||
type MovieFilterType struct {
|
||||
Name *StringCriterionInput `json:"name"`
|
||||
Director *StringCriterionInput `json:"director"`
|
||||
Synopsis *StringCriterionInput `json:"synopsis"`
|
||||
// Filter by duration (in seconds)
|
||||
Duration *IntCriterionInput `json:"duration"`
|
||||
// Filter by rating
|
||||
Rating *IntCriterionInput `json:"rating"`
|
||||
// Filter to only include movies with this studio
|
||||
Studios *HierarchicalMultiCriterionInput `json:"studios"`
|
||||
// Filter to only include movies missing this property
|
||||
IsMissing *string `json:"is_missing"`
|
||||
// Filter by url
|
||||
URL *StringCriterionInput `json:"url"`
|
||||
// Filter to only include movies where performer appears in a scene
|
||||
Performers *MultiCriterionInput `json:"performers"`
|
||||
}
|
||||
|
||||
type MovieReader interface {
|
||||
Find(id int) (*Movie, error)
|
||||
FindMany(ids []int) ([]*Movie, error)
|
||||
|
|
|
|||
|
|
@ -1,5 +1,129 @@
|
|||
package models
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type GenderEnum string
|
||||
|
||||
const (
|
||||
GenderEnumMale GenderEnum = "MALE"
|
||||
GenderEnumFemale GenderEnum = "FEMALE"
|
||||
GenderEnumTransgenderMale GenderEnum = "TRANSGENDER_MALE"
|
||||
GenderEnumTransgenderFemale GenderEnum = "TRANSGENDER_FEMALE"
|
||||
GenderEnumIntersex GenderEnum = "INTERSEX"
|
||||
GenderEnumNonBinary GenderEnum = "NON_BINARY"
|
||||
)
|
||||
|
||||
var AllGenderEnum = []GenderEnum{
|
||||
GenderEnumMale,
|
||||
GenderEnumFemale,
|
||||
GenderEnumTransgenderMale,
|
||||
GenderEnumTransgenderFemale,
|
||||
GenderEnumIntersex,
|
||||
GenderEnumNonBinary,
|
||||
}
|
||||
|
||||
func (e GenderEnum) IsValid() bool {
|
||||
switch e {
|
||||
case GenderEnumMale, GenderEnumFemale, GenderEnumTransgenderMale, GenderEnumTransgenderFemale, GenderEnumIntersex, GenderEnumNonBinary:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (e GenderEnum) String() string {
|
||||
return string(e)
|
||||
}
|
||||
|
||||
func (e *GenderEnum) UnmarshalGQL(v interface{}) error {
|
||||
str, ok := v.(string)
|
||||
if !ok {
|
||||
return fmt.Errorf("enums must be strings")
|
||||
}
|
||||
|
||||
*e = GenderEnum(str)
|
||||
if !e.IsValid() {
|
||||
return fmt.Errorf("%s is not a valid GenderEnum", str)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e GenderEnum) MarshalGQL(w io.Writer) {
|
||||
fmt.Fprint(w, strconv.Quote(e.String()))
|
||||
}
|
||||
|
||||
type GenderCriterionInput struct {
|
||||
Value *GenderEnum `json:"value"`
|
||||
Modifier CriterionModifier `json:"modifier"`
|
||||
}
|
||||
|
||||
type PerformerFilterType struct {
|
||||
And *PerformerFilterType `json:"AND"`
|
||||
Or *PerformerFilterType `json:"OR"`
|
||||
Not *PerformerFilterType `json:"NOT"`
|
||||
Name *StringCriterionInput `json:"name"`
|
||||
Details *StringCriterionInput `json:"details"`
|
||||
// Filter by favorite
|
||||
FilterFavorites *bool `json:"filter_favorites"`
|
||||
// Filter by birth year
|
||||
BirthYear *IntCriterionInput `json:"birth_year"`
|
||||
// Filter by age
|
||||
Age *IntCriterionInput `json:"age"`
|
||||
// Filter by ethnicity
|
||||
Ethnicity *StringCriterionInput `json:"ethnicity"`
|
||||
// Filter by country
|
||||
Country *StringCriterionInput `json:"country"`
|
||||
// Filter by eye color
|
||||
EyeColor *StringCriterionInput `json:"eye_color"`
|
||||
// Filter by height
|
||||
Height *StringCriterionInput `json:"height"`
|
||||
// Filter by measurements
|
||||
Measurements *StringCriterionInput `json:"measurements"`
|
||||
// Filter by fake tits value
|
||||
FakeTits *StringCriterionInput `json:"fake_tits"`
|
||||
// Filter by career length
|
||||
CareerLength *StringCriterionInput `json:"career_length"`
|
||||
// Filter by tattoos
|
||||
Tattoos *StringCriterionInput `json:"tattoos"`
|
||||
// Filter by piercings
|
||||
Piercings *StringCriterionInput `json:"piercings"`
|
||||
// Filter by aliases
|
||||
Aliases *StringCriterionInput `json:"aliases"`
|
||||
// Filter by gender
|
||||
Gender *GenderCriterionInput `json:"gender"`
|
||||
// Filter to only include performers missing this property
|
||||
IsMissing *string `json:"is_missing"`
|
||||
// Filter to only include performers with these tags
|
||||
Tags *HierarchicalMultiCriterionInput `json:"tags"`
|
||||
// Filter by tag count
|
||||
TagCount *IntCriterionInput `json:"tag_count"`
|
||||
// Filter by scene count
|
||||
SceneCount *IntCriterionInput `json:"scene_count"`
|
||||
// Filter by image count
|
||||
ImageCount *IntCriterionInput `json:"image_count"`
|
||||
// Filter by gallery count
|
||||
GalleryCount *IntCriterionInput `json:"gallery_count"`
|
||||
// Filter by StashID
|
||||
StashID *StringCriterionInput `json:"stash_id"`
|
||||
// Filter by rating
|
||||
Rating *IntCriterionInput `json:"rating"`
|
||||
// Filter by url
|
||||
URL *StringCriterionInput `json:"url"`
|
||||
// Filter by hair color
|
||||
HairColor *StringCriterionInput `json:"hair_color"`
|
||||
// Filter by weight
|
||||
Weight *IntCriterionInput `json:"weight"`
|
||||
// Filter by death year
|
||||
DeathYear *IntCriterionInput `json:"death_year"`
|
||||
// Filter by studios where performer appears in scene/image/gallery
|
||||
Studios *HierarchicalMultiCriterionInput `json:"studios"`
|
||||
// Filter by autotag ignore value
|
||||
IgnoreAutoTag *bool `json:"ignore_auto_tag"`
|
||||
}
|
||||
|
||||
type PerformerReader interface {
|
||||
Find(id int) (*Performer, error)
|
||||
FindMany(ids []int) ([]*Performer, error)
|
||||
|
|
|
|||
183  pkg/models/resolution.go  Normal file
|
|
@ -0,0 +1,183 @@
|
|||
package models
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type ResolutionRange struct {
|
||||
min, max int
|
||||
}
|
||||
|
||||
var resolutionRanges = map[ResolutionEnum]ResolutionRange{
|
||||
ResolutionEnum("VERY_LOW"): {144, 239},
|
||||
ResolutionEnum("LOW"): {240, 359},
|
||||
ResolutionEnum("R360P"): {360, 479},
|
||||
ResolutionEnum("STANDARD"): {480, 539},
|
||||
ResolutionEnum("WEB_HD"): {540, 719},
|
||||
ResolutionEnum("STANDARD_HD"): {720, 1079},
|
||||
ResolutionEnum("FULL_HD"): {1080, 1439},
|
||||
ResolutionEnum("QUAD_HD"): {1440, 1919},
|
||||
ResolutionEnum("VR_HD"): {1920, 2159},
|
||||
ResolutionEnum("FOUR_K"): {2160, 2879},
|
||||
ResolutionEnum("FIVE_K"): {2880, 3383},
|
||||
ResolutionEnum("SIX_K"): {3384, 4319},
|
||||
ResolutionEnum("EIGHT_K"): {4320, 8639},
|
||||
}
|
||||
|
||||
type ResolutionEnum string
|
||||
|
||||
const (
|
||||
// 144p
|
||||
ResolutionEnumVeryLow ResolutionEnum = "VERY_LOW"
|
||||
// 240p
|
||||
ResolutionEnumLow ResolutionEnum = "LOW"
|
||||
// 360p
|
||||
ResolutionEnumR360p ResolutionEnum = "R360P"
|
||||
// 480p
|
||||
ResolutionEnumStandard ResolutionEnum = "STANDARD"
|
||||
// 540p
|
||||
ResolutionEnumWebHd ResolutionEnum = "WEB_HD"
|
||||
// 720p
|
||||
ResolutionEnumStandardHd ResolutionEnum = "STANDARD_HD"
|
||||
// 1080p
|
||||
ResolutionEnumFullHd ResolutionEnum = "FULL_HD"
|
||||
// 1440p
|
||||
ResolutionEnumQuadHd ResolutionEnum = "QUAD_HD"
|
||||
// 1920p
|
||||
ResolutionEnumVrHd ResolutionEnum = "VR_HD"
|
||||
// 4k
|
||||
ResolutionEnumFourK ResolutionEnum = "FOUR_K"
|
||||
// 5k
|
||||
ResolutionEnumFiveK ResolutionEnum = "FIVE_K"
|
||||
// 6k
|
||||
ResolutionEnumSixK ResolutionEnum = "SIX_K"
|
||||
// 8k
|
||||
ResolutionEnumEightK ResolutionEnum = "EIGHT_K"
|
||||
)
|
||||
|
||||
var AllResolutionEnum = []ResolutionEnum{
|
||||
ResolutionEnumVeryLow,
|
||||
ResolutionEnumLow,
|
||||
ResolutionEnumR360p,
|
||||
ResolutionEnumStandard,
|
||||
ResolutionEnumWebHd,
|
||||
ResolutionEnumStandardHd,
|
||||
ResolutionEnumFullHd,
|
||||
ResolutionEnumQuadHd,
|
||||
ResolutionEnumVrHd,
|
||||
ResolutionEnumFourK,
|
||||
ResolutionEnumFiveK,
|
||||
ResolutionEnumSixK,
|
||||
ResolutionEnumEightK,
|
||||
}
|
||||
|
||||
func (e ResolutionEnum) IsValid() bool {
|
||||
switch e {
|
||||
case ResolutionEnumVeryLow, ResolutionEnumLow, ResolutionEnumR360p, ResolutionEnumStandard, ResolutionEnumWebHd, ResolutionEnumStandardHd, ResolutionEnumFullHd, ResolutionEnumQuadHd, ResolutionEnumVrHd, ResolutionEnumFourK, ResolutionEnumFiveK, ResolutionEnumSixK, ResolutionEnumEightK:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (e ResolutionEnum) String() string {
|
||||
return string(e)
|
||||
}
|
||||
|
||||
func (e *ResolutionEnum) UnmarshalGQL(v interface{}) error {
|
||||
str, ok := v.(string)
|
||||
if !ok {
|
||||
return fmt.Errorf("enums must be strings")
|
||||
}
|
||||
|
||||
*e = ResolutionEnum(str)
|
||||
if !e.IsValid() {
|
||||
return fmt.Errorf("%s is not a valid ResolutionEnum", str)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e ResolutionEnum) MarshalGQL(w io.Writer) {
|
||||
fmt.Fprint(w, strconv.Quote(e.String()))
|
||||
}
|
||||
|
||||
// GetMaxResolution returns the maximum width or height that media must be
|
||||
// to qualify as this resolution.
|
||||
func (e *ResolutionEnum) GetMaxResolution() int {
|
||||
return resolutionRanges[*e].max
|
||||
}
|
||||
|
||||
// GetMinResolution returns the minimum width or height that media must be
|
||||
// to qualify as this resolution.
|
||||
func (e *ResolutionEnum) GetMinResolution() int {
|
||||
return resolutionRanges[*e].min
|
||||
}
|
||||
|
||||
type StreamingResolutionEnum string
|
||||
|
||||
const (
|
||||
// 240p
|
||||
StreamingResolutionEnumLow StreamingResolutionEnum = "LOW"
|
||||
// 480p
|
||||
StreamingResolutionEnumStandard StreamingResolutionEnum = "STANDARD"
|
||||
// 720p
|
||||
StreamingResolutionEnumStandardHd StreamingResolutionEnum = "STANDARD_HD"
|
||||
// 1080p
|
||||
StreamingResolutionEnumFullHd StreamingResolutionEnum = "FULL_HD"
|
||||
// 4k
|
||||
StreamingResolutionEnumFourK StreamingResolutionEnum = "FOUR_K"
|
||||
// Original
|
||||
StreamingResolutionEnumOriginal StreamingResolutionEnum = "ORIGINAL"
|
||||
)
|
||||
|
||||
var AllStreamingResolutionEnum = []StreamingResolutionEnum{
|
||||
StreamingResolutionEnumLow,
|
||||
StreamingResolutionEnumStandard,
|
||||
StreamingResolutionEnumStandardHd,
|
||||
StreamingResolutionEnumFullHd,
|
||||
StreamingResolutionEnumFourK,
|
||||
StreamingResolutionEnumOriginal,
|
||||
}
|
||||
|
||||
func (e StreamingResolutionEnum) IsValid() bool {
|
||||
switch e {
|
||||
case StreamingResolutionEnumLow, StreamingResolutionEnumStandard, StreamingResolutionEnumStandardHd, StreamingResolutionEnumFullHd, StreamingResolutionEnumFourK, StreamingResolutionEnumOriginal:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (e StreamingResolutionEnum) String() string {
|
||||
return string(e)
|
||||
}
|
||||
|
||||
func (e *StreamingResolutionEnum) UnmarshalGQL(v interface{}) error {
|
||||
str, ok := v.(string)
|
||||
if !ok {
|
||||
return fmt.Errorf("enums must be strings")
|
||||
}
|
||||
|
||||
*e = StreamingResolutionEnum(str)
|
||||
if !e.IsValid() {
|
||||
return fmt.Errorf("%s is not a valid StreamingResolutionEnum", str)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e StreamingResolutionEnum) MarshalGQL(w io.Writer) {
|
||||
fmt.Fprint(w, strconv.Quote(e.String()))
|
||||
}
|
||||
|
||||
var streamingResolutionMax = map[StreamingResolutionEnum]int{
|
||||
StreamingResolutionEnumLow: resolutionRanges[ResolutionEnumLow].min,
|
||||
StreamingResolutionEnumStandard: resolutionRanges[ResolutionEnumStandard].min,
|
||||
StreamingResolutionEnumStandardHd: resolutionRanges[ResolutionEnumStandardHd].min,
|
||||
StreamingResolutionEnumFullHd: resolutionRanges[ResolutionEnumFullHd].min,
|
||||
StreamingResolutionEnumFourK: resolutionRanges[ResolutionEnumFourK].min,
|
||||
StreamingResolutionEnumOriginal: 0,
|
||||
}
|
||||
|
||||
func (e StreamingResolutionEnum) GetMaxResolution() int {
|
||||
return streamingResolutionMax[e]
|
||||
}
|
||||
|
|
@ -1,5 +1,71 @@
|
|||
package models
|
||||
|
||||
type PHashDuplicationCriterionInput struct {
|
||||
Duplicated *bool `json:"duplicated"`
|
||||
// Currently unimplemented
|
||||
Distance *int `json:"distance"`
|
||||
}
|
||||
|
||||
type SceneFilterType struct {
|
||||
And *SceneFilterType `json:"AND"`
|
||||
Or *SceneFilterType `json:"OR"`
|
||||
Not *SceneFilterType `json:"NOT"`
|
||||
Title *StringCriterionInput `json:"title"`
|
||||
Details *StringCriterionInput `json:"details"`
|
||||
// Filter by file oshash
|
||||
Oshash *StringCriterionInput `json:"oshash"`
|
||||
// Filter by file checksum
|
||||
Checksum *StringCriterionInput `json:"checksum"`
|
||||
// Filter by file phash
|
||||
Phash *StringCriterionInput `json:"phash"`
|
||||
// Filter by path
|
||||
Path *StringCriterionInput `json:"path"`
|
||||
// Filter by rating
|
||||
Rating *IntCriterionInput `json:"rating"`
|
||||
// Filter by organized
|
||||
Organized *bool `json:"organized"`
|
||||
// Filter by o-counter
|
||||
OCounter *IntCriterionInput `json:"o_counter"`
|
||||
// Filter Scenes that have an exact phash match available
|
||||
Duplicated *PHashDuplicationCriterionInput `json:"duplicated"`
|
||||
// Filter by resolution
|
||||
Resolution *ResolutionCriterionInput `json:"resolution"`
|
||||
// Filter by duration (in seconds)
|
||||
Duration *IntCriterionInput `json:"duration"`
|
||||
// Filter to only include scenes which have markers. `true` or `false`
|
||||
HasMarkers *string `json:"has_markers"`
|
||||
// Filter to only include scenes missing this property
|
||||
IsMissing *string `json:"is_missing"`
|
||||
// Filter to only include scenes with this studio
|
||||
Studios *HierarchicalMultiCriterionInput `json:"studios"`
|
||||
// Filter to only include scenes with this movie
|
||||
Movies *MultiCriterionInput `json:"movies"`
|
||||
// Filter to only include scenes with these tags
|
||||
Tags *HierarchicalMultiCriterionInput `json:"tags"`
|
||||
// Filter by tag count
|
||||
TagCount *IntCriterionInput `json:"tag_count"`
|
||||
// Filter to only include scenes with performers with these tags
|
||||
PerformerTags *HierarchicalMultiCriterionInput `json:"performer_tags"`
|
||||
// Filter scenes that have performers that have been favorited
|
||||
PerformerFavorite *bool `json:"performer_favorite"`
|
||||
// Filter scenes by performer age at time of scene
|
||||
PerformerAge *IntCriterionInput `json:"performer_age"`
|
||||
// Filter to only include scenes with these performers
|
||||
Performers *MultiCriterionInput `json:"performers"`
|
||||
// Filter by performer count
|
||||
PerformerCount *IntCriterionInput `json:"performer_count"`
|
||||
// Filter by StashID
|
||||
StashID *StringCriterionInput `json:"stash_id"`
|
||||
// Filter by url
|
||||
URL *StringCriterionInput `json:"url"`
|
||||
// Filter by interactive
|
||||
Interactive *bool `json:"interactive"`
|
||||
// Filter by InteractiveSpeed
|
||||
InteractiveSpeed *IntCriterionInput `json:"interactive_speed"`
|
||||
|
||||
Captions *StringCriterionInput `json:"captions"`
|
||||
}
|
||||
|
||||
type SceneQueryOptions struct {
|
||||
QueryOptions
|
||||
SceneFilter *SceneFilterType
|
||||
|
|
@ -18,6 +84,18 @@ type SceneQueryResult struct {
|
|||
resolveErr error
|
||||
}
|
||||
|
||||
type SceneDestroyInput struct {
|
||||
ID string `json:"id"`
|
||||
DeleteFile *bool `json:"delete_file"`
|
||||
DeleteGenerated *bool `json:"delete_generated"`
|
||||
}
|
||||
|
||||
type ScenesDestroyInput struct {
|
||||
Ids []string `json:"ids"`
|
||||
DeleteFile *bool `json:"delete_file"`
|
||||
DeleteGenerated *bool `json:"delete_generated"`
|
||||
}
|
||||
|
||||
func NewSceneQueryResult(finder SceneFinder) *SceneQueryResult {
|
||||
return &SceneQueryResult{
|
||||
finder: finder,
|
||||
|
|
|
|||
|
|
@ -1,5 +1,22 @@
|
|||
package models
|
||||
|
||||
type SceneMarkerFilterType struct {
|
||||
// Filter to only include scene markers with this tag
|
||||
TagID *string `json:"tag_id"`
|
||||
// Filter to only include scene markers with these tags
|
||||
Tags *HierarchicalMultiCriterionInput `json:"tags"`
|
||||
// Filter to only include scene markers attached to a scene with these tags
|
||||
SceneTags *HierarchicalMultiCriterionInput `json:"scene_tags"`
|
||||
// Filter to only include scene markers with these performers
|
||||
Performers *MultiCriterionInput `json:"performers"`
|
||||
}
|
||||
|
||||
type MarkerStringsResultType struct {
|
||||
Count int `json:"count"`
|
||||
ID string `json:"id"`
|
||||
Title string `json:"title"`
|
||||
}
|
||||
|
||||
type SceneMarkerReader interface {
|
||||
Find(id int) (*SceneMarker, error)
|
||||
FindMany(ids []int) ([]*SceneMarker, error)
|
||||
|
|
|
|||
|
|
@@ -1,39 +1,13 @@
package models

-import (
-	"fmt"
-	"strings"
-)
-
-type StashBoxes []*StashBox
-
-func (sb StashBoxes) ResolveStashBox(source ScraperSourceInput) (*StashBox, error) {
-	if source.StashBoxIndex != nil {
-		index := source.StashBoxIndex
-		if *index < 0 || *index >= len(sb) {
-			return nil, fmt.Errorf("%w: invalid stash_box_index: %d", ErrScraperSource, index)
-		}
-
-		return sb[*index], nil
-	}
-
-	if source.StashBoxEndpoint != nil {
-		var ret *StashBox
-		endpoint := *source.StashBoxEndpoint
-		for _, b := range sb {
-			if strings.EqualFold(endpoint, b.Endpoint) {
-				ret = b
-			}
-		}
-
-		if ret == nil {
-			return nil, fmt.Errorf(`%w: stash-box with endpoint "%s"`, ErrNotFound, endpoint)
-		}
-
-		return ret, nil
-	}
-
-	// neither stash-box inputs were provided, so assume it is a scraper
-
-	return nil, nil
+type StashBoxFingerprint struct {
+	Algorithm string `json:"algorithm"`
+	Hash      string `json:"hash"`
+	Duration  int    `json:"duration"`
+}
+
+type StashBox struct {
+	Endpoint string `json:"endpoint"`
+	APIKey   string `json:"api_key"`
+	Name     string `json:"name"`
}
|
||||
|
|
|
|||
|
|
@@ -1,5 +1,10 @@
package models

+type StashIDInput struct {
+	Endpoint string `json:"endpoint"`
+	StashID  string `json:"stash_id"`
+}
+
func StashIDsFromInput(i []*StashIDInput) []StashID {
	var ret []StashID
	for _, stashID := range i {
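A minimal usage sketch for the relocated input type; the endpoint and ID values are placeholders:

package models

// exampleStashIDs converts GraphQL-style inputs into StashID values.
func exampleStashIDs() []StashID {
	inputs := []*StashIDInput{
		{Endpoint: "https://stashbox.example/graphql", StashID: "00000000-0000-0000-0000-000000000000"},
	}
	return StashIDsFromInput(inputs)
}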
|
||||
|
|
|
|||
|
|
@ -1,5 +1,33 @@
|
|||
package models
|
||||
|
||||
type StudioFilterType struct {
|
||||
And *StudioFilterType `json:"AND"`
|
||||
Or *StudioFilterType `json:"OR"`
|
||||
Not *StudioFilterType `json:"NOT"`
|
||||
Name *StringCriterionInput `json:"name"`
|
||||
Details *StringCriterionInput `json:"details"`
|
||||
// Filter to only include studios with this parent studio
|
||||
Parents *MultiCriterionInput `json:"parents"`
|
||||
// Filter by StashID
|
||||
StashID *StringCriterionInput `json:"stash_id"`
|
||||
// Filter to only include studios missing this property
|
||||
IsMissing *string `json:"is_missing"`
|
||||
// Filter by rating
|
||||
Rating *IntCriterionInput `json:"rating"`
|
||||
// Filter by scene count
|
||||
SceneCount *IntCriterionInput `json:"scene_count"`
|
||||
// Filter by image count
|
||||
ImageCount *IntCriterionInput `json:"image_count"`
|
||||
// Filter by gallery count
|
||||
GalleryCount *IntCriterionInput `json:"gallery_count"`
|
||||
// Filter by url
|
||||
URL *StringCriterionInput `json:"url"`
|
||||
// Filter by studio aliases
|
||||
Aliases *StringCriterionInput `json:"aliases"`
|
||||
// Filter by autotag ignore value
|
||||
IgnoreAutoTag *bool `json:"ignore_auto_tag"`
|
||||
}
|
||||
|
||||
type StudioReader interface {
|
||||
Find(id int) (*Studio, error)
|
||||
FindMany(ids []int) ([]*Studio, error)
|
||||
|
|
|
|||
|
|
@ -1,5 +1,37 @@
|
|||
package models
|
||||
|
||||
type TagFilterType struct {
|
||||
And *TagFilterType `json:"AND"`
|
||||
Or *TagFilterType `json:"OR"`
|
||||
Not *TagFilterType `json:"NOT"`
|
||||
// Filter by tag name
|
||||
Name *StringCriterionInput `json:"name"`
|
||||
// Filter by tag aliases
|
||||
Aliases *StringCriterionInput `json:"aliases"`
|
||||
// Filter to only include tags missing this property
|
||||
IsMissing *string `json:"is_missing"`
|
||||
// Filter by number of scenes with this tag
|
||||
SceneCount *IntCriterionInput `json:"scene_count"`
|
||||
// Filter by number of images with this tag
|
||||
ImageCount *IntCriterionInput `json:"image_count"`
|
||||
// Filter by number of galleries with this tag
|
||||
GalleryCount *IntCriterionInput `json:"gallery_count"`
|
||||
// Filter by number of performers with this tag
|
||||
PerformerCount *IntCriterionInput `json:"performer_count"`
|
||||
// Filter by number of markers with this tag
|
||||
MarkerCount *IntCriterionInput `json:"marker_count"`
|
||||
// Filter by parent tags
|
||||
Parents *HierarchicalMultiCriterionInput `json:"parents"`
|
||||
// Filter by child tags
|
||||
Children *HierarchicalMultiCriterionInput `json:"children"`
|
||||
// Filter by number of parent tags the tag has
|
||||
ParentCount *IntCriterionInput `json:"parent_count"`
|
||||
// Filter by number f child tags the tag has
|
||||
ChildCount *IntCriterionInput `json:"child_count"`
|
||||
// Filter by autotag ignore value
|
||||
IgnoreAutoTag *bool `json:"ignore_auto_tag"`
|
||||
}
|
||||
|
||||
type TagReader interface {
|
||||
Find(id int) (*Tag, error)
|
||||
FindMany(ids []int) ([]*Tag, error)
|
||||
|
|
|
|||
|
|
@ -1,10 +1,20 @@
|
|||
package plugin
|
||||
|
||||
import (
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
)
|
||||
type PluginArgInput struct {
|
||||
Key string `json:"key"`
|
||||
Value *PluginValueInput `json:"value"`
|
||||
}
|
||||
|
||||
func findArg(args []*models.PluginArgInput, name string) *models.PluginArgInput {
|
||||
type PluginValueInput struct {
|
||||
Str *string `json:"str"`
|
||||
I *int `json:"i"`
|
||||
B *bool `json:"b"`
|
||||
F *float64 `json:"f"`
|
||||
O []*PluginArgInput `json:"o"`
|
||||
A []*PluginValueInput `json:"a"`
|
||||
}
|
||||
|
||||
func findArg(args []*PluginArgInput, name string) *PluginArgInput {
|
||||
for _, v := range args {
|
||||
if v.Key == name {
|
||||
return v
|
||||
|
|
@ -14,13 +24,13 @@ func findArg(args []*models.PluginArgInput, name string) *models.PluginArgInput
|
|||
return nil
|
||||
}
|
||||
|
||||
func applyDefaultArgs(args []*models.PluginArgInput, defaultArgs map[string]string) []*models.PluginArgInput {
|
||||
func applyDefaultArgs(args []*PluginArgInput, defaultArgs map[string]string) []*PluginArgInput {
|
||||
for k, v := range defaultArgs {
|
||||
if arg := findArg(args, k); arg == nil {
|
||||
v := v // Copy v, because it's being exported out of the loop
|
||||
args = append(args, &models.PluginArgInput{
|
||||
args = append(args, &PluginArgInput{
|
||||
Key: k,
|
||||
Value: &models.PluginValueInput{
|
||||
Value: &PluginValueInput{
|
||||
Str: &v,
|
||||
},
|
||||
})
|
||||
|
|
|
|||
|
|
@ -8,7 +8,6 @@ import (
|
|||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
|
|
@ -59,11 +58,11 @@ type Config struct {
|
|||
Hooks []*HookConfig `yaml:"hooks"`
|
||||
}
|
||||
|
||||
func (c Config) getPluginTasks(includePlugin bool) []*models.PluginTask {
|
||||
var ret []*models.PluginTask
|
||||
func (c Config) getPluginTasks(includePlugin bool) []*PluginTask {
|
||||
var ret []*PluginTask
|
||||
|
||||
for _, o := range c.Tasks {
|
||||
task := &models.PluginTask{
|
||||
task := &PluginTask{
|
||||
Name: o.Name,
|
||||
Description: &o.Description,
|
||||
}
|
||||
|
|
@ -77,11 +76,11 @@ func (c Config) getPluginTasks(includePlugin bool) []*models.PluginTask {
|
|||
return ret
|
||||
}
|
||||
|
||||
func (c Config) getPluginHooks(includePlugin bool) []*models.PluginHook {
|
||||
var ret []*models.PluginHook
|
||||
func (c Config) getPluginHooks(includePlugin bool) []*PluginHook {
|
||||
var ret []*PluginHook
|
||||
|
||||
for _, o := range c.Hooks {
|
||||
hook := &models.PluginHook{
|
||||
hook := &PluginHook{
|
||||
Name: o.Name,
|
||||
Description: &o.Description,
|
||||
Hooks: convertHooks(o.TriggeredBy),
|
||||
|
|
@ -113,8 +112,8 @@ func (c Config) getName() string {
|
|||
return c.id
|
||||
}
|
||||
|
||||
func (c Config) toPlugin() *models.Plugin {
|
||||
return &models.Plugin{
|
||||
func (c Config) toPlugin() *Plugin {
|
||||
return &Plugin{
|
||||
ID: c.id,
|
||||
Name: c.getName(),
|
||||
Description: c.Description,
|
||||
|
|
|
|||
|
|
@@ -1,11 +1,10 @@
package plugin

import (
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin/common"
)

func toPluginArgs(args []*models.PluginArgInput) common.ArgsMap {
func toPluginArgs(args []*PluginArgInput) common.ArgsMap {
ret := make(common.ArgsMap)
for _, a := range args {
ret[a.Key] = toPluginArgValue(a.Value)

@@ -14,7 +13,7 @@ func toPluginArgs(args []*models.PluginArgInput) common.ArgsMap {
return ret
}

func toPluginArgValue(arg *models.PluginValueInput) common.PluginArgValue {
func toPluginArgValue(arg *PluginValueInput) common.PluginArgValue {
if arg == nil {
return nil
}
@@ -5,6 +5,13 @@ import (
"github.com/stashapp/stash/pkg/plugin/common"
)

type PluginHook struct {
Name string `json:"name"`
Description *string `json:"description"`
Hooks []string `json:"hooks"`
Plugin *Plugin `json:"plugin"`
}

type HookTriggerEnum string

// Scan-related hooks are currently disabled until post-hook execution is
@@ -22,6 +22,16 @@ import (
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
)

type Plugin struct {
ID string `json:"id"`
Name string `json:"name"`
Description *string `json:"description"`
URL *string `json:"url"`
Version *string `json:"version"`
Tasks []*PluginTask `json:"tasks"`
Hooks []*PluginHook `json:"hooks"`
}

type ServerConfig interface {
GetHost() string
GetPort() int

@@ -103,8 +113,8 @@ func loadPlugins(path string) ([]Config, error) {
}

// ListPlugins returns plugin details for all of the loaded plugins.
func (c Cache) ListPlugins() []*models.Plugin {
var ret []*models.Plugin
func (c Cache) ListPlugins() []*Plugin {
var ret []*Plugin
for _, s := range c.plugins {
ret = append(ret, s.toPlugin())
}

@@ -113,8 +123,8 @@ func (c Cache) ListPlugins() []*models.Plugin {
}

// ListPluginTasks returns all runnable plugin tasks in all loaded plugins.
func (c Cache) ListPluginTasks() []*models.PluginTask {
var ret []*models.PluginTask
func (c Cache) ListPluginTasks() []*PluginTask {
var ret []*PluginTask
for _, s := range c.plugins {
ret = append(ret, s.getPluginTasks(true)...)
}

@@ -122,7 +132,7 @@ func (c Cache) ListPluginTasks() []*models.PluginTask {
return ret
}

func buildPluginInput(plugin *Config, operation *OperationConfig, serverConnection common.StashServerConnection, args []*models.PluginArgInput) common.PluginInput {
func buildPluginInput(plugin *Config, operation *OperationConfig, serverConnection common.StashServerConnection, args []*PluginArgInput) common.PluginInput {
args = applyDefaultArgs(args, operation.DefaultArgs)
serverConnection.PluginDir = plugin.getConfigPath()
return common.PluginInput{

@@ -152,7 +162,7 @@ func (c Cache) makeServerConnection(ctx context.Context) common.StashServerConne
// CreateTask runs the plugin operation for the pluginID and operation
// name provided. Returns an error if the plugin or the operation could not be
// resolved.
func (c Cache) CreateTask(ctx context.Context, pluginID string, operationName string, args []*models.PluginArgInput, progress chan float64) (Task, error) {
func (c Cache) CreateTask(ctx context.Context, pluginID string, operationName string, args []*PluginArgInput, progress chan float64) (Task, error) {
serverConnection := c.makeServerConnection(ctx)

// find the plugin and operation
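A rough usage sketch (not part of the diff) of the relocated plugin types, assuming an already-initialised Cache, a hypothetical plugin id "example-plugin" with an operation named "scan", and that Task exposes a Start() error method as its comment suggests:

    progress := make(chan float64)
    task, err := pluginCache.CreateTask(ctx, "example-plugin", "scan", nil, progress)
    if err != nil {
        return err
    }
    // Start the task; progress updates arrive on the channel passed above.
    if err := task.Start(); err != nil {
        return err
    }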
@@ -6,6 +6,12 @@ import (
"github.com/stashapp/stash/pkg/plugin/common"
)

type PluginTask struct {
Name string `json:"name"`
Description *string `json:"description"`
Plugin *Plugin `json:"plugin"`
}

// Task is the interface that handles management of a single plugin task.
type Task interface {
// Start starts the plugin task. Returns an error if task could not be
@@ -25,12 +25,12 @@ func (e scraperAction) IsValid() bool {
}

type scraperActionImpl interface {
scrapeByURL(ctx context.Context, url string, ty models.ScrapeContentType) (models.ScrapedContent, error)
scrapeByName(ctx context.Context, name string, ty models.ScrapeContentType) ([]models.ScrapedContent, error)
scrapeByFragment(ctx context.Context, input Input) (models.ScrapedContent, error)
scrapeByURL(ctx context.Context, url string, ty ScrapeContentType) (ScrapedContent, error)
scrapeByName(ctx context.Context, name string, ty ScrapeContentType) ([]ScrapedContent, error)
scrapeByFragment(ctx context.Context, input Input) (ScrapedContent, error)

scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error)
scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error)
scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*ScrapedScene, error)
scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*ScrapedGallery, error)
}

func (c config) getScraper(scraper scraperTypeConfig, client *http.Client, txnManager models.TransactionManager, globalConfig GlobalConfig) scraperActionImpl {
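To make the reshaped interface concrete, a no-op implementation inside the scraper package might look like the sketch below (noopScraper is illustrative and not part of the codebase):

    type noopScraper struct{}

    func (noopScraper) scrapeByURL(ctx context.Context, url string, ty ScrapeContentType) (ScrapedContent, error) {
        return nil, ErrNotSupported
    }

    func (noopScraper) scrapeByName(ctx context.Context, name string, ty ScrapeContentType) ([]ScrapedContent, error) {
        return nil, ErrNotSupported
    }

    func (noopScraper) scrapeByFragment(ctx context.Context, input Input) (ScrapedContent, error) {
        return nil, ErrNotSupported
    }

    func (noopScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*ScrapedScene, error) {
        return nil, nil // nothing scraped
    }

    func (noopScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*ScrapedGallery, error) {
        return nil, nil // nothing scraped
    }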
@@ -83,8 +83,8 @@ func autotagMatchTags(path string, tagReader models.TagReader, trimExt bool) ([]
return ret, nil
}

func (s autotagScraper) viaScene(ctx context.Context, _client *http.Client, scene *models.Scene) (*models.ScrapedScene, error) {
var ret *models.ScrapedScene
func (s autotagScraper) viaScene(ctx context.Context, _client *http.Client, scene *models.Scene) (*ScrapedScene, error) {
var ret *ScrapedScene
const trimExt = false

// populate performers, studio and tags based on scene path

@@ -105,7 +105,7 @@ func (s autotagScraper) viaScene(ctx context.Context, _client *http.Client, scen
}

if len(performers) > 0 || studio != nil || len(tags) > 0 {
ret = &models.ScrapedScene{
ret = &ScrapedScene{
Performers: performers,
Studio: studio,
Tags: tags,

@@ -120,7 +120,7 @@ func (s autotagScraper) viaScene(ctx context.Context, _client *http.Client, scen
return ret, nil
}

func (s autotagScraper) viaGallery(ctx context.Context, _client *http.Client, gallery *models.Gallery) (*models.ScrapedGallery, error) {
func (s autotagScraper) viaGallery(ctx context.Context, _client *http.Client, gallery *models.Gallery) (*ScrapedGallery, error) {
if !gallery.Path.Valid {
// not valid for non-path-based galleries
return nil, nil

@@ -129,7 +129,7 @@ func (s autotagScraper) viaGallery(ctx context.Context, _client *http.Client, ga
// only trim extension if gallery is file-based
trimExt := gallery.Zip

var ret *models.ScrapedGallery
var ret *ScrapedGallery

// populate performers, studio and tags based on scene path
if err := s.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {

@@ -149,7 +149,7 @@ func (s autotagScraper) viaGallery(ctx context.Context, _client *http.Client, ga
}

if len(performers) > 0 || studio != nil || len(tags) > 0 {
ret = &models.ScrapedGallery{
ret = &ScrapedGallery{
Performers: performers,
Studio: studio,
Tags: tags,

@@ -164,33 +164,33 @@ func (s autotagScraper) viaGallery(ctx context.Context, _client *http.Client, ga
return ret, nil
}

func (s autotagScraper) supports(ty models.ScrapeContentType) bool {
func (s autotagScraper) supports(ty ScrapeContentType) bool {
switch ty {
case models.ScrapeContentTypeScene:
case ScrapeContentTypeScene:
return true
case models.ScrapeContentTypeGallery:
case ScrapeContentTypeGallery:
return true
}

return false
}

func (s autotagScraper) supportsURL(url string, ty models.ScrapeContentType) bool {
func (s autotagScraper) supportsURL(url string, ty ScrapeContentType) bool {
return false
}

func (s autotagScraper) spec() models.Scraper {
supportedScrapes := []models.ScrapeType{
models.ScrapeTypeFragment,
func (s autotagScraper) spec() Scraper {
supportedScrapes := []ScrapeType{
ScrapeTypeFragment,
}

return models.Scraper{
return Scraper{
ID: autoTagScraperID,
Name: autoTagScraperName,
Scene: &models.ScraperSpec{
Scene: &ScraperSpec{
SupportedScrapes: supportedScrapes,
},
Gallery: &models.ScraperSpec{
Gallery: &ScraperSpec{
SupportedScrapes: supportedScrapes,
},
}
@@ -148,8 +148,8 @@ func (c *Cache) ReloadScrapers() error {

// ListScrapers lists scrapers matching one of the given types.
// Returns a list of scrapers, sorted by their ID.
func (c Cache) ListScrapers(tys []models.ScrapeContentType) []*models.Scraper {
var ret []*models.Scraper
func (c Cache) ListScrapers(tys []ScrapeContentType) []*Scraper {
var ret []*Scraper
for _, s := range c.scrapers {
for _, t := range tys {
if s.supports(t) {

@@ -168,7 +168,7 @@ func (c Cache) ListScrapers(tys []models.ScrapeContentType) []*models.Scraper {
}

// GetScraper returns the scraper matching the provided id.
func (c Cache) GetScraper(scraperID string) *models.Scraper {
func (c Cache) GetScraper(scraperID string) *Scraper {
s := c.findScraper(scraperID)
if s != nil {
spec := s.spec()

@@ -187,7 +187,7 @@ func (c Cache) findScraper(scraperID string) scraper {
return nil
}

func (c Cache) ScrapeName(ctx context.Context, id, query string, ty models.ScrapeContentType) ([]models.ScrapedContent, error) {
func (c Cache) ScrapeName(ctx context.Context, id, query string, ty ScrapeContentType) ([]ScrapedContent, error) {
// find scraper with the provided id
s := c.findScraper(id)
if s == nil {

@@ -206,7 +206,7 @@ func (c Cache) ScrapeName(ctx context.Context, id, query string, ty models.Scrap
}

// ScrapeFragment uses the given fragment input to scrape
func (c Cache) ScrapeFragment(ctx context.Context, id string, input Input) (models.ScrapedContent, error) {
func (c Cache) ScrapeFragment(ctx context.Context, id string, input Input) (ScrapedContent, error) {
s := c.findScraper(id)
if s == nil {
return nil, fmt.Errorf("%w: id %s", ErrNotFound, id)

@@ -228,7 +228,7 @@ func (c Cache) ScrapeFragment(ctx context.Context, id string, input Input) (mode
// ScrapeURL scrapes a given url for the given content. Searches the scraper cache
// and picks the first scraper capable of scraping the given url into the desired
// content. Returns the scraped content or an error if the scrape fails.
func (c Cache) ScrapeURL(ctx context.Context, url string, ty models.ScrapeContentType) (models.ScrapedContent, error) {
func (c Cache) ScrapeURL(ctx context.Context, url string, ty ScrapeContentType) (ScrapedContent, error) {
for _, s := range c.scrapers {
if s.supportsURL(url, ty) {
ul, ok := s.(urlScraper)

@@ -251,7 +251,7 @@ func (c Cache) ScrapeURL(ctx context.Context, url string, ty models.ScrapeConten
return nil, nil
}

func (c Cache) ScrapeID(ctx context.Context, scraperID string, id int, ty models.ScrapeContentType) (models.ScrapedContent, error) {
func (c Cache) ScrapeID(ctx context.Context, scraperID string, id int, ty ScrapeContentType) (ScrapedContent, error) {
s := c.findScraper(scraperID)
if s == nil {
return nil, fmt.Errorf("%w: id %s", ErrNotFound, scraperID)

@@ -261,9 +261,9 @@ func (c Cache) ScrapeID(ctx context.Context, scraperID string, id int, ty models
return nil, fmt.Errorf("%w: cannot use scraper %s to scrape %v content", ErrNotSupported, scraperID, ty)
}

var ret models.ScrapedContent
var ret ScrapedContent
switch ty {
case models.ScrapeContentTypeScene:
case ScrapeContentTypeScene:
ss, ok := s.(sceneScraper)
if !ok {
return nil, fmt.Errorf("%w: cannot use scraper %s as a scene scraper", ErrNotSupported, scraperID)

@@ -284,7 +284,7 @@ func (c Cache) ScrapeID(ctx context.Context, scraperID string, id int, ty models
if scraped != nil {
ret = scraped
}
case models.ScrapeContentTypeGallery:
case ScrapeContentTypeGallery:
gs, ok := s.(galleryScraper)
if !ok {
return nil, fmt.Errorf("%w: cannot use scraper %s as a gallery scraper", ErrNotSupported, scraperID)
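A caller-side sketch (not part of the diff), assuming an initialised scraper Cache; the concrete type behind the returned ScrapedContent depends on the requested content type, so the type assertion below is an assumption for the scene case:

    content, err := scraperCache.ScrapeURL(ctx, "https://example.org/scenes/1", ScrapeContentTypeScene)
    if err != nil {
        return err
    }
    if scene, ok := content.(*ScrapedScene); ok && scene != nil {
        // use the scraped scene fields
    }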
@@ -8,7 +8,6 @@ import (
"path/filepath"
"strings"

"github.com/stashapp/stash/pkg/models"
"gopkg.in/yaml.v2"
)

@@ -233,21 +232,21 @@ func loadConfigFromYAMLFile(path string) (*config, error) {
return ret, nil
}

func (c config) spec() models.Scraper {
ret := models.Scraper{
func (c config) spec() Scraper {
ret := Scraper{
ID: c.ID,
Name: c.Name,
}

performer := models.ScraperSpec{}
performer := ScraperSpec{}
if c.PerformerByName != nil {
performer.SupportedScrapes = append(performer.SupportedScrapes, models.ScrapeTypeName)
performer.SupportedScrapes = append(performer.SupportedScrapes, ScrapeTypeName)
}
if c.PerformerByFragment != nil {
performer.SupportedScrapes = append(performer.SupportedScrapes, models.ScrapeTypeFragment)
performer.SupportedScrapes = append(performer.SupportedScrapes, ScrapeTypeFragment)
}
if len(c.PerformerByURL) > 0 {
performer.SupportedScrapes = append(performer.SupportedScrapes, models.ScrapeTypeURL)
performer.SupportedScrapes = append(performer.SupportedScrapes, ScrapeTypeURL)
for _, v := range c.PerformerByURL {
performer.Urls = append(performer.Urls, v.URL...)
}

@@ -257,15 +256,15 @@ func (c config) spec() models.Scraper {
ret.Performer = &performer
}

scene := models.ScraperSpec{}
scene := ScraperSpec{}
if c.SceneByFragment != nil {
scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeFragment)
scene.SupportedScrapes = append(scene.SupportedScrapes, ScrapeTypeFragment)
}
if c.SceneByName != nil && c.SceneByQueryFragment != nil {
scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeName)
scene.SupportedScrapes = append(scene.SupportedScrapes, ScrapeTypeName)
}
if len(c.SceneByURL) > 0 {
scene.SupportedScrapes = append(scene.SupportedScrapes, models.ScrapeTypeURL)
scene.SupportedScrapes = append(scene.SupportedScrapes, ScrapeTypeURL)
for _, v := range c.SceneByURL {
scene.Urls = append(scene.Urls, v.URL...)
}

@@ -275,12 +274,12 @@ func (c config) spec() models.Scraper {
ret.Scene = &scene
}

gallery := models.ScraperSpec{}
gallery := ScraperSpec{}
if c.GalleryByFragment != nil {
gallery.SupportedScrapes = append(gallery.SupportedScrapes, models.ScrapeTypeFragment)
gallery.SupportedScrapes = append(gallery.SupportedScrapes, ScrapeTypeFragment)
}
if len(c.GalleryByURL) > 0 {
gallery.SupportedScrapes = append(gallery.SupportedScrapes, models.ScrapeTypeURL)
gallery.SupportedScrapes = append(gallery.SupportedScrapes, ScrapeTypeURL)
for _, v := range c.GalleryByURL {
gallery.Urls = append(gallery.Urls, v.URL...)
}

@@ -290,9 +289,9 @@ func (c config) spec() models.Scraper {
ret.Gallery = &gallery
}

movie := models.ScraperSpec{}
movie := ScraperSpec{}
if len(c.MovieByURL) > 0 {
movie.SupportedScrapes = append(movie.SupportedScrapes, models.ScrapeTypeURL)
movie.SupportedScrapes = append(movie.SupportedScrapes, ScrapeTypeURL)
for _, v := range c.MovieByURL {
movie.Urls = append(movie.Urls, v.URL...)
}

@@ -305,42 +304,42 @@ func (c config) spec() models.Scraper {
return ret
}

func (c config) supports(ty models.ScrapeContentType) bool {
func (c config) supports(ty ScrapeContentType) bool {
switch ty {
case models.ScrapeContentTypePerformer:
case ScrapeContentTypePerformer:
return c.PerformerByName != nil || c.PerformerByFragment != nil || len(c.PerformerByURL) > 0
case models.ScrapeContentTypeScene:
case ScrapeContentTypeScene:
return (c.SceneByName != nil && c.SceneByQueryFragment != nil) || c.SceneByFragment != nil || len(c.SceneByURL) > 0
case models.ScrapeContentTypeGallery:
case ScrapeContentTypeGallery:
return c.GalleryByFragment != nil || len(c.GalleryByURL) > 0
case models.ScrapeContentTypeMovie:
case ScrapeContentTypeMovie:
return len(c.MovieByURL) > 0
}

panic("Unhandled ScrapeContentType")
}

func (c config) matchesURL(url string, ty models.ScrapeContentType) bool {
func (c config) matchesURL(url string, ty ScrapeContentType) bool {
switch ty {
case models.ScrapeContentTypePerformer:
case ScrapeContentTypePerformer:
for _, scraper := range c.PerformerByURL {
if scraper.matchesURL(url) {
return true
}
}
case models.ScrapeContentTypeScene:
case ScrapeContentTypeScene:
for _, scraper := range c.SceneByURL {
if scraper.matchesURL(url) {
return true
}
}
case models.ScrapeContentTypeGallery:
case ScrapeContentTypeGallery:
for _, scraper := range c.GalleryByURL {
if scraper.matchesURL(url) {
return true
}
}
case models.ScrapeContentTypeMovie:
case ScrapeContentTypeMovie:
for _, scraper := range c.MovieByURL {
if scraper.matchesURL(url) {
return true
pkg/scraper/gallery.go (new normal file, 22 lines)

@@ -0,0 +1,22 @@
package scraper

import "github.com/stashapp/stash/pkg/models"

type ScrapedGallery struct {
Title *string `json:"title"`
Details *string `json:"details"`
URL *string `json:"url"`
Date *string `json:"date"`
Studio *models.ScrapedStudio `json:"studio"`
Tags []*models.ScrapedTag `json:"tags"`
Performers []*models.ScrapedPerformer `json:"performers"`
}

func (ScrapedGallery) IsScrapedContent() {}

type ScrapedGalleryInput struct {
Title *string `json:"title"`
Details *string `json:"details"`
URL *string `json:"url"`
Date *string `json:"date"`
}
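The IsScrapedContent marker method is what ties this relocated type into the ScrapedContent union. Assuming ScrapedContent is the usual single-method marker interface, a compile-time assertion of that relationship would be:

    // Fails to compile if ScrapedGallery ever stops implementing ScrapedContent.
    var _ ScrapedContent = ScrapedGallery{}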
@@ -23,7 +23,7 @@ func newGroupScraper(c config, txnManager models.TransactionManager, globalConfi
}
}

func (g group) spec() models.Scraper {
func (g group) spec() Scraper {
return g.config.spec()
}

@@ -42,14 +42,14 @@ func (g group) fragmentScraper(input Input) *scraperTypeConfig {
return nil
}

func (g group) viaFragment(ctx context.Context, client *http.Client, input Input) (models.ScrapedContent, error) {
func (g group) viaFragment(ctx context.Context, client *http.Client, input Input) (ScrapedContent, error) {
stc := g.fragmentScraper(input)
if stc == nil {
// If there's no performer fragment scraper in the group, we try to use
// the URL scraper. Check if there's an URL in the input, and then shift
// to an URL scrape if it's present.
if input.Performer != nil && input.Performer.URL != nil && *input.Performer.URL != "" {
return g.viaURL(ctx, client, *input.Performer.URL, models.ScrapeContentTypePerformer)
return g.viaURL(ctx, client, *input.Performer.URL, ScrapeContentTypePerformer)
}

return nil, ErrNotSupported

@@ -59,7 +59,7 @@ func (g group) viaFragment(ctx context.Context, client *http.Client, input Input
return s.scrapeByFragment(ctx, input)
}

func (g group) viaScene(ctx context.Context, client *http.Client, scene *models.Scene) (*models.ScrapedScene, error) {
func (g group) viaScene(ctx context.Context, client *http.Client, scene *models.Scene) (*ScrapedScene, error) {
if g.config.SceneByFragment == nil {
return nil, ErrNotSupported
}

@@ -68,7 +68,7 @@ func (g group) viaScene(ctx context.Context, client *http.Client, scene *models.
return s.scrapeSceneByScene(ctx, scene)
}

func (g group) viaGallery(ctx context.Context, client *http.Client, gallery *models.Gallery) (*models.ScrapedGallery, error) {
func (g group) viaGallery(ctx context.Context, client *http.Client, gallery *models.Gallery) (*ScrapedGallery, error) {
if g.config.GalleryByFragment == nil {
return nil, ErrNotSupported
}

@@ -77,22 +77,22 @@ func (g group) viaGallery(ctx context.Context, client *http.Client, gallery *mod
return s.scrapeGalleryByGallery(ctx, gallery)
}

func loadUrlCandidates(c config, ty models.ScrapeContentType) []*scrapeByURLConfig {
func loadUrlCandidates(c config, ty ScrapeContentType) []*scrapeByURLConfig {
switch ty {
case models.ScrapeContentTypePerformer:
case ScrapeContentTypePerformer:
return c.PerformerByURL
case models.ScrapeContentTypeScene:
case ScrapeContentTypeScene:
return c.SceneByURL
case models.ScrapeContentTypeMovie:
case ScrapeContentTypeMovie:
return c.MovieByURL
case models.ScrapeContentTypeGallery:
case ScrapeContentTypeGallery:
return c.GalleryByURL
}

panic("loadUrlCandidates: unreachable")
}

func (g group) viaURL(ctx context.Context, client *http.Client, url string, ty models.ScrapeContentType) (models.ScrapedContent, error) {
func (g group) viaURL(ctx context.Context, client *http.Client, url string, ty ScrapeContentType) (ScrapedContent, error) {
candidates := loadUrlCandidates(g.config, ty)
for _, scraper := range candidates {
if scraper.matchesURL(url) {

@@ -111,16 +111,16 @@ func (g group) viaURL(ctx context.Context, client *http.Client, url string, ty m
return nil, nil
}

func (g group) viaName(ctx context.Context, client *http.Client, name string, ty models.ScrapeContentType) ([]models.ScrapedContent, error) {
func (g group) viaName(ctx context.Context, client *http.Client, name string, ty ScrapeContentType) ([]ScrapedContent, error) {
switch ty {
case models.ScrapeContentTypePerformer:
case ScrapeContentTypePerformer:
if g.config.PerformerByName == nil {
break
}

s := g.config.getScraper(*g.config.PerformerByName, client, g.txnManager, g.globalConf)
return s.scrapeByName(ctx, name, ty)
case models.ScrapeContentTypeScene:
case ScrapeContentTypeScene:
if g.config.SceneByName == nil {
break
}

@@ -132,10 +132,10 @@ func (g group) viaName(ctx context.Context, client *http.Client, name string, ty
return nil, fmt.Errorf("%w: cannot load %v by name", ErrNotSupported, ty)
}

func (g group) supports(ty models.ScrapeContentType) bool {
func (g group) supports(ty ScrapeContentType) bool {
return g.config.supports(ty)
}

func (g group) supportsURL(url string, ty models.ScrapeContentType) bool {
func (g group) supportsURL(url string, ty ScrapeContentType) bool {
return g.config.matchesURL(url, ty)
}
@@ -29,7 +29,7 @@ func setPerformerImage(ctx context.Context, client *http.Client, p *models.Scrap
return nil
}

func setSceneImage(ctx context.Context, client *http.Client, s *models.ScrapedScene, globalConfig GlobalConfig) error {
func setSceneImage(ctx context.Context, client *http.Client, s *ScrapedScene, globalConfig GlobalConfig) error {
// don't try to get the image if it doesn't appear to be a URL
if s.Image == nil || !strings.HasPrefix(*s.Image, "http") {
// nothing to do
@@ -75,7 +75,7 @@ func (s *jsonScraper) loadURL(ctx context.Context, url string) (string, error) {
return docStr, err
}

func (s *jsonScraper) scrapeByURL(ctx context.Context, url string, ty models.ScrapeContentType) (models.ScrapedContent, error) {
func (s *jsonScraper) scrapeByURL(ctx context.Context, url string, ty ScrapeContentType) (ScrapedContent, error) {
u := replaceURL(url, s.scraper) // allow a URL Replace for url-queries
doc, scraper, err := s.scrapeURL(ctx, u)
if err != nil {

@@ -84,20 +84,20 @@ func (s *jsonScraper) scrapeByURL(ctx context.Context, url string, ty models.Scr
q := s.getJsonQuery(doc)
switch ty {
case models.ScrapeContentTypePerformer:
case ScrapeContentTypePerformer:
return scraper.scrapePerformer(ctx, q)
case models.ScrapeContentTypeScene:
case ScrapeContentTypeScene:
return scraper.scrapeScene(ctx, q)
case models.ScrapeContentTypeGallery:
case ScrapeContentTypeGallery:
return scraper.scrapeGallery(ctx, q)
case models.ScrapeContentTypeMovie:
case ScrapeContentTypeMovie:
return scraper.scrapeMovie(ctx, q)
}

return nil, ErrNotSupported
}

func (s *jsonScraper) scrapeByName(ctx context.Context, name string, ty models.ScrapeContentType) ([]models.ScrapedContent, error) {
func (s *jsonScraper) scrapeByName(ctx context.Context, name string, ty ScrapeContentType) ([]ScrapedContent, error) {
scraper := s.getJsonScraper()

if scraper == nil {

@@ -121,9 +121,9 @@ func (s *jsonScraper) scrapeByName(ctx context.Context, name string, ty models.S
q := s.getJsonQuery(doc)
q.setType(SearchQuery)

var content []models.ScrapedContent
var content []ScrapedContent
switch ty {
case models.ScrapeContentTypePerformer:
case ScrapeContentTypePerformer:
performers, err := scraper.scrapePerformers(ctx, q)
if err != nil {
return nil, err

@@ -134,7 +134,7 @@ func (s *jsonScraper) scrapeByName(ctx context.Context, name string, ty models.S
}

return content, nil
case models.ScrapeContentTypeScene:
case ScrapeContentTypeScene:
scenes, err := scraper.scrapeScenes(ctx, q)
if err != nil {
return nil, err

@@ -150,7 +150,7 @@ func (s *jsonScraper) scrapeByName(ctx context.Context, name string, ty models.S
return nil, ErrNotSupported
}

func (s *jsonScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*models.ScrapedScene, error) {
func (s *jsonScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*ScrapedScene, error) {
// construct the URL
queryURL := queryURLParametersFromScene(scene)
if s.scraper.QueryURLReplacements != nil {

@@ -174,7 +174,7 @@ func (s *jsonScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scen
return scraper.scrapeScene(ctx, q)
}

func (s *jsonScraper) scrapeByFragment(ctx context.Context, input Input) (models.ScrapedContent, error) {
func (s *jsonScraper) scrapeByFragment(ctx context.Context, input Input) (ScrapedContent, error) {
switch {
case input.Gallery != nil:
return nil, fmt.Errorf("%w: cannot use a json scraper as a gallery fragment scraper", ErrNotSupported)

@@ -209,7 +209,7 @@ func (s *jsonScraper) scrapeByFragment(ctx context.Context, input Input) (models
return scraper.scrapeScene(ctx, q)
}

func (s *jsonScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*models.ScrapedGallery, error) {
func (s *jsonScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*ScrapedGallery, error) {
// construct the URL
queryURL := queryURLParametersFromGallery(gallery)
if s.scraper.QueryURLReplacements != nil {
@@ -809,8 +809,8 @@ func (s mappedScraper) scrapePerformers(ctx context.Context, q mappedQuery) ([]*
return ret, nil
}

func (s mappedScraper) processScene(ctx context.Context, q mappedQuery, r mappedResult) *models.ScrapedScene {
var ret models.ScrapedScene
func (s mappedScraper) processScene(ctx context.Context, q mappedQuery, r mappedResult) *ScrapedScene {
var ret ScrapedScene

sceneScraperConfig := s.Scene

@@ -884,8 +884,8 @@ func (s mappedScraper) processScene(ctx context.Context, q mappedQuery, r mapped
return &ret
}

func (s mappedScraper) scrapeScenes(ctx context.Context, q mappedQuery) ([]*models.ScrapedScene, error) {
var ret []*models.ScrapedScene
func (s mappedScraper) scrapeScenes(ctx context.Context, q mappedQuery) ([]*ScrapedScene, error) {
var ret []*ScrapedScene

sceneScraperConfig := s.Scene
sceneMap := sceneScraperConfig.mappedConfig

@@ -903,8 +903,8 @@ func (s mappedScraper) scrapeScenes(ctx context.Context, q mappedQuery) ([]*mode
return ret, nil
}

func (s mappedScraper) scrapeScene(ctx context.Context, q mappedQuery) (*models.ScrapedScene, error) {
var ret *models.ScrapedScene
func (s mappedScraper) scrapeScene(ctx context.Context, q mappedQuery) (*ScrapedScene, error) {
var ret *ScrapedScene

sceneScraperConfig := s.Scene
sceneMap := sceneScraperConfig.mappedConfig

@@ -921,8 +921,8 @@ func (s mappedScraper) scrapeScene(ctx context.Context, q mappedQuery) (*models.
return ret, nil
}

func (s mappedScraper) scrapeGallery(ctx context.Context, q mappedQuery) (*models.ScrapedGallery, error) {
var ret *models.ScrapedGallery
func (s mappedScraper) scrapeGallery(ctx context.Context, q mappedQuery) (*ScrapedGallery, error) {
var ret *ScrapedGallery

galleryScraperConfig := s.Gallery
galleryMap := galleryScraperConfig.mappedConfig

@@ -937,7 +937,7 @@ func (s mappedScraper) scrapeGallery(ctx context.Context, q mappedQuery) (*model
logger.Debug(`Processing gallery:`)
results := galleryMap.process(ctx, q, s.Common)
if len(results) > 0 {
ret = &models.ScrapedGallery{}
ret = &ScrapedGallery{}

results[0].apply(ret)
pkg/scraper/movie.go (new normal file, 12 lines)

@@ -0,0 +1,12 @@
package scraper

type ScrapedMovieInput struct {
Name *string `json:"name"`
Aliases *string `json:"aliases"`
Duration *string `json:"duration"`
Date *string `json:"date"`
Rating *string `json:"rating"`
Director *string `json:"director"`
URL *string `json:"url"`
Synopsis *string `json:"synopsis"`
}
pkg/scraper/performer.go (new normal file, 27 lines)

@@ -0,0 +1,27 @@
package scraper

type ScrapedPerformerInput struct {
// Set if performer matched
StoredID *string `json:"stored_id"`
Name *string `json:"name"`
Gender *string `json:"gender"`
URL *string `json:"url"`
Twitter *string `json:"twitter"`
Instagram *string `json:"instagram"`
Birthdate *string `json:"birthdate"`
Ethnicity *string `json:"ethnicity"`
Country *string `json:"country"`
EyeColor *string `json:"eye_color"`
Height *string `json:"height"`
Measurements *string `json:"measurements"`
FakeTits *string `json:"fake_tits"`
CareerLength *string `json:"career_length"`
Tattoos *string `json:"tattoos"`
Piercings *string `json:"piercings"`
Aliases *string `json:"aliases"`
Details *string `json:"details"`
DeathDate *string `json:"death_date"`
HairColor *string `json:"hair_color"`
Weight *string `json:"weight"`
RemoteSiteID *string `json:"remote_site_id"`
}
@@ -11,7 +11,7 @@ import (
// postScrape handles post-processing of scraped content. If the content
// requires post-processing, this function fans out to the given content
// type and post-processes it.
func (c Cache) postScrape(ctx context.Context, content models.ScrapedContent) (models.ScrapedContent, error) {
func (c Cache) postScrape(ctx context.Context, content ScrapedContent) (ScrapedContent, error) {
// Analyze the concrete type, call the right post-processing function
switch v := content.(type) {
case *models.ScrapedPerformer:

@@ -20,17 +20,17 @@ func (c Cache) postScrape(ctx context.Context, content models.ScrapedContent) (m
}
case models.ScrapedPerformer:
return c.postScrapePerformer(ctx, v)
case *models.ScrapedScene:
case *ScrapedScene:
if v != nil {
return c.postScrapeScene(ctx, *v)
}
case models.ScrapedScene:
case ScrapedScene:
return c.postScrapeScene(ctx, v)
case *models.ScrapedGallery:
case *ScrapedGallery:
if v != nil {
return c.postScrapeGallery(ctx, *v)
}
case models.ScrapedGallery:
case ScrapedGallery:
return c.postScrapeGallery(ctx, v)
case *models.ScrapedMovie:
if v != nil {

@@ -44,7 +44,7 @@ func (c Cache) postScrape(ctx context.Context, content models.ScrapedContent) (m
return content, nil
}

func (c Cache) postScrapePerformer(ctx context.Context, p models.ScrapedPerformer) (models.ScrapedContent, error) {
func (c Cache) postScrapePerformer(ctx context.Context, p models.ScrapedPerformer) (ScrapedContent, error) {
if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
tqb := r.Tag()

@@ -67,7 +67,7 @@ func (c Cache) postScrapePerformer(ctx context.Context, p models.ScrapedPerforme
return p, nil
}

func (c Cache) postScrapeMovie(ctx context.Context, m models.ScrapedMovie) (models.ScrapedContent, error) {
func (c Cache) postScrapeMovie(ctx context.Context, m models.ScrapedMovie) (ScrapedContent, error) {
if m.Studio != nil {
if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
return match.ScrapedStudio(r.Studio(), m.Studio, nil)

@@ -105,7 +105,7 @@ func (c Cache) postScrapeScenePerformer(ctx context.Context, p models.ScrapedPer
return nil
}

func (c Cache) postScrapeScene(ctx context.Context, scene models.ScrapedScene) (models.ScrapedContent, error) {
func (c Cache) postScrapeScene(ctx context.Context, scene ScrapedScene) (ScrapedContent, error) {
if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
pqb := r.Performer()
mqb := r.Movie()

@@ -159,7 +159,7 @@ func (c Cache) postScrapeScene(ctx context.Context, scene models.ScrapedScene) (
return scene, nil
}

func (c Cache) postScrapeGallery(ctx context.Context, g models.ScrapedGallery) (models.ScrapedContent, error) {
func (c Cache) postScrapeGallery(ctx context.Context, g ScrapedGallery) (ScrapedContent, error) {
if err := c.txnManager.WithReadTxn(ctx, func(r models.ReaderRepository) error {
pqb := r.Performer()
tqb := r.Tag()
Some files were not shown because too many files have changed in this diff.