[Files Refactor] Import export fixup (#2763)

* Adjust json schema
* Remove mappings file from export
* Import file/folder support
* Update documentation
* Make gallery filenames unique
WithoutPants 2022-08-30 12:17:15 +10:00
parent 1222b7b87b
commit 0b534d89c6
35 changed files with 3315 additions and 3146 deletions

View file

@@ -0,0 +1,259 @@
package manager

import (
	"context"
	"errors"
	"fmt"
	"path/filepath"
	"time"

	"github.com/stashapp/stash/pkg/file"
	"github.com/stashapp/stash/pkg/models/jsonschema"
)

// HACK: this is all here because of an import loop in jsonschema -> models -> file

var errZipFileNotExist = errors.New("zip file does not exist")

type fileFolderImporter struct {
	ReaderWriter file.Store
	FolderStore  file.FolderStore
	Input        jsonschema.DirEntry

	file   file.File
	folder *file.Folder
}

func (i *fileFolderImporter) PreImport(ctx context.Context) error {
	var err error

	switch ff := i.Input.(type) {
	case *jsonschema.BaseDirEntry:
		i.folder, err = i.folderJSONToFolder(ctx, ff)
	default:
		i.file, err = i.fileJSONToFile(ctx, i.Input)
	}

	return err
}

func (i *fileFolderImporter) folderJSONToFolder(ctx context.Context, baseJSON *jsonschema.BaseDirEntry) (*file.Folder, error) {
	path := filepath.FromSlash(baseJSON.Path)

	ret := file.Folder{
		DirEntry: file.DirEntry{
			ModTime: baseJSON.ModTime.GetTime(),
		},
		Path:      path,
		CreatedAt: baseJSON.CreatedAt.GetTime(),
		UpdatedAt: baseJSON.UpdatedAt.GetTime(),
	}

	if err := i.populateZipFileID(ctx, &ret.DirEntry); err != nil {
		return nil, err
	}

	// set parent folder id during the creation process
	return &ret, nil
}

func (i *fileFolderImporter) fileJSONToFile(ctx context.Context, fileJSON jsonschema.DirEntry) (file.File, error) {
	switch ff := fileJSON.(type) {
	case *jsonschema.VideoFile:
		baseFile, err := i.baseFileJSONToBaseFile(ctx, ff.BaseFile)
		if err != nil {
			return nil, err
		}

		return &file.VideoFile{
			BaseFile:         baseFile,
			Format:           ff.Format,
			Width:            ff.Width,
			Height:           ff.Height,
			Duration:         ff.Duration,
			VideoCodec:       ff.VideoCodec,
			AudioCodec:       ff.AudioCodec,
			FrameRate:        ff.FrameRate,
			BitRate:          ff.BitRate,
			Interactive:      ff.Interactive,
			InteractiveSpeed: ff.InteractiveSpeed,
		}, nil
	case *jsonschema.ImageFile:
		baseFile, err := i.baseFileJSONToBaseFile(ctx, ff.BaseFile)
		if err != nil {
			return nil, err
		}

		return &file.ImageFile{
			BaseFile: baseFile,
			Format:   ff.Format,
			Width:    ff.Width,
			Height:   ff.Height,
		}, nil
	case *jsonschema.BaseFile:
		return i.baseFileJSONToBaseFile(ctx, ff)
	}

	return nil, fmt.Errorf("unknown file type")
}

func (i *fileFolderImporter) baseFileJSONToBaseFile(ctx context.Context, baseJSON *jsonschema.BaseFile) (*file.BaseFile, error) {
	path := filepath.FromSlash(baseJSON.Path)

	baseFile := file.BaseFile{
		DirEntry: file.DirEntry{
			ModTime: baseJSON.ModTime.GetTime(),
		},
		Basename:  filepath.Base(path),
		Size:      baseJSON.Size,
		CreatedAt: baseJSON.CreatedAt.GetTime(),
		UpdatedAt: baseJSON.UpdatedAt.GetTime(),
	}

	for _, fp := range baseJSON.Fingerprints {
		baseFile.Fingerprints = append(baseFile.Fingerprints, file.Fingerprint{
			Type:        fp.Type,
			Fingerprint: fp.Fingerprint,
		})
	}

	if err := i.populateZipFileID(ctx, &baseFile.DirEntry); err != nil {
		return nil, err
	}

	return &baseFile, nil
}

func (i *fileFolderImporter) populateZipFileID(ctx context.Context, f *file.DirEntry) error {
	zipFilePath := filepath.FromSlash(i.Input.DirEntry().ZipFile)
	if zipFilePath != "" {
		zf, err := i.ReaderWriter.FindByPath(ctx, zipFilePath)
		if err != nil {
			return fmt.Errorf("error finding file by path %q: %v", zipFilePath, err)
		}

		if zf == nil {
			return errZipFileNotExist
		}

		id := zf.Base().ID
		f.ZipFileID = &id
	}

	return nil
}

func (i *fileFolderImporter) PostImport(ctx context.Context, id int) error {
	return nil
}

func (i *fileFolderImporter) Name() string {
	return filepath.FromSlash(i.Input.DirEntry().Path)
}

func (i *fileFolderImporter) FindExistingID(ctx context.Context) (*int, error) {
	path := filepath.FromSlash(i.Input.DirEntry().Path)
	existing, err := i.ReaderWriter.FindByPath(ctx, path)
	if err != nil {
		return nil, err
	}

	if existing != nil {
		id := int(existing.Base().ID)
		return &id, nil
	}

	return nil, nil
}

func (i *fileFolderImporter) createFolderHierarchy(ctx context.Context, p string) (*file.Folder, error) {
	parentPath := filepath.Dir(p)

	if parentPath == "." || parentPath == string(filepath.Separator) {
		// get or create this folder
		return i.getOrCreateFolder(ctx, p, nil)
	}

	parent, err := i.createFolderHierarchy(ctx, parentPath)
	if err != nil {
		return nil, err
	}

	return i.getOrCreateFolder(ctx, p, parent)
}

func (i *fileFolderImporter) getOrCreateFolder(ctx context.Context, path string, parent *file.Folder) (*file.Folder, error) {
	folder, err := i.FolderStore.FindByPath(ctx, path)
	if err != nil {
		return nil, err
	}

	if folder != nil {
		return folder, nil
	}

	now := time.Now()

	folder = &file.Folder{
		Path:      path,
		CreatedAt: now,
		UpdatedAt: now,
	}

	if parent != nil {
		folder.ZipFileID = parent.ZipFileID
		folder.ParentFolderID = &parent.ID
	}

	if err := i.FolderStore.Create(ctx, folder); err != nil {
		return nil, err
	}

	return folder, nil
}

func (i *fileFolderImporter) Create(ctx context.Context) (*int, error) {
	// create folder hierarchy and set parent folder id
	path := filepath.FromSlash(i.Input.DirEntry().Path)
	path = filepath.Dir(path)
	folder, err := i.createFolderHierarchy(ctx, path)
	if err != nil {
		return nil, fmt.Errorf("creating folder hierarchy for %q: %w", path, err)
	}

	if i.folder != nil {
		return i.createFolder(ctx, folder)
	}

	return i.createFile(ctx, folder)
}

func (i *fileFolderImporter) createFile(ctx context.Context, parentFolder *file.Folder) (*int, error) {
	if parentFolder != nil {
		i.file.Base().ParentFolderID = parentFolder.ID
	}

	if err := i.ReaderWriter.Create(ctx, i.file); err != nil {
		return nil, fmt.Errorf("error creating file: %w", err)
	}

	id := int(i.file.Base().ID)
	return &id, nil
}

func (i *fileFolderImporter) createFolder(ctx context.Context, parentFolder *file.Folder) (*int, error) {
	if parentFolder != nil {
		i.folder.ParentFolderID = &parentFolder.ID
	}

	if err := i.FolderStore.Create(ctx, i.folder); err != nil {
		return nil, fmt.Errorf("error creating folder: %w", err)
	}

	id := int(i.folder.ID)
	return &id, nil
}

func (i *fileFolderImporter) Update(ctx context.Context, id int) error {
	// update not supported
	return nil
}
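
This type satisfies the same importer contract (PreImport, FindExistingID, Create, PostImport, Name) that performImport drives for every other entity. A minimal sketch of how it is wired up, mirroring ImportTask.ImportFile later in this commit; the Repository value and its File/Folder store fields are assumptions from the surrounding manager package:

func importOneEntry(ctx context.Context, r Repository, entry jsonschema.DirEntry) error {
	importer := &fileFolderImporter{
		ReaderWriter: r.File,   // file.Store, as used by ImportTask.ImportFile
		FolderStore:  r.Folder, // file.FolderStore
		Input:        entry,
	}

	// duplicate files are ignored rather than overwritten
	return performImport(ctx, importer, ImportDuplicateEnumIgnore)
}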

View file

@@ -1,6 +1,8 @@
 package manager

 import (
+	"path/filepath"
+
 	"github.com/stashapp/stash/pkg/models/jsonschema"
 	"github.com/stashapp/stash/pkg/models/paths"
 )
@@ -9,14 +11,6 @@ type jsonUtils struct {
 	json paths.JSONPaths
 }

-func (jp *jsonUtils) getMappings() (*jsonschema.Mappings, error) {
-	return jsonschema.LoadMappingsFile(jp.json.MappingsFile)
-}
-
-func (jp *jsonUtils) saveMappings(mappings *jsonschema.Mappings) error {
-	return jsonschema.SaveMappingsFile(jp.json.MappingsFile, mappings)
-}
-
 func (jp *jsonUtils) getScraped() ([]jsonschema.ScrapedItem, error) {
 	return jsonschema.LoadScrapedFile(jp.json.ScrapedFile)
 }
@@ -25,58 +19,34 @@ func (jp *jsonUtils) saveScaped(scraped []jsonschema.ScrapedItem) error {
 	return jsonschema.SaveScrapedFile(jp.json.ScrapedFile, scraped)
 }

-func (jp *jsonUtils) getPerformer(checksum string) (*jsonschema.Performer, error) {
-	return jsonschema.LoadPerformerFile(jp.json.PerformerJSONPath(checksum))
-}
-
-func (jp *jsonUtils) savePerformer(checksum string, performer *jsonschema.Performer) error {
-	return jsonschema.SavePerformerFile(jp.json.PerformerJSONPath(checksum), performer)
-}
-
-func (jp *jsonUtils) getStudio(checksum string) (*jsonschema.Studio, error) {
-	return jsonschema.LoadStudioFile(jp.json.StudioJSONPath(checksum))
-}
-
-func (jp *jsonUtils) saveStudio(checksum string, studio *jsonschema.Studio) error {
-	return jsonschema.SaveStudioFile(jp.json.StudioJSONPath(checksum), studio)
-}
-
-func (jp *jsonUtils) getTag(checksum string) (*jsonschema.Tag, error) {
-	return jsonschema.LoadTagFile(jp.json.TagJSONPath(checksum))
-}
-
-func (jp *jsonUtils) saveTag(checksum string, tag *jsonschema.Tag) error {
-	return jsonschema.SaveTagFile(jp.json.TagJSONPath(checksum), tag)
-}
-
-func (jp *jsonUtils) getMovie(checksum string) (*jsonschema.Movie, error) {
-	return jsonschema.LoadMovieFile(jp.json.MovieJSONPath(checksum))
-}
-
-func (jp *jsonUtils) saveMovie(checksum string, movie *jsonschema.Movie) error {
-	return jsonschema.SaveMovieFile(jp.json.MovieJSONPath(checksum), movie)
-}
-
-func (jp *jsonUtils) getScene(checksum string) (*jsonschema.Scene, error) {
-	return jsonschema.LoadSceneFile(jp.json.SceneJSONPath(checksum))
-}
-
-func (jp *jsonUtils) saveScene(checksum string, scene *jsonschema.Scene) error {
-	return jsonschema.SaveSceneFile(jp.json.SceneJSONPath(checksum), scene)
-}
-
-func (jp *jsonUtils) getImage(checksum string) (*jsonschema.Image, error) {
-	return jsonschema.LoadImageFile(jp.json.ImageJSONPath(checksum))
-}
-
-func (jp *jsonUtils) saveImage(checksum string, image *jsonschema.Image) error {
-	return jsonschema.SaveImageFile(jp.json.ImageJSONPath(checksum), image)
-}
-
-func (jp *jsonUtils) getGallery(checksum string) (*jsonschema.Gallery, error) {
-	return jsonschema.LoadGalleryFile(jp.json.GalleryJSONPath(checksum))
-}
-
-func (jp *jsonUtils) saveGallery(checksum string, gallery *jsonschema.Gallery) error {
-	return jsonschema.SaveGalleryFile(jp.json.GalleryJSONPath(checksum), gallery)
-}
+func (jp *jsonUtils) savePerformer(fn string, performer *jsonschema.Performer) error {
+	return jsonschema.SavePerformerFile(filepath.Join(jp.json.Performers, fn), performer)
+}
+
+func (jp *jsonUtils) saveStudio(fn string, studio *jsonschema.Studio) error {
+	return jsonschema.SaveStudioFile(filepath.Join(jp.json.Studios, fn), studio)
+}
+
+func (jp *jsonUtils) saveTag(fn string, tag *jsonschema.Tag) error {
+	return jsonschema.SaveTagFile(filepath.Join(jp.json.Tags, fn), tag)
+}
+
+func (jp *jsonUtils) saveMovie(fn string, movie *jsonschema.Movie) error {
+	return jsonschema.SaveMovieFile(filepath.Join(jp.json.Movies, fn), movie)
+}
+
+func (jp *jsonUtils) saveScene(fn string, scene *jsonschema.Scene) error {
+	return jsonschema.SaveSceneFile(filepath.Join(jp.json.Scenes, fn), scene)
+}
+
+func (jp *jsonUtils) saveImage(fn string, image *jsonschema.Image) error {
+	return jsonschema.SaveImageFile(filepath.Join(jp.json.Images, fn), image)
+}
+
+func (jp *jsonUtils) saveGallery(fn string, gallery *jsonschema.Gallery) error {
+	return jsonschema.SaveGalleryFile(filepath.Join(jp.json.Galleries, fn), gallery)
+}
+
+func (jp *jsonUtils) saveFile(fn string, file jsonschema.DirEntry) error {
+	return jsonschema.SaveFileFile(filepath.Join(jp.json.Files, fn), file)
+}
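
With the mappings file gone, a saved object's location is simply the entity directory joined with a caller-supplied filename. A small sketch of the new call shape, assuming a scenes export; the exact string produced by Filename is an assumption, not shown in this diff:

// writes to <baseDir>/scenes/<fn>
fn := newSceneJSON.Filename(basename, hash) // e.g. "basename.hash.json" (format assumed)
if err := jp.saveScene(fn, newSceneJSON); err != nil {
	logger.Errorf("[scenes] failed to save json: %v", err)
}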

View file

@@ -20,6 +20,7 @@ type ImageReaderWriter interface {
 type GalleryReaderWriter interface {
 	models.GalleryReaderWriter
 	gallery.FinderCreatorUpdater
+	gallery.Finder
 }

 type SceneReaderWriter interface {

View file

@@ -8,13 +8,14 @@ import (
 	"os"
 	"path/filepath"
 	"runtime"
+	"strconv"
 	"sync"
 	"time"

 	"github.com/stashapp/stash/internal/manager/config"
+	"github.com/stashapp/stash/pkg/file"
 	"github.com/stashapp/stash/pkg/fsutil"
 	"github.com/stashapp/stash/pkg/gallery"
-	"github.com/stashapp/stash/pkg/hash/md5"
 	"github.com/stashapp/stash/pkg/image"
 	"github.com/stashapp/stash/pkg/logger"
 	"github.com/stashapp/stash/pkg/models"
@@ -38,7 +39,6 @@ type ExportTask struct {
 	baseDir string
 	json    jsonUtils

-	Mappings            *jsonschema.Mappings
 	fileNamingAlgorithm models.HashAlgorithm

 	scenes *exportSpec
@@ -118,8 +118,6 @@ func (t *ExportTask) Start(ctx context.Context, wg *sync.WaitGroup) {
 	// @manager.total = Scene.count + Gallery.count + Performer.count + Studio.count + Movie.count
 	workerCount := runtime.GOMAXPROCS(0) // set worker count to number of cpus available

-	t.Mappings = &jsonschema.Mappings{}
-
 	startTime := time.Now()

 	if t.full {
@@ -140,10 +138,16 @@ func (t *ExportTask) Start(ctx context.Context, wg *sync.WaitGroup) {
 		}()
 	}

+	if t.baseDir == "" {
+		logger.Errorf("baseDir must not be empty")
+		return
+	}
+
 	t.json = jsonUtils{
 		json: *paths.GetJSONPaths(t.baseDir),
 	}

+	paths.EmptyJSONDirs(t.baseDir)
 	paths.EnsureJSONDirs(t.baseDir)

 	txnErr := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
@@ -180,10 +184,6 @@ func (t *ExportTask) Start(ctx context.Context, wg *sync.WaitGroup) {
 		logger.Warnf("error while running export transaction: %v", txnErr)
 	}

-	if err := t.json.saveMappings(t.Mappings); err != nil {
-		logger.Errorf("[mappings] failed to save json: %s", err.Error())
-	}
-
 	if !t.full {
 		err := t.generateDownload()
 		if err != nil {
@@ -226,12 +226,6 @@ func (t *ExportTask) zipFiles(w io.Writer) error {
 		json: *paths.GetJSONPaths(""),
 	}

-	// write the mappings file
-	err := t.zipFile(t.json.json.MappingsFile, "", z)
-	if err != nil {
-		return err
-	}
-
 	walkWarn(t.json.json.Tags, t.zipWalkFunc(u.json.Tags, z))
 	walkWarn(t.json.json.Galleries, t.zipWalkFunc(u.json.Galleries, z))
 	walkWarn(t.json.json.Performers, t.zipWalkFunc(u.json.Performers, z))
@@ -380,7 +374,6 @@ func (t *ExportTask) ExportScenes(ctx context.Context, workers int, repo Reposit
 		if (i % 100) == 0 { // make progress easier to read
 			logger.Progressf("[scenes] %d of %d", index, len(scenes))
 		}
-		t.Mappings.Scenes = append(t.Mappings.Scenes, jsonschema.PathNameMapping{Path: scene.Path(), Checksum: scene.GetHash(t.fileNamingAlgorithm)})

 		jobCh <- scene // feed workers
 	}
@@ -390,6 +383,96 @@
 	logger.Infof("[scenes] export complete in %s. %d workers used.", time.Since(startTime), workers)
 }

+func exportFile(f file.File, t *ExportTask) {
+	newFileJSON := fileToJSON(f)
+
+	fn := newFileJSON.Filename()
+
+	if err := t.json.saveFile(fn, newFileJSON); err != nil {
+		logger.Errorf("[files] <%s> failed to save json: %s", fn, err.Error())
+	}
+}
+
+func fileToJSON(f file.File) jsonschema.DirEntry {
+	bf := f.Base()
+
+	base := jsonschema.BaseFile{
+		BaseDirEntry: jsonschema.BaseDirEntry{
+			Type:      jsonschema.DirEntryTypeFile,
+			ModTime:   json.JSONTime{Time: bf.ModTime},
+			Path:      filepath.ToSlash(bf.Path),
+			CreatedAt: json.JSONTime{Time: bf.CreatedAt},
+			UpdatedAt: json.JSONTime{Time: bf.UpdatedAt},
+		},
+		Size: bf.Size,
+	}
+
+	if bf.ZipFile != nil {
+		base.ZipFile = bf.ZipFile.Base().Path
+	}
+
+	for _, fp := range bf.Fingerprints {
+		base.Fingerprints = append(base.Fingerprints, jsonschema.Fingerprint{
+			Type:        fp.Type,
+			Fingerprint: fp.Fingerprint,
+		})
+	}
+
+	switch ff := f.(type) {
+	case *file.VideoFile:
+		base.Type = jsonschema.DirEntryTypeVideo
+		return jsonschema.VideoFile{
+			BaseFile:         &base,
+			Format:           ff.Format,
+			Width:            ff.Width,
+			Height:           ff.Height,
+			Duration:         ff.Duration,
+			VideoCodec:       ff.VideoCodec,
+			AudioCodec:       ff.AudioCodec,
+			FrameRate:        ff.FrameRate,
+			BitRate:          ff.BitRate,
+			Interactive:      ff.Interactive,
+			InteractiveSpeed: ff.InteractiveSpeed,
+		}
+	case *file.ImageFile:
+		base.Type = jsonschema.DirEntryTypeImage
+		return jsonschema.ImageFile{
+			BaseFile: &base,
+			Format:   ff.Format,
+			Width:    ff.Width,
+			Height:   ff.Height,
+		}
+	}
+
+	return &base
+}
+
+func exportFolder(f file.Folder, t *ExportTask) {
+	newFileJSON := folderToJSON(f)
+
+	fn := newFileJSON.Filename()
+
+	if err := t.json.saveFile(fn, newFileJSON); err != nil {
+		logger.Errorf("[files] <%s> failed to save json: %s", fn, err.Error())
+	}
+}
+
+func folderToJSON(f file.Folder) jsonschema.DirEntry {
+	base := jsonschema.BaseDirEntry{
+		Type:      jsonschema.DirEntryTypeFolder,
+		ModTime:   json.JSONTime{Time: f.ModTime},
+		Path:      filepath.ToSlash(f.Path),
+		CreatedAt: json.JSONTime{Time: f.CreatedAt},
+		UpdatedAt: json.JSONTime{Time: f.UpdatedAt},
+	}
+
+	if f.ZipFile != nil {
+		base.ZipFile = f.ZipFile.Base().Path
+	}
+
+	return &base
+}
+
 func exportScene(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Scene, repo Repository, t *ExportTask) {
 	defer wg.Done()
 	sceneReader := repo.Scene
@@ -413,6 +496,11 @@ func exportScene(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
 			continue
 		}

+		// export files
+		for _, f := range s.Files {
+			exportFile(f, t)
+		}
+
 		newSceneJSON.Studio, err = scene.GetStudioName(ctx, studioReader, s)
 		if err != nil {
 			logger.Errorf("[scenes] <%s> error getting scene studio name: %s", sceneHash, err.Error())
@@ -425,7 +513,7 @@
 			continue
 		}

-		newSceneJSON.Galleries = gallery.GetChecksums(galleries)
+		newSceneJSON.Galleries = gallery.GetRefs(galleries)

 		performers, err := performerReader.FindBySceneID(ctx, s.ID)
 		if err != nil {
@@ -477,12 +565,17 @@ func exportScene(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
 			t.performers.IDs = intslice.IntAppendUniques(t.performers.IDs, performer.GetIDs(performers))
 		}

-		sceneJSON, err := t.json.getScene(sceneHash)
-		if err == nil && jsonschema.CompareJSON(*sceneJSON, *newSceneJSON) {
-			continue
-		}
-
-		if err := t.json.saveScene(sceneHash, newSceneJSON); err != nil {
+		pf := s.PrimaryFile()
+		basename := ""
+		hash := ""
+		if pf != nil {
+			basename = pf.Basename
+			hash = s.OSHash()
+		}
+
+		fn := newSceneJSON.Filename(basename, hash)
+
+		if err := t.json.saveScene(fn, newSceneJSON); err != nil {
 			logger.Errorf("[scenes] <%s> failed to save json: %s", sceneHash, err.Error())
 		}
 	}
@@ -522,7 +615,6 @@ func (t *ExportTask) ExportImages(ctx context.Context, workers int, repo Reposit
 		if (i % 100) == 0 { // make progress easier to read
 			logger.Progressf("[images] %d of %d", index, len(images))
 		}
-		t.Mappings.Images = append(t.Mappings.Images, jsonschema.PathNameMapping{Path: image.Path(), Checksum: image.Checksum()})

 		jobCh <- image // feed workers
 	}
@@ -544,6 +636,11 @@ func exportImage(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
 		newImageJSON := image.ToBasicJSON(s)

+		// export files
+		for _, f := range s.Files {
+			exportFile(f, t)
+		}
+
 		var err error
 		newImageJSON.Studio, err = image.GetStudioName(ctx, studioReader, s)
 		if err != nil {
@@ -557,7 +654,7 @@ func exportImage(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
 			continue
 		}

-		newImageJSON.Galleries = t.getGalleryChecksums(imageGalleries)
+		newImageJSON.Galleries = gallery.GetRefs(imageGalleries)

 		performers, err := performerReader.FindByImageID(ctx, s.ID)
 		if err != nil {
@@ -585,24 +682,22 @@ func exportImage(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
 			t.performers.IDs = intslice.IntAppendUniques(t.performers.IDs, performer.GetIDs(performers))
 		}

-		imageJSON, err := t.json.getImage(imageHash)
-		if err == nil && jsonschema.CompareJSON(*imageJSON, *newImageJSON) {
-			continue
-		}
-
-		if err := t.json.saveImage(imageHash, newImageJSON); err != nil {
+		pf := s.PrimaryFile()
+		basename := ""
+		hash := ""
+		if pf != nil {
+			basename = pf.Basename
+			hash = s.Checksum()
+		}
+
+		fn := newImageJSON.Filename(basename, hash)
+
+		if err := t.json.saveImage(fn, newImageJSON); err != nil {
 			logger.Errorf("[images] <%s> failed to save json: %s", imageHash, err.Error())
 		}
 	}
 }

-func (t *ExportTask) getGalleryChecksums(galleries []*models.Gallery) (ret []string) {
-	for _, g := range galleries {
-		ret = append(ret, g.Checksum())
-	}
-	return
-}
-
 func (t *ExportTask) ExportGalleries(ctx context.Context, workers int, repo Repository) {
 	var galleriesWg sync.WaitGroup
@@ -638,14 +733,6 @@ func (t *ExportTask) ExportGalleries(ctx context.Context, workers int, repo Repo
 			logger.Progressf("[galleries] %d of %d", index, len(galleries))
 		}

-		title := gallery.Title
-		path := gallery.Path()
-
-		t.Mappings.Galleries = append(t.Mappings.Galleries, jsonschema.PathNameMapping{
-			Path:     path,
-			Name:     title,
-			Checksum: gallery.Checksum(),
-		})
-
 		jobCh <- gallery
 	}
@@ -670,6 +757,27 @@ func exportGallery(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *mode
 			continue
 		}

+		// export files
+		for _, f := range g.Files {
+			exportFile(f, t)
+		}
+
+		// export folder if necessary
+		if g.FolderID != nil {
+			folder, err := repo.Folder.Find(ctx, *g.FolderID)
+			if err != nil {
+				logger.Errorf("[galleries] <%s> error getting gallery folder: %v", galleryHash, err)
+				continue
+			}
+
+			if folder == nil {
+				logger.Errorf("[galleries] <%s> unable to find gallery folder", galleryHash)
+				continue
+			}
+
+			exportFolder(*folder, t)
+		}
+
 		newGalleryJSON.Studio, err = gallery.GetStudioName(ctx, studioReader, g)
 		if err != nil {
 			logger.Errorf("[galleries] <%s> error getting gallery studio name: %s", galleryHash, err.Error())
@@ -701,12 +809,23 @@ func exportGallery(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *mode
 			t.performers.IDs = intslice.IntAppendUniques(t.performers.IDs, performer.GetIDs(performers))
 		}

-		galleryJSON, err := t.json.getGallery(galleryHash)
-		if err == nil && jsonschema.CompareJSON(*galleryJSON, *newGalleryJSON) {
-			continue
-		}
-
-		if err := t.json.saveGallery(galleryHash, newGalleryJSON); err != nil {
+		pf := g.PrimaryFile()
+		basename := ""
+		// use id in case multiple galleries with the same basename
+		hash := strconv.Itoa(g.ID)
+
+		switch {
+		case pf != nil:
+			basename = pf.Base().Basename
+		case g.FolderPath != "":
+			basename = filepath.Base(g.FolderPath)
+		default:
+			basename = g.Title
+		}
+
+		fn := newGalleryJSON.Filename(basename, hash)
+
+		if err := t.json.saveGallery(fn, newGalleryJSON); err != nil {
 			logger.Errorf("[galleries] <%s> failed to save json: %s", galleryHash, err.Error())
 		}
 	}
@@ -742,7 +861,6 @@ func (t *ExportTask) ExportPerformers(ctx context.Context, workers int, repo Rep
 		index := i + 1
 		logger.Progressf("[performers] %d of %d", index, len(performers))

-		t.Mappings.Performers = append(t.Mappings.Performers, jsonschema.PathNameMapping{Name: performer.Name.String, Checksum: performer.Checksum})
 		jobCh <- performer // feed workers
 	}
@@ -777,14 +895,9 @@ func (t *ExportTask) exportPerformer(ctx context.Context, wg *sync.WaitGroup, jo
 			t.tags.IDs = intslice.IntAppendUniques(t.tags.IDs, tag.GetIDs(tags))
 		}

-		performerJSON, err := t.json.getPerformer(p.Checksum)
-		if err != nil {
-			logger.Debugf("[performers] error reading performer json: %s", err.Error())
-		} else if jsonschema.CompareJSON(*performerJSON, *newPerformerJSON) {
-			continue
-		}
-
-		if err := t.json.savePerformer(p.Checksum, newPerformerJSON); err != nil {
+		fn := newPerformerJSON.Filename()
+
+		if err := t.json.savePerformer(fn, newPerformerJSON); err != nil {
 			logger.Errorf("[performers] <%s> failed to save json: %s", p.Checksum, err.Error())
 		}
 	}
@@ -821,7 +934,6 @@ func (t *ExportTask) ExportStudios(ctx context.Context, workers int, repo Reposi
 		index := i + 1
 		logger.Progressf("[studios] %d of %d", index, len(studios))

-		t.Mappings.Studios = append(t.Mappings.Studios, jsonschema.PathNameMapping{Name: studio.Name.String, Checksum: studio.Checksum})
 		jobCh <- studio // feed workers
 	}
@@ -844,12 +956,9 @@ func (t *ExportTask) exportStudio(ctx context.Context, wg *sync.WaitGroup, jobCh
 			continue
 		}

-		studioJSON, err := t.json.getStudio(s.Checksum)
-		if err == nil && jsonschema.CompareJSON(*studioJSON, *newStudioJSON) {
-			continue
-		}
-
-		if err := t.json.saveStudio(s.Checksum, newStudioJSON); err != nil {
+		fn := newStudioJSON.Filename()
+
+		if err := t.json.saveStudio(fn, newStudioJSON); err != nil {
 			logger.Errorf("[studios] <%s> failed to save json: %s", s.Checksum, err.Error())
 		}
 	}
@@ -886,10 +995,6 @@ func (t *ExportTask) ExportTags(ctx context.Context, workers int, repo Repositor
 		index := i + 1
 		logger.Progressf("[tags] %d of %d", index, len(tags))

-		// generate checksum on the fly by name, since we don't store it
-		checksum := md5.FromString(tag.Name)
-
-		t.Mappings.Tags = append(t.Mappings.Tags, jsonschema.PathNameMapping{Name: tag.Name, Checksum: checksum})
 		jobCh <- tag // feed workers
 	}
@@ -912,16 +1017,10 @@ func (t *ExportTask) exportTag(ctx context.Context, wg *sync.WaitGroup, jobChan
 			continue
 		}

-		// generate checksum on the fly by name, since we don't store it
-		checksum := md5.FromString(thisTag.Name)
-
-		tagJSON, err := t.json.getTag(checksum)
-		if err == nil && jsonschema.CompareJSON(*tagJSON, *newTagJSON) {
-			continue
-		}
-
-		if err := t.json.saveTag(checksum, newTagJSON); err != nil {
-			logger.Errorf("[tags] <%s> failed to save json: %s", checksum, err.Error())
+		fn := newTagJSON.Filename()
+
+		if err := t.json.saveTag(fn, newTagJSON); err != nil {
+			logger.Errorf("[tags] <%s> failed to save json: %s", fn, err.Error())
 		}
 	}
 }
@@ -957,7 +1056,6 @@ func (t *ExportTask) ExportMovies(ctx context.Context, workers int, repo Reposit
 		index := i + 1
 		logger.Progressf("[movies] %d of %d", index, len(movies))

-		t.Mappings.Movies = append(t.Mappings.Movies, jsonschema.PathNameMapping{Name: movie.Name.String, Checksum: movie.Checksum})
 		jobCh <- movie // feed workers
 	}
@@ -987,15 +1085,10 @@
 			}
 		}

-		movieJSON, err := t.json.getMovie(m.Checksum)
-		if err != nil {
-			logger.Debugf("[movies] error reading movie json: %s", err.Error())
-		} else if jsonschema.CompareJSON(*movieJSON, *newMovieJSON) {
-			continue
-		}
-
-		if err := t.json.saveMovie(m.Checksum, newMovieJSON); err != nil {
-			logger.Errorf("[movies] <%s> failed to save json: %s", m.Checksum, err.Error())
+		fn := newMovieJSON.Filename()
+
+		if err := t.json.saveMovie(fn, newMovieJSON); err != nil {
+			logger.Errorf("[movies] <%s> failed to save json: %s", fn, err.Error())
 		}
 	}
 }
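
The gallery branch above is the "make gallery filenames unique" part of this commit: with no content hash to lean on, the database ID is used as the hash component, so two zip galleries with the same basename can no longer overwrite each other's JSON. A hedged illustration; the separator used by Filename is an assumption:

// two galleries both backed by a "holiday.zip" now export to distinct files
fnA := newGalleryJSON.Filename("holiday.zip", strconv.Itoa(12)) // e.g. "holiday.zip.12.json" (format assumed)
fnB := newGalleryJSON.Filename("holiday.zip", strconv.Itoa(34)) // e.g. "holiday.zip.34.json"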

View file

@@ -7,6 +7,7 @@ import (
 	"errors"
 	"fmt"
 	"io"
+	"io/ioutil"
 	"os"
 	"path/filepath"
 	"time"
@@ -37,7 +38,6 @@ type ImportTask struct {
 	DuplicateBehaviour  ImportDuplicateEnum
 	MissingRefBehaviour models.ImportMissingRefEnum

-	mappings            *jsonschema.Mappings
 	scraped             []jsonschema.ScrapedItem
 	fileNamingAlgorithm models.HashAlgorithm
 }
@@ -112,11 +112,6 @@ func (t *ImportTask) Start(ctx context.Context) {
 		t.MissingRefBehaviour = models.ImportMissingRefEnumFail
 	}

-	t.mappings, _ = t.json.getMappings()
-	if t.mappings == nil {
-		logger.Error("missing mappings json")
-		return
-	}
 	scraped, _ := t.json.getScraped()
 	if scraped == nil {
 		logger.Warn("missing scraped json")
@@ -136,6 +131,7 @@ func (t *ImportTask) Start(ctx context.Context) {
 	t.ImportPerformers(ctx)
 	t.ImportStudios(ctx)
 	t.ImportMovies(ctx)
+	t.ImportFiles(ctx)
 	t.ImportGalleries(ctx)

 	t.ImportScrapedItems(ctx)
@@ -199,15 +195,25 @@ func (t *ImportTask) unzipFile() error {
 func (t *ImportTask) ImportPerformers(ctx context.Context) {
 	logger.Info("[performers] importing")

-	for i, mappingJSON := range t.mappings.Performers {
+	path := t.json.json.Performers
+	files, err := ioutil.ReadDir(path)
+	if err != nil {
+		if !errors.Is(err, os.ErrNotExist) {
+			logger.Errorf("[performers] failed to read performers directory: %v", err)
+		}
+
+		return
+	}
+
+	for i, fi := range files {
 		index := i + 1
-		performerJSON, err := t.json.getPerformer(mappingJSON.Checksum)
+		performerJSON, err := jsonschema.LoadPerformerFile(filepath.Join(path, fi.Name()))
 		if err != nil {
 			logger.Errorf("[performers] failed to read json: %s", err.Error())
 			continue
 		}

-		logger.Progressf("[performers] %d of %d", index, len(t.mappings.Performers))
+		logger.Progressf("[performers] %d of %d", index, len(files))

 		if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
 			r := t.txnManager
@@ -220,7 +226,7 @@ func (t *ImportTask) ImportPerformers(ctx context.Context) {
 			return performImport(ctx, importer, t.DuplicateBehaviour)
 		}); err != nil {
-			logger.Errorf("[performers] <%s> import failed: %s", mappingJSON.Checksum, err.Error())
+			logger.Errorf("[performers] <%s> import failed: %s", fi.Name(), err.Error())
 		}
 	}
@@ -232,15 +238,25 @@ func (t *ImportTask) ImportStudios(ctx context.Context) {
 	logger.Info("[studios] importing")

-	for i, mappingJSON := range t.mappings.Studios {
+	path := t.json.json.Studios
+	files, err := ioutil.ReadDir(path)
+	if err != nil {
+		if !errors.Is(err, os.ErrNotExist) {
+			logger.Errorf("[studios] failed to read studios directory: %v", err)
+		}
+
+		return
+	}
+
+	for i, fi := range files {
 		index := i + 1
-		studioJSON, err := t.json.getStudio(mappingJSON.Checksum)
+		studioJSON, err := jsonschema.LoadStudioFile(filepath.Join(path, fi.Name()))
 		if err != nil {
 			logger.Errorf("[studios] failed to read json: %s", err.Error())
 			continue
 		}

-		logger.Progressf("[studios] %d of %d", index, len(t.mappings.Studios))
+		logger.Progressf("[studios] %d of %d", index, len(files))

 		if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
 			return t.ImportStudio(ctx, studioJSON, pendingParent, t.txnManager.Studio)
@@ -253,7 +269,7 @@ func (t *ImportTask) ImportStudios(ctx context.Context) {
 				continue
 			}

-			logger.Errorf("[studios] <%s> failed to create: %s", mappingJSON.Checksum, err.Error())
+			logger.Errorf("[studios] <%s> failed to create: %s", fi.Name(), err.Error())
 			continue
 		}
 	}
@@ -311,15 +327,25 @@ func (t *ImportTask) ImportStudio(ctx context.Context, studioJSON *jsonschema.St
 func (t *ImportTask) ImportMovies(ctx context.Context) {
 	logger.Info("[movies] importing")

-	for i, mappingJSON := range t.mappings.Movies {
+	path := t.json.json.Movies
+	files, err := ioutil.ReadDir(path)
+	if err != nil {
+		if !errors.Is(err, os.ErrNotExist) {
+			logger.Errorf("[movies] failed to read movies directory: %v", err)
+		}
+
+		return
+	}
+
+	for i, fi := range files {
 		index := i + 1
-		movieJSON, err := t.json.getMovie(mappingJSON.Checksum)
+		movieJSON, err := jsonschema.LoadMovieFile(filepath.Join(path, fi.Name()))
 		if err != nil {
 			logger.Errorf("[movies] failed to read json: %s", err.Error())
 			continue
 		}

-		logger.Progressf("[movies] %d of %d", index, len(t.mappings.Movies))
+		logger.Progressf("[movies] %d of %d", index, len(files))

 		if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
 			r := t.txnManager
@@ -335,7 +361,7 @@ func (t *ImportTask) ImportMovies(ctx context.Context) {
 			return performImport(ctx, movieImporter, t.DuplicateBehaviour)
 		}); err != nil {
-			logger.Errorf("[movies] <%s> import failed: %s", mappingJSON.Checksum, err.Error())
+			logger.Errorf("[movies] <%s> import failed: %s", fi.Name(), err.Error())
 			continue
 		}
 	}
@@ -343,18 +369,118 @@ func (t *ImportTask) ImportMovies(ctx context.Context) {
 	logger.Info("[movies] import complete")
 }

+func (t *ImportTask) ImportFiles(ctx context.Context) {
+	logger.Info("[files] importing")
+
+	path := t.json.json.Files
+	files, err := ioutil.ReadDir(path)
+	if err != nil {
+		if !errors.Is(err, os.ErrNotExist) {
+			logger.Errorf("[files] failed to read files directory: %v", err)
+		}
+
+		return
+	}
+
+	pendingParent := make(map[string][]jsonschema.DirEntry)
+
+	for i, fi := range files {
+		index := i + 1
+		fileJSON, err := jsonschema.LoadFileFile(filepath.Join(path, fi.Name()))
+		if err != nil {
+			logger.Errorf("[files] failed to read json: %s", err.Error())
+			continue
+		}
+
+		logger.Progressf("[files] %d of %d", index, len(files))
+
+		if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
+			return t.ImportFile(ctx, fileJSON, pendingParent)
+		}); err != nil {
+			if errors.Is(err, errZipFileNotExist) {
+				// add to the pending parent list so that it is created after the parent
+				s := pendingParent[fileJSON.DirEntry().ZipFile]
+				s = append(s, fileJSON)
+				pendingParent[fileJSON.DirEntry().ZipFile] = s
+				continue
+			}
+
+			logger.Errorf("[files] <%s> failed to create: %s", fi.Name(), err.Error())
+			continue
+		}
+	}
+
+	// create the leftover files, warning for missing zip files
+	if len(pendingParent) > 0 {
+		logger.Warnf("[files] importing files with missing zip files")
+
+		for _, s := range pendingParent {
+			for _, orphanFileJSON := range s {
+				if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
+					return t.ImportFile(ctx, orphanFileJSON, nil)
+				}); err != nil {
+					logger.Errorf("[files] <%s> failed to create: %s", orphanFileJSON.DirEntry().Path, err.Error())
+					continue
+				}
+			}
+		}
+	}
+
+	logger.Info("[files] import complete")
+}
+
+func (t *ImportTask) ImportFile(ctx context.Context, fileJSON jsonschema.DirEntry, pendingParent map[string][]jsonschema.DirEntry) error {
+	r := t.txnManager
+	readerWriter := r.File
+
+	fileImporter := &fileFolderImporter{
+		ReaderWriter: readerWriter,
+		FolderStore:  r.Folder,
+		Input:        fileJSON,
+	}
+
+	// ignore duplicate files - don't overwrite
+	if err := performImport(ctx, fileImporter, ImportDuplicateEnumIgnore); err != nil {
+		return err
+	}
+
+	// now create the files pending this file's creation
+	s := pendingParent[fileJSON.DirEntry().Path]
+	for _, childFileJSON := range s {
+		// map is nil since we're not checking for pending children at this point
+		if err := t.ImportFile(ctx, childFileJSON, nil); err != nil {
+			return fmt.Errorf("failed to create child file <%s>: %s", childFileJSON.DirEntry().Path, err.Error())
+		}
+	}
+
+	// delete the entry from the map so that we know it's not left over
+	delete(pendingParent, fileJSON.DirEntry().Path)
+
+	return nil
+}
+
 func (t *ImportTask) ImportGalleries(ctx context.Context) {
 	logger.Info("[galleries] importing")

-	for i, mappingJSON := range t.mappings.Galleries {
+	path := t.json.json.Galleries
+	files, err := ioutil.ReadDir(path)
+	if err != nil {
+		if !errors.Is(err, os.ErrNotExist) {
+			logger.Errorf("[galleries] failed to read galleries directory: %v", err)
+		}
+
+		return
+	}
+
+	for i, fi := range files {
 		index := i + 1
-		galleryJSON, err := t.json.getGallery(mappingJSON.Checksum)
+		galleryJSON, err := jsonschema.LoadGalleryFile(filepath.Join(path, fi.Name()))
 		if err != nil {
 			logger.Errorf("[galleries] failed to read json: %s", err.Error())
 			continue
 		}

-		logger.Progressf("[galleries] %d of %d", index, len(t.mappings.Galleries))
+		logger.Progressf("[galleries] %d of %d", index, len(files))

 		if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
 			r := t.txnManager
@@ -365,6 +491,8 @@ func (t *ImportTask) ImportGalleries(ctx context.Context) {
 			galleryImporter := &gallery.Importer{
 				ReaderWriter:        readerWriter,
+				FolderFinder:        r.Folder,
+				FileFinder:          r.File,
 				PerformerWriter:     performerWriter,
 				StudioWriter:        studioWriter,
 				TagWriter:           tagWriter,
@@ -374,7 +502,7 @@ func (t *ImportTask) ImportGalleries(ctx context.Context) {
 			return performImport(ctx, galleryImporter, t.DuplicateBehaviour)
 		}); err != nil {
-			logger.Errorf("[galleries] <%s> import failed to commit: %s", mappingJSON.Checksum, err.Error())
+			logger.Errorf("[galleries] <%s> import failed to commit: %s", fi.Name(), err.Error())
 			continue
 		}
 	}
@@ -386,15 +514,25 @@ func (t *ImportTask) ImportTags(ctx context.Context) {
 	pendingParent := make(map[string][]*jsonschema.Tag)

 	logger.Info("[tags] importing")

-	for i, mappingJSON := range t.mappings.Tags {
+	path := t.json.json.Tags
+	files, err := ioutil.ReadDir(path)
+	if err != nil {
+		if !errors.Is(err, os.ErrNotExist) {
+			logger.Errorf("[tags] failed to read tags directory: %v", err)
+		}
+
+		return
+	}
+
+	for i, fi := range files {
 		index := i + 1
-		tagJSON, err := t.json.getTag(mappingJSON.Checksum)
+		tagJSON, err := jsonschema.LoadTagFile(filepath.Join(path, fi.Name()))
 		if err != nil {
 			logger.Errorf("[tags] failed to read json: %s", err.Error())
 			continue
 		}

-		logger.Progressf("[tags] %d of %d", index, len(t.mappings.Tags))
+		logger.Progressf("[tags] %d of %d", index, len(files))

 		if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
 			return t.ImportTag(ctx, tagJSON, pendingParent, false, t.txnManager.Tag)
@@ -405,7 +543,7 @@ func (t *ImportTask) ImportTags(ctx context.Context) {
 				continue
 			}

-			logger.Errorf("[tags] <%s> failed to import: %s", mappingJSON.Checksum, err.Error())
+			logger.Errorf("[tags] <%s> failed to import: %s", fi.Name(), err.Error())
 			continue
 		}
 	}
@@ -467,7 +605,7 @@ func (t *ImportTask) ImportScrapedItems(ctx context.Context) {
 	for i, mappingJSON := range t.scraped {
 		index := i + 1
-		logger.Progressf("[scraped sites] %d of %d", index, len(t.mappings.Scenes))
+		logger.Progressf("[scraped sites] %d of %d", index, len(t.scraped))

 		newScrapedItem := models.ScrapedItem{
 			Title: sql.NullString{String: mappingJSON.Title, Valid: true},
@@ -511,18 +649,26 @@ func (t *ImportTask) ImportScrapedItems(ctx context.Context) {
 func (t *ImportTask) ImportScenes(ctx context.Context) {
 	logger.Info("[scenes] importing")

-	for i, mappingJSON := range t.mappings.Scenes {
-		index := i + 1
-
-		logger.Progressf("[scenes] %d of %d", index, len(t.mappings.Scenes))
-
-		sceneJSON, err := t.json.getScene(mappingJSON.Checksum)
-		if err != nil {
-			logger.Infof("[scenes] <%s> json parse failure: %s", mappingJSON.Checksum, err.Error())
-			continue
-		}
-
-		sceneHash := mappingJSON.Checksum
+	path := t.json.json.Scenes
+	files, err := ioutil.ReadDir(path)
+	if err != nil {
+		if !errors.Is(err, os.ErrNotExist) {
+			logger.Errorf("[scenes] failed to read scenes directory: %v", err)
+		}
+
+		return
+	}
+
+	for i, fi := range files {
+		index := i + 1
+
+		logger.Progressf("[scenes] %d of %d", index, len(files))
+
+		sceneJSON, err := jsonschema.LoadSceneFile(filepath.Join(path, fi.Name()))
+		if err != nil {
+			logger.Infof("[scenes] <%s> json parse failure: %s", fi.Name(), err.Error())
+			continue
+		}

 		if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
 			r := t.txnManager
@@ -537,12 +683,12 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
 			sceneImporter := &scene.Importer{
 				ReaderWriter: readerWriter,
 				Input:        *sceneJSON,
-				Path:         mappingJSON.Path,
+				FileFinder:   r.File,

 				FileNamingAlgorithm: t.fileNamingAlgorithm,
 				MissingRefBehaviour: t.MissingRefBehaviour,

-				GalleryWriter:   galleryWriter,
+				GalleryFinder:   galleryWriter,
 				MovieWriter:     movieWriter,
 				PerformerWriter: performerWriter,
 				StudioWriter:    studioWriter,
@@ -570,7 +716,7 @@ func (t *ImportTask) ImportScenes(ctx context.Context) {
 			return nil
 		}); err != nil {
-			logger.Errorf("[scenes] <%s> import failed: %s", sceneHash, err.Error())
+			logger.Errorf("[scenes] <%s> import failed: %s", fi.Name(), err.Error())
 		}
 	}
@@ -580,18 +726,26 @@ func (t *ImportTask) ImportImages(ctx context.Context) {
 func (t *ImportTask) ImportImages(ctx context.Context) {
 	logger.Info("[images] importing")

-	for i, mappingJSON := range t.mappings.Images {
-		index := i + 1
-
-		logger.Progressf("[images] %d of %d", index, len(t.mappings.Images))
-
-		imageJSON, err := t.json.getImage(mappingJSON.Checksum)
-		if err != nil {
-			logger.Infof("[images] <%s> json parse failure: %s", mappingJSON.Checksum, err.Error())
-			continue
-		}
-
-		imageHash := mappingJSON.Checksum
+	path := t.json.json.Images
+	files, err := ioutil.ReadDir(path)
+	if err != nil {
+		if !errors.Is(err, os.ErrNotExist) {
+			logger.Errorf("[images] failed to read images directory: %v", err)
+		}
+
+		return
+	}
+
+	for i, fi := range files {
+		index := i + 1
+
+		logger.Progressf("[images] %d of %d", index, len(files))
+
+		imageJSON, err := jsonschema.LoadImageFile(filepath.Join(path, fi.Name()))
+		if err != nil {
+			logger.Infof("[images] <%s> json parse failure: %s", fi.Name(), err.Error())
+			continue
+		}

 		if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
 			r := t.txnManager
@@ -603,12 +757,12 @@ func (t *ImportTask) ImportImages(ctx context.Context) {
 			imageImporter := &image.Importer{
 				ReaderWriter: readerWriter,
+				FileFinder:   r.File,
 				Input:        *imageJSON,
-				Path:         mappingJSON.Path,

 				MissingRefBehaviour: t.MissingRefBehaviour,

-				GalleryWriter:   galleryWriter,
+				GalleryFinder:   galleryWriter,
 				PerformerWriter: performerWriter,
 				StudioWriter:    studioWriter,
 				TagWriter:       tagWriter,
@@ -616,7 +770,7 @@ func (t *ImportTask) ImportImages(ctx context.Context) {
 			return performImport(ctx, imageImporter, t.DuplicateBehaviour)
 		}); err != nil {
-			logger.Errorf("[images] <%s> import failed: %s", imageHash, err.Error())
+			logger.Errorf("[images] <%s> import failed: %s", fi.Name(), err.Error())
 		}
 	}
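
The zip-dependency handling in ImportFiles/ImportFile above is easiest to see in isolation: an entry whose zip container has not been created yet is parked under the container's path and retried right after the container imports. A minimal, self-contained sketch of the same strategy; the names here are invented for illustration, and the real code keys the map on jsonschema paths and signals a missing container with errZipFileNotExist:

package main

import "fmt"

type entry struct{ path, zip string }

func main() {
	// inner.jpg lives inside archive.zip, so its row must wait for the zip's row
	entries := []entry{{path: "inner.jpg", zip: "archive.zip"}, {path: "archive.zip"}}
	created := map[string]bool{}
	pending := map[string][]entry{}

	var importOne func(e entry)
	importOne = func(e entry) {
		created[e.path] = true
		// drain any children that were parked waiting for this container
		for _, child := range pending[e.path] {
			importOne(child)
		}
		delete(pending, e.path)
	}

	for _, e := range entries {
		if e.zip != "" && !created[e.zip] {
			// analogue of errZipFileNotExist: defer until the zip row exists
			pending[e.zip] = append(pending[e.zip], e)
			continue
		}
		importOne(e)
	}

	fmt.Println(created) // map[archive.zip:true inner.jpg:true]
}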

View file

@@ -13,14 +13,17 @@ import (
 // does not convert the relationships to other objects.
 func ToBasicJSON(gallery *models.Gallery) (*jsonschema.Gallery, error) {
 	newGalleryJSON := jsonschema.Gallery{
-		Title:     gallery.Title,
-		URL:       gallery.URL,
-		Details:   gallery.Details,
-		CreatedAt: json.JSONTime{Time: gallery.CreatedAt},
-		UpdatedAt: json.JSONTime{Time: gallery.UpdatedAt},
+		FolderPath: gallery.FolderPath,
+		Title:      gallery.Title,
+		URL:        gallery.URL,
+		Details:    gallery.Details,
+		CreatedAt:  json.JSONTime{Time: gallery.CreatedAt},
+		UpdatedAt:  json.JSONTime{Time: gallery.UpdatedAt},
 	}

-	newGalleryJSON.Path = gallery.Path()
+	for _, f := range gallery.Files {
+		newGalleryJSON.ZipFiles = append(newGalleryJSON.ZipFiles, f.Base().Path)
+	}

 	if gallery.Date != nil {
 		newGalleryJSON.Date = gallery.Date.String()
@@ -61,12 +64,22 @@ func GetIDs(galleries []*models.Gallery) []int {
 	return results
 }

-func GetChecksums(galleries []*models.Gallery) []string {
-	var results []string
+func GetRefs(galleries []*models.Gallery) []jsonschema.GalleryRef {
+	var results []jsonschema.GalleryRef
 	for _, gallery := range galleries {
-		if gallery.Checksum() != "" {
-			results = append(results, gallery.Checksum())
-		}
+		toAdd := jsonschema.GalleryRef{}
+		switch {
+		case gallery.FolderPath != "":
+			toAdd.FolderPath = gallery.FolderPath
+		case len(gallery.Files) > 0:
+			for _, f := range gallery.Files {
+				toAdd.ZipFiles = append(toAdd.ZipFiles, f.Base().Path)
+			}
+		default:
+			toAdd.Title = gallery.Title
+		}
+
+		results = append(results, toAdd)
 	}

 	return results
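
GetRefs gives the export side a structured way to point at galleries, replacing the old checksum list; resolution order is folder path first, then zip file paths, then title as a last resort. A hedged usage sketch, with the gallery literals simplified (real galleries carry full file objects rather than bare titles):

// one ref per gallery, shaped by how the gallery is stored
refs := GetRefs([]*models.Gallery{
	{FolderPath: "/media/galleries/beach"}, // -> GalleryRef{FolderPath: "..."}
	{Title: "manual gallery"},              // -> GalleryRef{Title: "..."} (no files or folder)
})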

View file

@@ -1,171 +1,162 @@
 package gallery

-// import (
-// 	"errors"
-// 	"github.com/stashapp/stash/pkg/models"
-// 	"github.com/stashapp/stash/pkg/models/json"
-// 	"github.com/stashapp/stash/pkg/models/jsonschema"
-// 	"github.com/stashapp/stash/pkg/models/mocks"
-// 	"github.com/stretchr/testify/assert"
-// 	"testing"
-// 	"time"
-// )
+import (
+	"errors"
+	"github.com/stashapp/stash/pkg/models"
+	"github.com/stashapp/stash/pkg/models/json"
+	"github.com/stashapp/stash/pkg/models/jsonschema"
+	"github.com/stashapp/stash/pkg/models/mocks"
+	"github.com/stretchr/testify/assert"
+	"testing"
+	"time"
+)

-// const (
-// 	galleryID       = 1
-// 	studioID        = 4
-// 	missingStudioID = 5
-// 	errStudioID     = 6
-// 	// noTagsID = 11
-// )
+const (
+	galleryID       = 1
+	studioID        = 4
+	missingStudioID = 5
+	errStudioID     = 6
+	// noTagsID = 11
+)

-// var (
-// 	path      = "path"
-// 	isZip     = true
-// 	url       = "url"
-// 	checksum  = "checksum"
-// 	title     = "title"
-// 	date      = "2001-01-01"
-// 	dateObj   = models.NewDate(date)
-// 	rating    = 5
-// 	organized = true
-// 	details   = "details"
-// )
+var (
+	url       = "url"
+	title     = "title"
+	date      = "2001-01-01"
+	dateObj   = models.NewDate(date)
+	rating    = 5
+	organized = true
+	details   = "details"
+)

-// const (
-// 	studioName = "studioName"
-// )
+const (
+	studioName = "studioName"
+)

-// var (
-// 	createTime = time.Date(2001, 01, 01, 0, 0, 0, 0, time.UTC)
-// 	updateTime = time.Date(2002, 01, 01, 0, 0, 0, 0, time.UTC)
-// )
+var (
+	createTime = time.Date(2001, 01, 01, 0, 0, 0, 0, time.UTC)
+	updateTime = time.Date(2002, 01, 01, 0, 0, 0, 0, time.UTC)
+)

-// func createFullGallery(id int) models.Gallery {
-// 	return models.Gallery{
-// 		ID:        id,
-// 		Path:      &path,
-// 		Zip:       isZip,
-// 		Title:     title,
-// 		Checksum:  checksum,
-// 		Date:      &dateObj,
-// 		Details:   details,
-// 		Rating:    &rating,
-// 		Organized: organized,
-// 		URL:       url,
-// 		CreatedAt: createTime,
-// 		UpdatedAt: updateTime,
-// 	}
-// }
+func createFullGallery(id int) models.Gallery {
+	return models.Gallery{
+		ID:        id,
+		Title:     title,
+		Date:      &dateObj,
+		Details:   details,
+		Rating:    &rating,
+		Organized: organized,
+		URL:       url,
+		CreatedAt: createTime,
+		UpdatedAt: updateTime,
+	}
+}

-// func createFullJSONGallery() *jsonschema.Gallery {
-// 	return &jsonschema.Gallery{
-// 		Title:     title,
-// 		Path:      path,
-// 		Zip:       isZip,
-// 		Checksum:  checksum,
-// 		Date:      date,
-// 		Details:   details,
-// 		Rating:    rating,
-// 		Organized: organized,
-// 		URL:       url,
-// 		CreatedAt: json.JSONTime{
-// 			Time: createTime,
-// 		},
-// 		UpdatedAt: json.JSONTime{
-// 			Time: updateTime,
-// 		},
-// 	}
-// }
+func createFullJSONGallery() *jsonschema.Gallery {
+	return &jsonschema.Gallery{
+		Title:     title,
+		Date:      date,
+		Details:   details,
+		Rating:    rating,
+		Organized: organized,
+		URL:       url,
+		CreatedAt: json.JSONTime{
+			Time: createTime,
+		},
+		UpdatedAt: json.JSONTime{
+			Time: updateTime,
+		},
+	}
+}

-// type basicTestScenario struct {
-// 	input    models.Gallery
-// 	expected *jsonschema.Gallery
-// 	err      bool
-// }
+type basicTestScenario struct {
+	input    models.Gallery
+	expected *jsonschema.Gallery
+	err      bool
+}

-// var scenarios = []basicTestScenario{
-// 	{
-// 		createFullGallery(galleryID),
-// 		createFullJSONGallery(),
-// 		false,
-// 	},
-// }
+var scenarios = []basicTestScenario{
+	{
+		createFullGallery(galleryID),
+		createFullJSONGallery(),
+		false,
+	},
+}

-// func TestToJSON(t *testing.T) {
-// 	for i, s := range scenarios {
-// 		gallery := s.input
-// 		json, err := ToBasicJSON(&gallery)
-
-// 		switch {
-// 		case !s.err && err != nil:
-// 			t.Errorf("[%d] unexpected error: %s", i, err.Error())
-// 		case s.err && err == nil:
-// 			t.Errorf("[%d] expected error not returned", i)
-// 		default:
-// 			assert.Equal(t, s.expected, json, "[%d]", i)
-// 		}
-// 	}
-// }
+func TestToJSON(t *testing.T) {
+	for i, s := range scenarios {
+		gallery := s.input
+		json, err := ToBasicJSON(&gallery)
+
+		switch {
+		case !s.err && err != nil:
+			t.Errorf("[%d] unexpected error: %s", i, err.Error())
+		case s.err && err == nil:
+			t.Errorf("[%d] expected error not returned", i)
+		default:
+			assert.Equal(t, s.expected, json, "[%d]", i)
+		}
+	}
+}

-// func createStudioGallery(studioID int) models.Gallery {
-// 	return models.Gallery{
-// 		StudioID: &studioID,
-// 	}
-// }
+func createStudioGallery(studioID int) models.Gallery {
+	return models.Gallery{
+		StudioID: &studioID,
+	}
+}

-// type stringTestScenario struct {
-// 	input    models.Gallery
-// 	expected string
-// 	err      bool
-// }
+type stringTestScenario struct {
+	input    models.Gallery
+	expected string
+	err      bool
+}

-// var getStudioScenarios = []stringTestScenario{
-// 	{
-// 		createStudioGallery(studioID),
-// 		studioName,
-// 		false,
-// 	},
-// 	{
-// 		createStudioGallery(missingStudioID),
-// 		"",
-// 		false,
-// 	},
-// 	{
-// 		createStudioGallery(errStudioID),
-// 		"",
-// 		true,
-// 	},
-// }
+var getStudioScenarios = []stringTestScenario{
+	{
+		createStudioGallery(studioID),
+		studioName,
+		false,
+	},
+	{
+		createStudioGallery(missingStudioID),
+		"",
+		false,
+	},
+	{
+		createStudioGallery(errStudioID),
+		"",
+		true,
+	},
+}

-// func TestGetStudioName(t *testing.T) {
-// 	mockStudioReader := &mocks.StudioReaderWriter{}
-
-// 	studioErr := errors.New("error getting image")
-
-// 	mockStudioReader.On("Find", testCtx, studioID).Return(&models.Studio{
-// 		Name: models.NullString(studioName),
-// 	}, nil).Once()
-// 	mockStudioReader.On("Find", testCtx, missingStudioID).Return(nil, nil).Once()
-// 	mockStudioReader.On("Find", testCtx, errStudioID).Return(nil, studioErr).Once()
-
-// 	for i, s := range getStudioScenarios {
-// 		gallery := s.input
-// 		json, err := GetStudioName(testCtx, mockStudioReader, &gallery)
-
-// 		switch {
-// 		case !s.err && err != nil:
-// 			t.Errorf("[%d] unexpected error: %s", i, err.Error())
-// 		case s.err && err == nil:
-// 			t.Errorf("[%d] expected error not returned", i)
-// 		default:
-// 			assert.Equal(t, s.expected, json, "[%d]", i)
-// 		}
-// 	}
-
-// 	mockStudioReader.AssertExpectations(t)
-// }
+func TestGetStudioName(t *testing.T) {
+	mockStudioReader := &mocks.StudioReaderWriter{}
+
+	studioErr := errors.New("error getting image")
+
+	mockStudioReader.On("Find", testCtx, studioID).Return(&models.Studio{
+		Name: models.NullString(studioName),
+	}, nil).Once()
+	mockStudioReader.On("Find", testCtx, missingStudioID).Return(nil, nil).Once()
+	mockStudioReader.On("Find", testCtx, errStudioID).Return(nil, studioErr).Once()
+
+	for i, s := range getStudioScenarios {
+		gallery := s.input
+		json, err := GetStudioName(testCtx, mockStudioReader, &gallery)
+
+		switch {
+		case !s.err && err != nil:
+			t.Errorf("[%d] unexpected error: %s", i, err.Error())
+		case s.err && err == nil:
+			t.Errorf("[%d] expected error not returned", i)
+		default:
+			assert.Equal(t, s.expected, json, "[%d]", i)
+		}
+	}
+
+	mockStudioReader.AssertExpectations(t)
+}

View file

@ -3,8 +3,10 @@ package gallery
import ( import (
"context" "context"
"fmt" "fmt"
"path/filepath"
"strings" "strings"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/jsonschema" "github.com/stashapp/stash/pkg/models/jsonschema"
"github.com/stashapp/stash/pkg/performer" "github.com/stashapp/stash/pkg/performer"
@ -18,6 +20,8 @@ type Importer struct {
StudioWriter studio.NameFinderCreator StudioWriter studio.NameFinderCreator
PerformerWriter performer.NameFinderCreator PerformerWriter performer.NameFinderCreator
TagWriter tag.NameFinderCreator TagWriter tag.NameFinderCreator
FileFinder file.Getter
FolderFinder file.FolderGetter
Input jsonschema.Gallery Input jsonschema.Gallery
MissingRefBehaviour models.ImportMissingRefEnum MissingRefBehaviour models.ImportMissingRefEnum
@ -32,6 +36,10 @@ type FullCreatorUpdater interface {
func (i *Importer) PreImport(ctx context.Context) error { func (i *Importer) PreImport(ctx context.Context) error {
i.gallery = i.galleryJSONToGallery(i.Input) i.gallery = i.galleryJSONToGallery(i.Input)
if err := i.populateFilesFolder(ctx); err != nil {
return err
}
if err := i.populateStudio(ctx); err != nil { if err := i.populateStudio(ctx); err != nil {
return err return err
} }
@ -238,31 +246,97 @@ func (i *Importer) createTags(ctx context.Context, names []string) ([]*models.Ta
return ret, nil return ret, nil
} }
func (i *Importer) populateFilesFolder(ctx context.Context) error {
for _, ref := range i.Input.ZipFiles {
path := filepath.FromSlash(ref)
f, err := i.FileFinder.FindByPath(ctx, path)
if err != nil {
return fmt.Errorf("error finding file: %w", err)
}
if f == nil {
return fmt.Errorf("gallery zip file '%s' not found", path)
} else {
i.gallery.Files = append(i.gallery.Files, f)
}
}
if i.Input.FolderPath != "" {
path := filepath.FromSlash(i.Input.FolderPath)
f, err := i.FolderFinder.FindByPath(ctx, path)
if err != nil {
return fmt.Errorf("error finding folder: %w", err)
}
if f == nil {
return fmt.Errorf("gallery folder '%s' not found", path)
} else {
i.gallery.FolderID = &f.ID
}
}
return nil
}
func (i *Importer) PostImport(ctx context.Context, id int) error { func (i *Importer) PostImport(ctx context.Context, id int) error {
return nil return nil
} }
func (i *Importer) Name() string { func (i *Importer) Name() string {
return i.Input.Path if i.Input.Title != "" {
return i.Input.Title
}
if i.Input.FolderPath != "" {
return i.Input.FolderPath
}
if len(i.Input.ZipFiles) > 0 {
return i.Input.ZipFiles[0]
}
return ""
} }
func (i *Importer) FindExistingID(ctx context.Context) (*int, error) {
	var existing []*models.Gallery
	var err error
	switch {
	case len(i.gallery.Files) > 0:
		for _, f := range i.gallery.Files {
			// assign, not declare, so the outer existing is visible after the loop
			existing, err = i.ReaderWriter.FindByFileID(ctx, f.Base().ID)
			if err != nil {
				return nil, err
			}

			if existing != nil {
				break
			}
		}
	case i.gallery.FolderID != nil:
		existing, err = i.ReaderWriter.FindByFolderID(ctx, *i.gallery.FolderID)
	default:
		existing, err = i.ReaderWriter.FindUserGalleryByTitle(ctx, i.gallery.Title)
	}

	if err != nil {
		return nil, err
	}

	if len(existing) > 0 {
		id := existing[0].ID
		return &id, nil
	}

	return nil, nil
}
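// Editorial note, not part of the commit: existing galleries are matched in
// order of specificity: first by any attached file ID, then by folder ID,
// and finally by title for user-created (file-less) galleries.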
func (i *Importer) Create(ctx context.Context) (*int, error) {
	var fileIDs []file.ID
	for _, f := range i.gallery.Files {
		fileIDs = append(fileIDs, f.Base().ID)
	}

	err := i.ReaderWriter.Create(ctx, &i.gallery, fileIDs)
	if err != nil {
		return nil, fmt.Errorf("error creating gallery: %v", err)
	}

View file

@ -1,441 +1,322 @@
package gallery

import (
	"context"
	"errors"
	"testing"
	"time"

	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/models/json"
	"github.com/stashapp/stash/pkg/models/jsonschema"
	"github.com/stashapp/stash/pkg/models/mocks"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
)

var (
	existingStudioID = 101
	existingPerformerID = 103
	existingTagID = 105

	existingStudioName = "existingStudioName"
	existingStudioErr = "existingStudioErr"
	missingStudioName = "missingStudioName"

	existingPerformerName = "existingPerformerName"
	existingPerformerErr = "existingPerformerErr"
	missingPerformerName = "missingPerformerName"

	existingTagName = "existingTagName"
	existingTagErr = "existingTagErr"
	missingTagName = "missingTagName"
)

var testCtx = context.Background()

var (
	createdAt = time.Date(2001, time.January, 2, 1, 2, 3, 4, time.Local)
	updatedAt = time.Date(2002, time.January, 2, 1, 2, 3, 4, time.Local)
)

func TestImporterPreImport(t *testing.T) {
	i := Importer{
		Input: jsonschema.Gallery{
			Title: title,
			Date: date,
			Details: details,
			Rating: rating,
			Organized: organized,
			URL: url,
			CreatedAt: json.JSONTime{
				Time: createdAt,
			},
			UpdatedAt: json.JSONTime{
				Time: updatedAt,
			},
		},
	}

	err := i.PreImport(testCtx)
	assert.Nil(t, err)

	expectedGallery := models.Gallery{
		Title: title,
		Date: &dateObj,
		Details: details,
		Rating: &rating,
		Organized: organized,
		URL: url,
		TagIDs: models.NewRelatedIDs([]int{}),
		PerformerIDs: models.NewRelatedIDs([]int{}),
		CreatedAt: createdAt,
		UpdatedAt: updatedAt,
	}

	assert.Equal(t, expectedGallery, i.gallery)
}

func TestImporterPreImportWithStudio(t *testing.T) {
	studioReaderWriter := &mocks.StudioReaderWriter{}

	i := Importer{
		StudioWriter: studioReaderWriter,
		Input: jsonschema.Gallery{
			Studio: existingStudioName,
		},
	}

	studioReaderWriter.On("FindByName", testCtx, existingStudioName, false).Return(&models.Studio{
		ID: existingStudioID,
	}, nil).Once()
	studioReaderWriter.On("FindByName", testCtx, existingStudioErr, false).Return(nil, errors.New("FindByName error")).Once()

	err := i.PreImport(testCtx)
	assert.Nil(t, err)
	assert.Equal(t, existingStudioID, *i.gallery.StudioID)

	i.Input.Studio = existingStudioErr
	err = i.PreImport(testCtx)
	assert.NotNil(t, err)

	studioReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingStudio(t *testing.T) {
	studioReaderWriter := &mocks.StudioReaderWriter{}

	i := Importer{
		StudioWriter: studioReaderWriter,
		Input: jsonschema.Gallery{
			Studio: missingStudioName,
		},
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
	}

	studioReaderWriter.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Times(3)
	studioReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Studio")).Return(&models.Studio{
		ID: existingStudioID,
	}, nil)

	err := i.PreImport(testCtx)
	assert.NotNil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
	err = i.PreImport(testCtx)
	assert.Nil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
	err = i.PreImport(testCtx)
	assert.Nil(t, err)
	assert.Equal(t, existingStudioID, *i.gallery.StudioID)

	studioReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) {
	studioReaderWriter := &mocks.StudioReaderWriter{}

	i := Importer{
		StudioWriter: studioReaderWriter,
		Input: jsonschema.Gallery{
			Studio: missingStudioName,
		},
		MissingRefBehaviour: models.ImportMissingRefEnumCreate,
	}

	studioReaderWriter.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Once()
	studioReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Studio")).Return(nil, errors.New("Create error"))

	err := i.PreImport(testCtx)
	assert.NotNil(t, err)
}

func TestImporterPreImportWithPerformer(t *testing.T) {
	performerReaderWriter := &mocks.PerformerReaderWriter{}

	i := Importer{
		PerformerWriter: performerReaderWriter,
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
		Input: jsonschema.Gallery{
			Performers: []string{
				existingPerformerName,
			},
		},
	}

	performerReaderWriter.On("FindByNames", testCtx, []string{existingPerformerName}, false).Return([]*models.Performer{
		{
			ID: existingPerformerID,
			Name: models.NullString(existingPerformerName),
		},
	}, nil).Once()
	performerReaderWriter.On("FindByNames", testCtx, []string{existingPerformerErr}, false).Return(nil, errors.New("FindByNames error")).Once()

	err := i.PreImport(testCtx)
	assert.Nil(t, err)
	assert.Equal(t, []int{existingPerformerID}, i.gallery.PerformerIDs.List())

	i.Input.Performers = []string{existingPerformerErr}
	err = i.PreImport(testCtx)
	assert.NotNil(t, err)

	performerReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingPerformer(t *testing.T) {
	performerReaderWriter := &mocks.PerformerReaderWriter{}

	i := Importer{
		PerformerWriter: performerReaderWriter,
		Input: jsonschema.Gallery{
			Performers: []string{
				missingPerformerName,
			},
		},
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
	}

	performerReaderWriter.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Times(3)
	performerReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Performer")).Return(&models.Performer{
		ID: existingPerformerID,
	}, nil)

	err := i.PreImport(testCtx)
	assert.NotNil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
	err = i.PreImport(testCtx)
	assert.Nil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
	err = i.PreImport(testCtx)
	assert.Nil(t, err)
	assert.Equal(t, []int{existingPerformerID}, i.gallery.PerformerIDs.List())

	performerReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingPerformerCreateErr(t *testing.T) {
	performerReaderWriter := &mocks.PerformerReaderWriter{}

	i := Importer{
		PerformerWriter: performerReaderWriter,
		Input: jsonschema.Gallery{
			Performers: []string{
				missingPerformerName,
			},
		},
		MissingRefBehaviour: models.ImportMissingRefEnumCreate,
	}

	performerReaderWriter.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Once()
	performerReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Performer")).Return(nil, errors.New("Create error"))

	err := i.PreImport(testCtx)
	assert.NotNil(t, err)
}

func TestImporterPreImportWithTag(t *testing.T) {
	tagReaderWriter := &mocks.TagReaderWriter{}

	i := Importer{
		TagWriter: tagReaderWriter,
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
		Input: jsonschema.Gallery{
			Tags: []string{
				existingTagName,
			},
		},
	}

	tagReaderWriter.On("FindByNames", testCtx, []string{existingTagName}, false).Return([]*models.Tag{
		{
			ID: existingTagID,
			Name: existingTagName,
		},
	}, nil).Once()
	tagReaderWriter.On("FindByNames", testCtx, []string{existingTagErr}, false).Return(nil, errors.New("FindByNames error")).Once()

	err := i.PreImport(testCtx)
	assert.Nil(t, err)
	assert.Equal(t, []int{existingTagID}, i.gallery.TagIDs.List())

	i.Input.Tags = []string{existingTagErr}
	err = i.PreImport(testCtx)
	assert.NotNil(t, err)

	tagReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingTag(t *testing.T) {
	tagReaderWriter := &mocks.TagReaderWriter{}

	i := Importer{
		TagWriter: tagReaderWriter,
		Input: jsonschema.Gallery{
			Tags: []string{
				missingTagName,
			},
		},
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
	}

	tagReaderWriter.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3)
	tagReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(&models.Tag{
		ID: existingTagID,
	}, nil)

	err := i.PreImport(testCtx)
	assert.NotNil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
	err = i.PreImport(testCtx)
	assert.Nil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
	err = i.PreImport(testCtx)
	assert.Nil(t, err)
	assert.Equal(t, []int{existingTagID}, i.gallery.TagIDs.List())

	tagReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) {
	tagReaderWriter := &mocks.TagReaderWriter{}

	i := Importer{
		TagWriter: tagReaderWriter,
		Input: jsonschema.Gallery{
			Tags: []string{
				missingTagName,
			},
		},
		MissingRefBehaviour: models.ImportMissingRefEnumCreate,
	}

	tagReaderWriter.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once()
	tagReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(nil, errors.New("Create error"))

	err := i.PreImport(testCtx)
	assert.NotNil(t, err)
}
// i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
// err = i.PreImport(testCtx)
// assert.Nil(t, err)
// assert.Equal(t, []int{existingTagID}, i.gallery.TagIDs)
// tagReaderWriter.AssertExpectations(t)
// }
// func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) {
// tagReaderWriter := &mocks.TagReaderWriter{}
// i := Importer{
// TagWriter: tagReaderWriter,
// Input: jsonschema.Gallery{
// Path: path,
// Tags: []string{
// missingTagName,
// },
// },
// MissingRefBehaviour: models.ImportMissingRefEnumCreate,
// }
// tagReaderWriter.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once()
// tagReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(nil, errors.New("Create error"))
// err := i.PreImport(testCtx)
// assert.NotNil(t, err)
// }
// func TestImporterFindExistingID(t *testing.T) {
// readerWriter := &mocks.GalleryReaderWriter{}
// i := Importer{
// ReaderWriter: readerWriter,
// Input: jsonschema.Gallery{
// Path: path,
// Checksum: missingChecksum,
// },
// }
// expectedErr := errors.New("FindBy* error")
// readerWriter.On("FindByChecksum", testCtx, missingChecksum).Return(nil, nil).Once()
// readerWriter.On("FindByChecksum", testCtx, checksum).Return(&models.Gallery{
// ID: existingGalleryID,
// }, nil).Once()
// readerWriter.On("FindByChecksum", testCtx, errChecksum).Return(nil, expectedErr).Once()
// id, err := i.FindExistingID(testCtx)
// assert.Nil(t, id)
// assert.Nil(t, err)
// i.Input.Checksum = checksum
// id, err = i.FindExistingID(testCtx)
// assert.Equal(t, existingGalleryID, *id)
// assert.Nil(t, err)
// i.Input.Checksum = errChecksum
// id, err = i.FindExistingID(testCtx)
// assert.Nil(t, id)
// assert.NotNil(t, err)
// readerWriter.AssertExpectations(t)
// }
// func TestCreate(t *testing.T) {
// readerWriter := &mocks.GalleryReaderWriter{}
// gallery := models.Gallery{
// Title: title,
// }
// galleryErr := models.Gallery{
// Title: galleryNameErr,
// }
// i := Importer{
// ReaderWriter: readerWriter,
// gallery: gallery,
// }
// errCreate := errors.New("Create error")
// readerWriter.On("Create", testCtx, &gallery).Run(func(args mock.Arguments) {
// args.Get(1).(*models.Gallery).ID = galleryID
// }).Return(nil).Once()
// readerWriter.On("Create", testCtx, &galleryErr).Return(errCreate).Once()
// id, err := i.Create(testCtx)
// assert.Equal(t, galleryID, *id)
// assert.Nil(t, err)
// i.gallery = galleryErr
// id, err = i.Create(testCtx)
// assert.Nil(t, id)
// assert.NotNil(t, err)
// readerWriter.AssertExpectations(t)
// }
// func TestUpdate(t *testing.T) {
// readerWriter := &mocks.GalleryReaderWriter{}
// gallery := models.Gallery{
// Title: title,
// }
// i := Importer{
// ReaderWriter: readerWriter,
// gallery: gallery,
// }
// // id needs to be set for the mock input
// gallery.ID = galleryID
// readerWriter.On("Update", testCtx, &gallery).Return(nil, nil).Once()
// err := i.Update(testCtx, galleryID)
// assert.Nil(t, err)
// readerWriter.AssertExpectations(t)
// }

View file

@ -4,6 +4,7 @@ import (
"context" "context"
"strconv" "strconv"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
) )
@ -15,8 +16,12 @@ type CountQueryer interface {
	QueryCount(ctx context.Context, galleryFilter *models.GalleryFilterType, findFilter *models.FindFilterType) (int, error)
}

type Finder interface {
	FindByPath(ctx context.Context, p string) ([]*models.Gallery, error)
	FindUserGalleryByTitle(ctx context.Context, title string) ([]*models.Gallery, error)
	FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Gallery, error)
	FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Gallery, error)
	FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Gallery, error)
}

func CountByPerformerID(ctx context.Context, r CountQueryer, id int) (int, error) {

View file

@ -16,8 +16,7 @@ import (
// const mutexType = "gallery"

type FinderCreatorUpdater interface {
	Finder
	Create(ctx context.Context, newGallery *models.Gallery, fileIDs []file.ID) error
	AddFileID(ctx context.Context, id int, fileID file.ID) error
}

View file

@ -14,7 +14,6 @@ import (
// of cover image.
func ToBasicJSON(image *models.Image) *jsonschema.Image {
	newImageJSON := jsonschema.Image{
		Title: image.Title,
		CreatedAt: json.JSONTime{Time: image.CreatedAt},
		UpdatedAt: json.JSONTime{Time: image.UpdatedAt},
@ -27,23 +26,25 @@ func ToBasicJSON(image *models.Image) *jsonschema.Image {
	newImageJSON.Organized = image.Organized
	newImageJSON.OCounter = image.OCounter

	for _, f := range image.Files {
		newImageJSON.Files = append(newImageJSON.Files, f.Base().Path)
	}

	return &newImageJSON
}

// func getImageFileJSON(image *models.Image) *jsonschema.ImageFile {
// 	ret := &jsonschema.ImageFile{}
// 	f := image.PrimaryFile()
// 	ret.ModTime = json.JSONTime{Time: f.ModTime}
// 	ret.Size = f.Size
// 	ret.Width = f.Width
// 	ret.Height = f.Height
// 	return ret
// }

// GetStudioName returns the name of the provided image's studio. It returns an
// empty string if there is no studio assigned to the image.

View file

@ -1,165 +1,144 @@
package image

import (
	"errors"

	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/models/json"
	"github.com/stashapp/stash/pkg/models/jsonschema"
	"github.com/stashapp/stash/pkg/models/mocks"
	"github.com/stretchr/testify/assert"

	"testing"
	"time"
)

const (
	imageID = 1

	studioID = 4
	missingStudioID = 5
	errStudioID = 6
)

var (
	title = "title"
	rating = 5
	organized = true
	ocounter = 2
)

const (
	studioName = "studioName"
)

var (
	createTime = time.Date(2001, 01, 01, 0, 0, 0, 0, time.UTC)
	updateTime = time.Date(2002, 01, 01, 0, 0, 0, 0, time.UTC)
)

func createFullImage(id int) models.Image {
	return models.Image{
		ID: id,
		Title: title,
		OCounter: ocounter,
		Rating: &rating,
		Organized: organized,
		CreatedAt: createTime,
		UpdatedAt: updateTime,
	}
}

func createFullJSONImage() *jsonschema.Image {
	return &jsonschema.Image{
		Title: title,
		OCounter: ocounter,
		Rating: rating,
		Organized: organized,
		CreatedAt: json.JSONTime{
			Time: createTime,
		},
		UpdatedAt: json.JSONTime{
			Time: updateTime,
		},
	}
}

type basicTestScenario struct {
	input models.Image
	expected *jsonschema.Image
}

var scenarios = []basicTestScenario{
	{
		createFullImage(imageID),
		createFullJSONImage(),
	},
}

func TestToJSON(t *testing.T) {
	for i, s := range scenarios {
		image := s.input
		json := ToBasicJSON(&image)

		assert.Equal(t, s.expected, json, "[%d]", i)
	}
}

func createStudioImage(studioID int) models.Image {
	return models.Image{
		StudioID: &studioID,
	}
}

type stringTestScenario struct {
	input models.Image
	expected string
	err bool
}

var getStudioScenarios = []stringTestScenario{
	{
		createStudioImage(studioID),
		studioName,
		false,
	},
	{
		createStudioImage(missingStudioID),
		"",
		false,
	},
	{
		createStudioImage(errStudioID),
		"",
		true,
	},
}

func TestGetStudioName(t *testing.T) {
	mockStudioReader := &mocks.StudioReaderWriter{}

	studioErr := errors.New("error getting image")

	mockStudioReader.On("Find", testCtx, studioID).Return(&models.Studio{
		Name: models.NullString(studioName),
	}, nil).Once()
	mockStudioReader.On("Find", testCtx, missingStudioID).Return(nil, nil).Once()
	mockStudioReader.On("Find", testCtx, errStudioID).Return(nil, studioErr).Once()

	for i, s := range getStudioScenarios {
		image := s.input
		json, err := GetStudioName(testCtx, mockStudioReader, &image)

		switch {
		case !s.err && err != nil:
			t.Errorf("[%d] unexpected error: %s", i, err.Error())
		case s.err && err == nil:
			t.Errorf("[%d] expected error not returned", i)
		default:
			assert.Equal(t, s.expected, json, "[%d]", i)
		}
	}

	mockStudioReader.AssertExpectations(t)
}

View file

@ -3,8 +3,10 @@ package image
import (
	"context"
	"fmt"
	"path/filepath"
	"strings"

	"github.com/stashapp/stash/pkg/file"
	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/models/jsonschema"
	"github.com/stashapp/stash/pkg/performer"
@ -13,8 +15,9 @@ import (
"github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/tag"
) )
type GalleryChecksumsFinder interface { type GalleryFinder interface {
FindByChecksums(ctx context.Context, checksums []string) ([]*models.Gallery, error) FindByPath(ctx context.Context, p string) ([]*models.Gallery, error)
FindUserGalleryByTitle(ctx context.Context, title string) ([]*models.Gallery, error)
} }
type FullCreatorUpdater interface { type FullCreatorUpdater interface {
@ -24,12 +27,12 @@ type FullCreatorUpdater interface {
type Importer struct {
	ReaderWriter FullCreatorUpdater
	FileFinder file.Getter
	StudioWriter studio.NameFinderCreator
	GalleryFinder GalleryFinder
	PerformerWriter performer.NameFinderCreator
	TagWriter tag.NameFinderCreator
	Input jsonschema.Image
	MissingRefBehaviour models.ImportMissingRefEnum

	ID int
@ -39,6 +42,10 @@ type Importer struct {
func (i *Importer) PreImport(ctx context.Context) error {
	i.image = i.imageJSONToImage(i.Input)

	if err := i.populateFiles(ctx); err != nil {
		return err
	}

	if err := i.populateStudio(ctx); err != nil {
		return err
	}
@ -65,6 +72,12 @@ func (i *Importer) imageJSONToImage(imageJSON jsonschema.Image) models.Image {
		PerformerIDs: models.NewRelatedIDs([]int{}),
		TagIDs: models.NewRelatedIDs([]int{}),
		GalleryIDs: models.NewRelatedIDs([]int{}),

		Title: imageJSON.Title,
		Organized: imageJSON.Organized,
		OCounter: imageJSON.OCounter,
		CreatedAt: imageJSON.CreatedAt.GetTime(),
		UpdatedAt: imageJSON.UpdatedAt.GetTime(),
	}

	if imageJSON.Title != "" {
@ -74,26 +87,27 @@ func (i *Importer) imageJSONToImage(imageJSON jsonschema.Image) models.Image {
		newImage.Rating = &imageJSON.Rating
	}

	return newImage
}
func (i *Importer) populateFiles(ctx context.Context) error {
	for _, ref := range i.Input.Files {
		path := filepath.FromSlash(ref)
		f, err := i.FileFinder.FindByPath(ctx, path)
		if err != nil {
			return fmt.Errorf("error finding file: %w", err)
		}

		if f == nil {
			return fmt.Errorf("image file '%s' not found", path)
		} else {
			i.image.Files = append(i.image.Files, f.(*file.ImageFile))
		}
	}

	return nil
}
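// Editorial note, not part of the commit: the f.(*file.ImageFile) type
// assertion assumes every path listed in the image JSON resolves to an
// image file; a match of any other file type would panic here instead of
// returning an error.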
func (i *Importer) populateStudio(ctx context.Context) error {
	if i.Input.Studio != "" {
		studio, err := i.StudioWriter.FindByName(ctx, i.Input.Studio, false)
@ -136,16 +150,45 @@ func (i *Importer) createStudio(ctx context.Context, name string) (int, error) {
	return created.ID, nil
}
func (i *Importer) locateGallery(ctx context.Context, ref jsonschema.GalleryRef) (*models.Gallery, error) {
	var galleries []*models.Gallery
	var err error
	switch {
	case ref.FolderPath != "":
		galleries, err = i.GalleryFinder.FindByPath(ctx, ref.FolderPath)
	case len(ref.ZipFiles) > 0:
		for _, p := range ref.ZipFiles {
			galleries, err = i.GalleryFinder.FindByPath(ctx, p)
			if err != nil {
				break
			}

			if len(galleries) > 0 {
				break
			}
		}
	case ref.Title != "":
		galleries, err = i.GalleryFinder.FindUserGalleryByTitle(ctx, ref.Title)
	}

	var ret *models.Gallery
	if len(galleries) > 0 {
		ret = galleries[0]
	}

	return ret, err
}
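// Editorial sketch, not part of the commit: a GalleryRef like
// {"folder_path": "galleries/holiday"} is resolved by folder path first,
// then by each listed zip file path, and finally by title; the first
// gallery found wins.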
func (i *Importer) populateGalleries(ctx context.Context) error {
	for _, ref := range i.Input.Galleries {
		gallery, err := i.locateGallery(ctx, ref)
		if err != nil {
			return fmt.Errorf("error finding gallery: %v", err)
		}

		if gallery == nil {
			if i.MissingRefBehaviour == models.ImportMissingRefEnumFail {
				return fmt.Errorf("image gallery '%s' not found", ref.String())
			}

			// we don't create galleries - just ignore
@ -153,7 +196,7 @@ func (i *Importer) populateGalleries(ctx context.Context) error {
				continue
			}
		} else {
			i.image.GalleryIDs.Add(gallery.ID)
		}
	}
@ -242,28 +285,46 @@ func (i *Importer) PostImport(ctx context.Context, id int) error {
}

func (i *Importer) Name() string {
	if i.Input.Title != "" {
		return i.Input.Title
	}

	if len(i.Input.Files) > 0 {
		return i.Input.Files[0]
	}

	return ""
}
func (i *Importer) FindExistingID(ctx context.Context) (*int, error) {
	var existing []*models.Image
	var err error

	for _, f := range i.image.Files {
		existing, err = i.ReaderWriter.FindByFileID(ctx, f.ID)
		if err != nil {
			return nil, err
		}

		if len(existing) > 0 {
			id := existing[0].ID
			return &id, nil
		}
	}

	return nil, nil
}
func (i *Importer) Create(ctx context.Context) (*int, error) {
	var fileIDs []file.ID
	for _, f := range i.image.Files {
		fileIDs = append(fileIDs, f.Base().ID)
	}

	err := i.ReaderWriter.Create(ctx, &models.ImageCreateInput{
		Image: &i.image,
		FileIDs: fileIDs,
	})
	if err != nil {
		return nil, fmt.Errorf("error creating image: %v", err)
	}

View file

@ -1,492 +1,285 @@
package image

import (
	"context"
	"errors"
	"testing"

	"github.com/stashapp/stash/pkg/models"
	"github.com/stashapp/stash/pkg/models/jsonschema"
	"github.com/stashapp/stash/pkg/models/mocks"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
)

var (
	existingStudioID = 101
	existingPerformerID = 103
	existingTagID = 105

	existingStudioName = "existingStudioName"
	existingStudioErr = "existingStudioErr"
	missingStudioName = "missingStudioName"

	existingPerformerName = "existingPerformerName"
	existingPerformerErr = "existingPerformerErr"
	missingPerformerName = "missingPerformerName"

	existingTagName = "existingTagName"
	existingTagErr = "existingTagErr"
	missingTagName = "missingTagName"
)

var testCtx = context.Background()

func TestImporterPreImport(t *testing.T) {
	i := Importer{}

	err := i.PreImport(testCtx)
	assert.Nil(t, err)
}

func TestImporterPreImportWithStudio(t *testing.T) {
	studioReaderWriter := &mocks.StudioReaderWriter{}

	i := Importer{
		StudioWriter: studioReaderWriter,
		Input: jsonschema.Image{
			Studio: existingStudioName,
		},
	}

	studioReaderWriter.On("FindByName", testCtx, existingStudioName, false).Return(&models.Studio{
		ID: existingStudioID,
	}, nil).Once()
	studioReaderWriter.On("FindByName", testCtx, existingStudioErr, false).Return(nil, errors.New("FindByName error")).Once()

	err := i.PreImport(testCtx)
	assert.Nil(t, err)
	assert.Equal(t, existingStudioID, *i.image.StudioID)

	i.Input.Studio = existingStudioErr
	err = i.PreImport(testCtx)
	assert.NotNil(t, err)

	studioReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingStudio(t *testing.T) {
	studioReaderWriter := &mocks.StudioReaderWriter{}

	i := Importer{
		StudioWriter: studioReaderWriter,
		Input: jsonschema.Image{
			Studio: missingStudioName,
		},
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
	}

	studioReaderWriter.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Times(3)
	studioReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Studio")).Return(&models.Studio{
		ID: existingStudioID,
	}, nil)

	err := i.PreImport(testCtx)
	assert.NotNil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
	err = i.PreImport(testCtx)
	assert.Nil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
	err = i.PreImport(testCtx)
	assert.Nil(t, err)
	assert.Equal(t, existingStudioID, *i.image.StudioID)

	studioReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) {
	studioReaderWriter := &mocks.StudioReaderWriter{}

	i := Importer{
		StudioWriter: studioReaderWriter,
		Input: jsonschema.Image{
			Studio: missingStudioName,
		},
		MissingRefBehaviour: models.ImportMissingRefEnumCreate,
	}

	studioReaderWriter.On("FindByName", testCtx, missingStudioName, false).Return(nil, nil).Once()
	studioReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Studio")).Return(nil, errors.New("Create error"))

	err := i.PreImport(testCtx)
	assert.NotNil(t, err)
}

func TestImporterPreImportWithPerformer(t *testing.T) {
	performerReaderWriter := &mocks.PerformerReaderWriter{}

	i := Importer{
		PerformerWriter: performerReaderWriter,
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
		Input: jsonschema.Image{
			Performers: []string{
				existingPerformerName,
			},
		},
	}

	performerReaderWriter.On("FindByNames", testCtx, []string{existingPerformerName}, false).Return([]*models.Performer{
		{
			ID: existingPerformerID,
			Name: models.NullString(existingPerformerName),
		},
	}, nil).Once()
	performerReaderWriter.On("FindByNames", testCtx, []string{existingPerformerErr}, false).Return(nil, errors.New("FindByNames error")).Once()

	err := i.PreImport(testCtx)
	assert.Nil(t, err)
	assert.Equal(t, []int{existingPerformerID}, i.image.PerformerIDs.List())

	i.Input.Performers = []string{existingPerformerErr}
	err = i.PreImport(testCtx)
	assert.NotNil(t, err)

	performerReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingPerformer(t *testing.T) {
	performerReaderWriter := &mocks.PerformerReaderWriter{}

	i := Importer{
		PerformerWriter: performerReaderWriter,
		Input: jsonschema.Image{
			Performers: []string{
				missingPerformerName,
			},
		},
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
	}

	performerReaderWriter.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Times(3)
	performerReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Performer")).Return(&models.Performer{
		ID: existingPerformerID,
	}, nil)

	err := i.PreImport(testCtx)
	assert.NotNil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
	err = i.PreImport(testCtx)
	assert.Nil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
	err = i.PreImport(testCtx)
	assert.Nil(t, err)
	assert.Equal(t, []int{existingPerformerID}, i.image.PerformerIDs.List())

	performerReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingPerformerCreateErr(t *testing.T) {
	performerReaderWriter := &mocks.PerformerReaderWriter{}

	i := Importer{
		PerformerWriter: performerReaderWriter,
		Input: jsonschema.Image{
			Performers: []string{
				missingPerformerName,
			},
		},
		MissingRefBehaviour: models.ImportMissingRefEnumCreate,
	}

	performerReaderWriter.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Once()
	performerReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Performer")).Return(nil, errors.New("Create error"))

	err := i.PreImport(testCtx)
	assert.NotNil(t, err)
}

func TestImporterPreImportWithTag(t *testing.T) {
	tagReaderWriter := &mocks.TagReaderWriter{}

	i := Importer{
		TagWriter: tagReaderWriter,
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
		Input: jsonschema.Image{
			Tags: []string{
				existingTagName,
			},
		},
	}

	tagReaderWriter.On("FindByNames", testCtx, []string{existingTagName}, false).Return([]*models.Tag{
		{
			ID: existingTagID,
			Name: existingTagName,
		},
	}, nil).Once()
	tagReaderWriter.On("FindByNames", testCtx, []string{existingTagErr}, false).Return(nil, errors.New("FindByNames error")).Once()

	err := i.PreImport(testCtx)
	assert.Nil(t, err)
	assert.Equal(t, []int{existingTagID}, i.image.TagIDs.List())

	i.Input.Tags = []string{existingTagErr}
	err = i.PreImport(testCtx)
	assert.NotNil(t, err)

	tagReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingTag(t *testing.T) {
	tagReaderWriter := &mocks.TagReaderWriter{}

	i := Importer{
		TagWriter: tagReaderWriter,
		Input: jsonschema.Image{
			Tags: []string{
				missingTagName,
			},
		},
		MissingRefBehaviour: models.ImportMissingRefEnumFail,
	}

	tagReaderWriter.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3)
	tagReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(&models.Tag{
		ID: existingTagID,
	}, nil)

	err := i.PreImport(testCtx)
	assert.NotNil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
	err = i.PreImport(testCtx)
	assert.Nil(t, err)

	i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
	err = i.PreImport(testCtx)
	assert.Nil(t, err)
	assert.Equal(t, []int{existingTagID}, i.image.TagIDs.List())

	tagReaderWriter.AssertExpectations(t)
}

func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) {
	tagReaderWriter := &mocks.TagReaderWriter{}

	i := Importer{
		TagWriter: tagReaderWriter,
		Input: jsonschema.Image{
			Tags: []string{
				missingTagName,
			},
		},
		MissingRefBehaviour: models.ImportMissingRefEnumCreate,
	}

	tagReaderWriter.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once()
	tagReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(nil, errors.New("Create error"))

	err := i.PreImport(testCtx)
	assert.NotNil(t, err)
}
// },
// },
// MissingRefBehaviour: models.ImportMissingRefEnumCreate,
// }
// performerReaderWriter.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Once()
// performerReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Performer")).Return(nil, errors.New("Create error"))
// err := i.PreImport(testCtx)
// assert.NotNil(t, err)
// }
// func TestImporterPreImportWithTag(t *testing.T) {
// tagReaderWriter := &mocks.TagReaderWriter{}
// i := Importer{
// TagWriter: tagReaderWriter,
// Path: path,
// MissingRefBehaviour: models.ImportMissingRefEnumFail,
// Input: jsonschema.Image{
// Tags: []string{
// existingTagName,
// },
// },
// }
// tagReaderWriter.On("FindByNames", testCtx, []string{existingTagName}, false).Return([]*models.Tag{
// {
// ID: existingTagID,
// Name: existingTagName,
// },
// }, nil).Once()
// tagReaderWriter.On("FindByNames", testCtx, []string{existingTagErr}, false).Return(nil, errors.New("FindByNames error")).Once()
// err := i.PreImport(testCtx)
// assert.Nil(t, err)
// assert.Equal(t, []int{existingTagID}, i.image.TagIDs)
// i.Input.Tags = []string{existingTagErr}
// err = i.PreImport(testCtx)
// assert.NotNil(t, err)
// tagReaderWriter.AssertExpectations(t)
// }
// func TestImporterPreImportWithMissingTag(t *testing.T) {
// tagReaderWriter := &mocks.TagReaderWriter{}
// i := Importer{
// Path: path,
// TagWriter: tagReaderWriter,
// Input: jsonschema.Image{
// Tags: []string{
// missingTagName,
// },
// },
// MissingRefBehaviour: models.ImportMissingRefEnumFail,
// }
// tagReaderWriter.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3)
// tagReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(&models.Tag{
// ID: existingTagID,
// }, nil)
// err := i.PreImport(testCtx)
// assert.NotNil(t, err)
// i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
// err = i.PreImport(testCtx)
// assert.Nil(t, err)
// i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
// err = i.PreImport(testCtx)
// assert.Nil(t, err)
// assert.Equal(t, []int{existingTagID}, i.image.TagIDs)
// tagReaderWriter.AssertExpectations(t)
// }
// func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) {
// tagReaderWriter := &mocks.TagReaderWriter{}
// i := Importer{
// TagWriter: tagReaderWriter,
// Path: path,
// Input: jsonschema.Image{
// Tags: []string{
// missingTagName,
// },
// },
// MissingRefBehaviour: models.ImportMissingRefEnumCreate,
// }
// tagReaderWriter.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once()
// tagReaderWriter.On("Create", testCtx, mock.AnythingOfType("models.Tag")).Return(nil, errors.New("Create error"))
// err := i.PreImport(testCtx)
// assert.NotNil(t, err)
// }
// func TestImporterFindExistingID(t *testing.T) {
// readerWriter := &mocks.ImageReaderWriter{}
// i := Importer{
// ReaderWriter: readerWriter,
// Path: path,
// Input: jsonschema.Image{
// Checksum: missingChecksum,
// },
// }
// expectedErr := errors.New("FindBy* error")
// readerWriter.On("FindByChecksum", testCtx, missingChecksum).Return(nil, nil).Once()
// readerWriter.On("FindByChecksum", testCtx, checksum).Return(&models.Image{
// ID: existingImageID,
// }, nil).Once()
// readerWriter.On("FindByChecksum", testCtx, errChecksum).Return(nil, expectedErr).Once()
// id, err := i.FindExistingID(testCtx)
// assert.Nil(t, id)
// assert.Nil(t, err)
// i.Input.Checksum = checksum
// id, err = i.FindExistingID(testCtx)
// assert.Equal(t, existingImageID, *id)
// assert.Nil(t, err)
// i.Input.Checksum = errChecksum
// id, err = i.FindExistingID(testCtx)
// assert.Nil(t, id)
// assert.NotNil(t, err)
// readerWriter.AssertExpectations(t)
// }
// func TestCreate(t *testing.T) {
// readerWriter := &mocks.ImageReaderWriter{}
// image := models.Image{
// Title: title,
// }
// imageErr := models.Image{
// Title: imageNameErr,
// }
// i := Importer{
// ReaderWriter: readerWriter,
// image: image,
// }
// errCreate := errors.New("Create error")
// readerWriter.On("Create", testCtx, &image).Run(func(args mock.Arguments) {
// args.Get(1).(*models.Image).ID = imageID
// }).Return(nil).Once()
// readerWriter.On("Create", testCtx, &imageErr).Return(errCreate).Once()
// id, err := i.Create(testCtx)
// assert.Equal(t, imageID, *id)
// assert.Nil(t, err)
// assert.Equal(t, imageID, i.ID)
// i.image = imageErr
// id, err = i.Create(testCtx)
// assert.Nil(t, id)
// assert.NotNil(t, err)
// readerWriter.AssertExpectations(t)
// }
// func TestUpdate(t *testing.T) {
// readerWriter := &mocks.ImageReaderWriter{}
// image := models.Image{
// Title: title,
// }
// imageErr := models.Image{
// Title: imageNameErr,
// }
// i := Importer{
// ReaderWriter: readerWriter,
// image: image,
// }
// errUpdate := errors.New("Update error")
// // id needs to be set for the mock input
// image.ID = imageID
// readerWriter.On("Update", testCtx, &image).Return(nil).Once()
// err := i.Update(testCtx, imageID)
// assert.Nil(t, err)
// assert.Equal(t, imageID, i.ID)
// i.image = imageErr
// // need to set id separately
// imageErr.ID = errImageID
// readerWriter.On("Update", testCtx, &imageErr).Return(errUpdate).Once()
// err = i.Update(testCtx, errImageID)
// assert.NotNil(t, err)
// readerWriter.AssertExpectations(t)
// }

View file

@ -0,0 +1,156 @@
package jsonschema
import (
"bytes"
"errors"
"fmt"
"io/ioutil"
"os"
"path"
"strings"
jsoniter "github.com/json-iterator/go"
"github.com/stashapp/stash/pkg/hash/md5"
"github.com/stashapp/stash/pkg/models/json"
)
const (
DirEntryTypeFolder = "folder"
DirEntryTypeVideo = "video"
DirEntryTypeImage = "image"
DirEntryTypeFile = "file"
)
type DirEntry interface {
IsFile() bool
Filename() string
DirEntry() *BaseDirEntry
}
type BaseDirEntry struct {
ZipFile string `json:"zip_file,omitempty"`
ModTime json.JSONTime `json:"mod_time"`
Type string `json:"type,omitempty"`
Path string `json:"path,omitempty"`
CreatedAt json.JSONTime `json:"created_at,omitempty"`
UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
}
func (f *BaseDirEntry) DirEntry() *BaseDirEntry {
return f
}
func (f *BaseDirEntry) IsFile() bool {
return false
}
func (f *BaseDirEntry) Filename() string {
// prefix with the path depth so that we can import lower-level files/folders first
depth := strings.Count(f.Path, string("/"))
// hash the full path for a unique filename
hash := md5.FromString(f.Path)
basename := path.Base(f.Path)
return fmt.Sprintf("%02x.%s.%s.json", depth, basename, hash)
}
type BaseFile struct {
BaseDirEntry
Fingerprints []Fingerprint `json:"fingerprints,omitempty"`
Size int64 `json:"size"`
}
func (f *BaseFile) IsFile() bool {
return true
}
type Fingerprint struct {
Type string `json:"type,omitempty"`
Fingerprint interface{} `json:"fingerprint,omitempty"`
}
type VideoFile struct {
*BaseFile
Format string `json:"format,omitempty"`
Width int `json:"width,omitempty"`
Height int `json:"height,omitempty"`
Duration float64 `json:"duration,omitempty"`
VideoCodec string `json:"video_codec,omitempty"`
AudioCodec string `json:"audio_codec,omitempty"`
FrameRate float64 `json:"frame_rate,omitempty"`
BitRate int64 `json:"bitrate,omitempty"`
Interactive bool `json:"interactive,omitempty"`
InteractiveSpeed *int `json:"interactive_speed,omitempty"`
}
type ImageFile struct {
*BaseFile
Format string `json:"format,omitempty"`
Width int `json:"width,omitempty"`
Height int `json:"height,omitempty"`
}
func LoadFileFile(filePath string) (DirEntry, error) {
r, err := os.Open(filePath)
if err != nil {
return nil, err
}
defer r.Close()
data, err := ioutil.ReadAll(r)
if err != nil {
return nil, err
}
var json = jsoniter.ConfigCompatibleWithStandardLibrary
jsonParser := json.NewDecoder(bytes.NewReader(data))
var bf BaseDirEntry
if err := jsonParser.Decode(&bf); err != nil {
return nil, err
}
jsonParser = json.NewDecoder(bytes.NewReader(data))
switch bf.Type {
case DirEntryTypeFolder:
return &bf, nil
case DirEntryTypeVideo:
var vf VideoFile
if err := jsonParser.Decode(&vf); err != nil {
return nil, err
}
return &vf, nil
case DirEntryTypeImage:
var imf ImageFile
if err := jsonParser.Decode(&imf); err != nil {
return nil, err
}
return &imf, nil
case DirEntryTypeFile:
var bff BaseFile
if err := jsonParser.Decode(&bff); err != nil {
return nil, err
}
return &bff, nil
default:
return nil, errors.New("unknown file type")
}
}
func SaveFileFile(filePath string, file DirEntry) error {
if file == nil {
return fmt.Errorf("file must not be nil")
}
return marshalToFile(filePath, file)
}
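
The depth prefix in `Filename` exists so that, when the exported JSON files are read back in lexical order, entries closer to the filesystem root are imported before anything nested inside them. A minimal sketch of the resulting name, assuming `md5.FromString` is simply the hex-encoded MD5 digest of its input (the path below is illustrative):

```go
package main

import (
	"crypto/md5"
	"fmt"
	"path"
	"strings"
)

// fromString mirrors the assumed behaviour of pkg/hash/md5.FromString:
// the hex-encoded MD5 digest of the input string.
func fromString(s string) string {
	return fmt.Sprintf("%x", md5.Sum([]byte(s)))
}

func main() {
	p := "/media/archives/pack.zip" // hypothetical path
	depth := strings.Count(p, "/")  // 3 for this path
	// same format string as BaseDirEntry.Filename
	fmt.Printf("%02x.%s.%s.json\n", depth, path.Base(p), fromString(p))
	// output has the shape: 03.pack.zip.<32 hex chars>.json
}
```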

View file

@ -0,0 +1,56 @@
package jsonschema
import (
"fmt"
"os"
"path"
"strings"
jsoniter "github.com/json-iterator/go"
"github.com/stashapp/stash/pkg/hash/md5"
"github.com/stashapp/stash/pkg/models/json"
)
type Folder struct {
BaseDirEntry
Path string `json:"path,omitempty"`
CreatedAt json.JSONTime `json:"created_at,omitempty"`
UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
}
func (f *Folder) Filename() string {
// prefix with the path depth so that we can import lower-level folders first
depth := strings.Count(f.Path, string("/"))
// hash the full path for a unique filename
hash := md5.FromString(f.Path)
basename := path.Base(f.Path)
return fmt.Sprintf("%2x.%s.%s.json", depth, basename, hash)
}
func LoadFolderFile(filePath string) (*Folder, error) {
var folder Folder
file, err := os.Open(filePath)
if err != nil {
return nil, err
}
defer file.Close()
var json = jsoniter.ConfigCompatibleWithStandardLibrary
jsonParser := json.NewDecoder(file)
err = jsonParser.Decode(&folder)
if err != nil {
return nil, err
}
return &folder, nil
}
func SaveFolderFile(filePath string, folder *Folder) error {
if folder == nil {
return fmt.Errorf("folder must not be nil")
}
return marshalToFile(filePath, folder)
}

View file

@ -3,27 +3,37 @@ package jsonschema
 import (
 	"fmt"
 	"os"
+	"strings"

 	jsoniter "github.com/json-iterator/go"
 	"github.com/stashapp/stash/pkg/models/json"
 )

 type Gallery struct {
-	Path        string        `json:"path,omitempty"`
-	Checksum    string        `json:"checksum,omitempty"`
-	Zip         bool          `json:"zip,omitempty"`
+	ZipFiles   []string      `json:"zip_files,omitempty"`
+	FolderPath string        `json:"folder_path,omitempty"`
 	Title      string        `json:"title,omitempty"`
 	URL        string        `json:"url,omitempty"`
 	Date       string        `json:"date,omitempty"`
 	Details    string        `json:"details,omitempty"`
 	Rating     int           `json:"rating,omitempty"`
 	Organized  bool          `json:"organized,omitempty"`
 	Studio     string        `json:"studio,omitempty"`
 	Performers []string      `json:"performers,omitempty"`
 	Tags       []string      `json:"tags,omitempty"`
-	FileModTime json.JSONTime `json:"file_mod_time,omitempty"`
 	CreatedAt  json.JSONTime `json:"created_at,omitempty"`
 	UpdatedAt  json.JSONTime `json:"updated_at,omitempty"`
 }

+func (s Gallery) Filename(basename string, hash string) string {
+	ret := basename
+	if ret != "" {
+		ret += "."
+	}
+	ret += hash
+	return ret + ".json"
+}
+
 func LoadGalleryFile(filePath string) (*Gallery, error) {
@ -48,3 +58,23 @@ func SaveGalleryFile(filePath string, gallery *Gallery) error {
 	}
 	return marshalToFile(filePath, gallery)
 }
+
+// GalleryRef is used to identify a Gallery.
+// Only one field should be populated.
+type GalleryRef struct {
+	ZipFiles   []string `json:"zip_files,omitempty"`
+	FolderPath string   `json:"folder_path,omitempty"`
+	// Title is used only if FolderPath and ZipFiles are empty
+	Title string `json:"title,omitempty"`
+}
+
+func (r GalleryRef) String() string {
+	switch {
+	case r.FolderPath != "":
+		return "{ folder: " + r.FolderPath + " }"
+	case len(r.ZipFiles) > 0:
+		return "{ zipFiles: [" + strings.Join(r.ZipFiles, ", ") + "] }"
+	default:
+		return "{ title: " + r.Title + " }"
+	}
+}
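
Because only one `GalleryRef` field should be set, a scene's or image's `galleries` array ends up containing objects of one of three shapes. An illustrative fragment (paths and title are made up):

```json
"galleries": [
  { "folder_path": "/media/galleries/holiday" },
  { "zip_files": ["/media/galleries/pack.zip"] },
  { "title": "My user-created gallery" }
]
```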

View file

@ -8,28 +8,33 @@ import (
 	"github.com/stashapp/stash/pkg/models/json"
 )

-type ImageFile struct {
-	ModTime json.JSONTime `json:"mod_time,omitempty"`
-	Size    int64         `json:"size"`
-	Width   int           `json:"width"`
-	Height  int           `json:"height"`
-}
-
 type Image struct {
 	Title      string        `json:"title,omitempty"`
-	Checksum   string        `json:"checksum,omitempty"`
 	Studio     string        `json:"studio,omitempty"`
 	Rating     int           `json:"rating,omitempty"`
 	Organized  bool          `json:"organized,omitempty"`
 	OCounter   int           `json:"o_counter,omitempty"`
-	Galleries  []string      `json:"galleries,omitempty"`
+	Galleries  []GalleryRef  `json:"galleries,omitempty"`
 	Performers []string      `json:"performers,omitempty"`
 	Tags       []string      `json:"tags,omitempty"`
-	File       *ImageFile    `json:"file,omitempty"`
+	Files      []string      `json:"files,omitempty"`
 	CreatedAt  json.JSONTime `json:"created_at,omitempty"`
 	UpdatedAt  json.JSONTime `json:"updated_at,omitempty"`
 }

+func (s Image) Filename(basename string, hash string) string {
+	ret := s.Title
+	if ret == "" {
+		ret = basename
+	}
+	if hash != "" {
+		ret += "." + hash
+	}
+	return ret + ".json"
+}
+
 func LoadImageFile(filePath string) (*Image, error) {
 	var image Image
 	file, err := os.Open(filePath)

View file

@ -1,47 +0,0 @@
package jsonschema
import (
"fmt"
"os"
jsoniter "github.com/json-iterator/go"
)
type PathNameMapping struct {
Path string `json:"path,omitempty"`
Name string `json:"name,omitempty"`
Checksum string `json:"checksum"`
}
type Mappings struct {
Tags []PathNameMapping `json:"tags"`
Performers []PathNameMapping `json:"performers"`
Studios []PathNameMapping `json:"studios"`
Movies []PathNameMapping `json:"movies"`
Galleries []PathNameMapping `json:"galleries"`
Scenes []PathNameMapping `json:"scenes"`
Images []PathNameMapping `json:"images"`
}
func LoadMappingsFile(filePath string) (*Mappings, error) {
var mappings Mappings
file, err := os.Open(filePath)
if err != nil {
return nil, err
}
defer file.Close()
var json = jsoniter.ConfigCompatibleWithStandardLibrary
jsonParser := json.NewDecoder(file)
err = jsonParser.Decode(&mappings)
if err != nil {
return nil, err
}
return &mappings, nil
}
func SaveMappingsFile(filePath string, mappings *Mappings) error {
if mappings == nil {
return fmt.Errorf("mappings must not be nil")
}
return marshalToFile(filePath, mappings)
}

View file

@ -26,6 +26,10 @@ type Movie struct {
 	UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
 }

+func (s Movie) Filename() string {
+	return s.Name + ".json"
+}
+
 // Backwards Compatible synopsis for the movie
 type MovieSynopsisBC struct {
 	Synopsis string `json:"sypnopsis,omitempty"`

View file

@ -40,6 +40,10 @@ type Performer struct {
 	IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"`
 }

+func (s Performer) Filename() string {
+	return s.Name + ".json"
+}
+
 func LoadPerformerFile(filePath string) (*Performer, error) {
 	var performer Performer
 	file, err := os.Open(filePath)

View file

@ -38,9 +38,6 @@ type SceneMovie struct {
 type Scene struct {
 	Title    string `json:"title,omitempty"`
-	Checksum string `json:"checksum,omitempty"`
-	OSHash   string `json:"oshash,omitempty"`
-	Phash    string `json:"phash,omitempty"`
 	Studio   string `json:"studio,omitempty"`
 	URL      string `json:"url,omitempty"`
 	Date     string `json:"date,omitempty"`
@ -48,18 +45,31 @@ type Scene struct {
 	Organized  bool          `json:"organized,omitempty"`
 	OCounter   int           `json:"o_counter,omitempty"`
 	Details    string        `json:"details,omitempty"`
-	Galleries  []string      `json:"galleries,omitempty"`
+	Galleries  []GalleryRef  `json:"galleries,omitempty"`
 	Performers []string      `json:"performers,omitempty"`
 	Movies     []SceneMovie  `json:"movies,omitempty"`
 	Tags       []string      `json:"tags,omitempty"`
 	Markers    []SceneMarker `json:"markers,omitempty"`
-	File       *SceneFile    `json:"file,omitempty"`
+	Files      []string      `json:"files,omitempty"`
 	Cover      string        `json:"cover,omitempty"`
 	CreatedAt  json.JSONTime `json:"created_at,omitempty"`
 	UpdatedAt  json.JSONTime `json:"updated_at,omitempty"`
 	StashIDs   []models.StashID `json:"stash_ids,omitempty"`
 }

+func (s Scene) Filename(basename string, hash string) string {
+	ret := s.Title
+	if ret == "" {
+		ret = basename
+	}
+	if hash != "" {
+		ret += "." + hash
+	}
+	return ret + ".json"
+}
+
 func LoadSceneFile(filePath string) (*Scene, error) {
 	var scene Scene
 	file, err := os.Open(filePath)

View file

@ -23,6 +23,10 @@ type Studio struct {
 	IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"`
 }

+func (s Studio) Filename() string {
+	return s.Name + ".json"
+}
+
 func LoadStudioFile(filePath string) (*Studio, error) {
 	var studio Studio
 	file, err := os.Open(filePath)

View file

@ -18,6 +18,10 @@ type Tag struct {
 	UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
 }

+func (s Tag) Filename() string {
+	return s.Name + ".json"
+}
+
 func LoadTagFile(filePath string) (*Tag, error) {
 	var tag Tag
 	file, err := os.Open(filePath)

View file

@ -10,8 +10,7 @@ import (
 type JSONPaths struct {
 	Metadata string

-	MappingsFile string
 	ScrapedFile string

 	Performers string
 	Scenes     string
@ -20,12 +19,12 @@ type JSONPaths struct {
 	Studios string
 	Tags    string
 	Movies  string
+	Files   string
 }

 func newJSONPaths(baseDir string) *JSONPaths {
 	jp := JSONPaths{}
 	jp.Metadata = baseDir
-	jp.MappingsFile = filepath.Join(baseDir, "mappings.json")
 	jp.ScrapedFile = filepath.Join(baseDir, "scraped.json")
 	jp.Performers = filepath.Join(baseDir, "performers")
 	jp.Scenes = filepath.Join(baseDir, "scenes")
@ -34,6 +33,7 @@ func newJSONPaths(baseDir string) *JSONPaths {
 	jp.Studios = filepath.Join(baseDir, "studios")
 	jp.Movies = filepath.Join(baseDir, "movies")
 	jp.Tags = filepath.Join(baseDir, "tags")
+	jp.Files = filepath.Join(baseDir, "files")
 	return &jp
 }
@ -42,6 +42,18 @@ func GetJSONPaths(baseDir string) *JSONPaths {
 	return jp
 }

+func EmptyJSONDirs(baseDir string) {
+	jsonPaths := GetJSONPaths(baseDir)
+	_ = fsutil.EmptyDir(jsonPaths.Scenes)
+	_ = fsutil.EmptyDir(jsonPaths.Images)
+	_ = fsutil.EmptyDir(jsonPaths.Galleries)
+	_ = fsutil.EmptyDir(jsonPaths.Performers)
+	_ = fsutil.EmptyDir(jsonPaths.Studios)
+	_ = fsutil.EmptyDir(jsonPaths.Movies)
+	_ = fsutil.EmptyDir(jsonPaths.Tags)
+	_ = fsutil.EmptyDir(jsonPaths.Files)
+}
+
 func EnsureJSONDirs(baseDir string) {
 	jsonPaths := GetJSONPaths(baseDir)
 	if err := fsutil.EnsureDir(jsonPaths.Metadata); err != nil {
@ -68,32 +80,7 @@ func EnsureJSONDirs(baseDir string) {
 	if err := fsutil.EnsureDir(jsonPaths.Tags); err != nil {
 		logger.Warnf("couldn't create directories for Tags: %v", err)
 	}
+	if err := fsutil.EnsureDir(jsonPaths.Files); err != nil {
+		logger.Warnf("couldn't create directories for Files: %v", err)
+	}
 }
-
-func (jp *JSONPaths) PerformerJSONPath(checksum string) string {
-	return filepath.Join(jp.Performers, checksum+".json")
-}
-
-func (jp *JSONPaths) SceneJSONPath(checksum string) string {
-	return filepath.Join(jp.Scenes, checksum+".json")
-}
-
-func (jp *JSONPaths) ImageJSONPath(checksum string) string {
-	return filepath.Join(jp.Images, checksum+".json")
-}
-
-func (jp *JSONPaths) GalleryJSONPath(checksum string) string {
-	return filepath.Join(jp.Galleries, checksum+".json")
-}
-
-func (jp *JSONPaths) StudioJSONPath(checksum string) string {
-	return filepath.Join(jp.Studios, checksum+".json")
-}
-
-func (jp *JSONPaths) TagJSONPath(checksum string) string {
-	return filepath.Join(jp.Tags, checksum+".json")
-}
-
-func (jp *JSONPaths) MovieJSONPath(checksum string) string {
-	return filepath.Join(jp.Movies, checksum+".json")
-}
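
Taken together with `EnsureJSONDirs`, an export rooted at a hypothetical `metadata` base directory now has roughly this layout (the `mappings.json` of earlier versions is gone, and per-file JSON lives under the new `files` directory):

```
metadata/
├── scraped.json
├── files/
├── galleries/
├── images/
├── movies/
├── performers/
├── scenes/
├── studios/
└── tags/
```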

View file

@ -45,18 +45,6 @@ func ToBasicJSON(ctx context.Context, reader CoverGetter, scene *models.Scene) (
 		UpdatedAt: json.JSONTime{Time: scene.UpdatedAt},
 	}

-	// if scene.Checksum != nil {
-	// 	newSceneJSON.Checksum = *scene.Checksum
-	// }
-
-	// if scene.OSHash != nil {
-	// 	newSceneJSON.OSHash = *scene.OSHash
-	// }
-
-	// if scene.Phash != nil {
-	// 	newSceneJSON.Phash = utils.PhashToString(*scene.Phash)
-	// }
-
 	if scene.Date != nil {
 		newSceneJSON.Date = scene.Date.String()
 	}
@ -68,7 +56,9 @@ func ToBasicJSON(ctx context.Context, reader CoverGetter, scene *models.Scene) (
 	newSceneJSON.Organized = scene.Organized
 	newSceneJSON.OCounter = scene.OCounter

-	newSceneJSON.File = getSceneFileJSON(scene)
+	for _, f := range scene.Files {
+		newSceneJSON.Files = append(newSceneJSON.Files, f.Base().Path)
+	}

 	cover, err := reader.GetCover(ctx, scene.ID)
 	if err != nil {
@ -93,52 +83,52 @@ func ToBasicJSON(ctx context.Context, reader CoverGetter, scene *models.Scene) (
 	return &newSceneJSON, nil
 }

-func getSceneFileJSON(scene *models.Scene) *jsonschema.SceneFile {
-	ret := &jsonschema.SceneFile{}
+// func getSceneFileJSON(scene *models.Scene) *jsonschema.SceneFile {
+// 	ret := &jsonschema.SceneFile{}

 	// TODO
 	// if scene.FileModTime != nil {
 	// 	ret.ModTime = json.JSONTime{Time: *scene.FileModTime}
 	// }

 	// if scene.Size != nil {
 	// 	ret.Size = *scene.Size
 	// }

 	// if scene.Duration != nil {
 	// 	ret.Duration = getDecimalString(*scene.Duration)
 	// }

 	// if scene.VideoCodec != nil {
 	// 	ret.VideoCodec = *scene.VideoCodec
 	// }

 	// if scene.AudioCodec != nil {
 	// 	ret.AudioCodec = *scene.AudioCodec
 	// }

 	// if scene.Format != nil {
 	// 	ret.Format = *scene.Format
 	// }

 	// if scene.Width != nil {
 	// 	ret.Width = *scene.Width
 	// }

 	// if scene.Height != nil {
 	// 	ret.Height = *scene.Height
 	// }

 	// if scene.Framerate != nil {
 	// 	ret.Framerate = getDecimalString(*scene.Framerate)
 	// }

 	// if scene.Bitrate != nil {
 	// 	ret.Bitrate = int(*scene.Bitrate)
 	// }

-	return ret
-}
+// 	return ret
+// }

 // GetStudioName returns the name of the provided scene's studio. It returns an
 // empty string if there is no studio assigned to the scene.

File diff suppressed because it is too large

View file

@ -3,8 +3,10 @@ package scene
 import (
 	"context"
 	"fmt"
+	"path/filepath"
 	"strings"

+	"github.com/stashapp/stash/pkg/file"
 	"github.com/stashapp/stash/pkg/gallery"
 	"github.com/stashapp/stash/pkg/models"
 	"github.com/stashapp/stash/pkg/models/jsonschema"
@ -24,13 +26,13 @@ type FullCreatorUpdater interface {
 type Importer struct {
 	ReaderWriter        FullCreatorUpdater
+	FileFinder          file.Getter
 	StudioWriter        studio.NameFinderCreator
-	GalleryWriter       gallery.ChecksumsFinder
+	GalleryFinder       gallery.Finder
 	PerformerWriter     performer.NameFinderCreator
 	MovieWriter         movie.NameFinderCreator
 	TagWriter           tag.NameFinderCreator
 	Input               jsonschema.Scene
-	Path                string
 	MissingRefBehaviour models.ImportMissingRefEnum
 	FileNamingAlgorithm models.HashAlgorithm
@ -42,6 +44,10 @@ type Importer struct {
 func (i *Importer) PreImport(ctx context.Context) error {
 	i.scene = i.sceneJSONToScene(i.Input)

+	if err := i.populateFiles(ctx); err != nil {
+		return err
+	}
+
 	if err := i.populateStudio(ctx); err != nil {
 		return err
 	}
@ -86,21 +92,6 @@ func (i *Importer) sceneJSONToScene(sceneJSON jsonschema.Scene) models.Scene {
 		StashIDs: models.NewRelatedStashIDs(sceneJSON.StashIDs),
 	}

-	// if sceneJSON.Checksum != "" {
-	// 	newScene.Checksum = &sceneJSON.Checksum
-	// }
-	// if sceneJSON.OSHash != "" {
-	// 	newScene.OSHash = &sceneJSON.OSHash
-	// }
-	// if sceneJSON.Phash != "" {
-	// 	hash, err := strconv.ParseUint(sceneJSON.Phash, 16, 64)
-	// 	if err == nil {
-	// 		v := int64(hash)
-	// 		newScene.Phash = &v
-	// 	}
-	// }
-
 	if sceneJSON.Date != "" {
 		d := models.NewDate(sceneJSON.Date)
 		newScene.Date = &d
@ -114,42 +105,27 @@ func (i *Importer) sceneJSONToScene(sceneJSON jsonschema.Scene) models.Scene {
 	newScene.CreatedAt = sceneJSON.CreatedAt.GetTime()
 	newScene.UpdatedAt = sceneJSON.UpdatedAt.GetTime()

-	// if sceneJSON.File != nil {
-	// 	if sceneJSON.File.Size != "" {
-	// 		newScene.Size = &sceneJSON.File.Size
-	// 	}
-	// 	if sceneJSON.File.Duration != "" {
-	// 		duration, _ := strconv.ParseFloat(sceneJSON.File.Duration, 64)
-	// 		newScene.Duration = &duration
-	// 	}
-	// 	if sceneJSON.File.VideoCodec != "" {
-	// 		newScene.VideoCodec = &sceneJSON.File.VideoCodec
-	// 	}
-	// 	if sceneJSON.File.AudioCodec != "" {
-	// 		newScene.AudioCodec = &sceneJSON.File.AudioCodec
-	// 	}
-	// 	if sceneJSON.File.Format != "" {
-	// 		newScene.Format = &sceneJSON.File.Format
-	// 	}
-	// 	if sceneJSON.File.Width != 0 {
-	// 		newScene.Width = &sceneJSON.File.Width
-	// 	}
-	// 	if sceneJSON.File.Height != 0 {
-	// 		newScene.Height = &sceneJSON.File.Height
-	// 	}
-	// 	if sceneJSON.File.Framerate != "" {
-	// 		framerate, _ := strconv.ParseFloat(sceneJSON.File.Framerate, 64)
-	// 		newScene.Framerate = &framerate
-	// 	}
-	// 	if sceneJSON.File.Bitrate != 0 {
-	// 		v := int64(sceneJSON.File.Bitrate)
-	// 		newScene.Bitrate = &v
-	// 	}
-	// }
-
 	return newScene
 }

+func (i *Importer) populateFiles(ctx context.Context) error {
+	for _, ref := range i.Input.Files {
+		path := filepath.FromSlash(ref)
+		f, err := i.FileFinder.FindByPath(ctx, path)
+		if err != nil {
+			return fmt.Errorf("error finding file: %w", err)
+		}
+
+		if f == nil {
+			return fmt.Errorf("scene file '%s' not found", path)
+		} else {
+			i.scene.Files = append(i.scene.Files, f.(*file.VideoFile))
+		}
+	}
+
+	return nil
+}
+
 func (i *Importer) populateStudio(ctx context.Context) error {
 	if i.Input.Studio != "" {
 		studio, err := i.StudioWriter.FindByName(ctx, i.Input.Studio, false)
@ -192,33 +168,50 @@ func (i *Importer) createStudio(ctx context.Context, name string) (int, error) {
 	return created.ID, nil
 }

+func (i *Importer) locateGallery(ctx context.Context, ref jsonschema.GalleryRef) (*models.Gallery, error) {
+	var galleries []*models.Gallery
+	var err error
+	switch {
+	case ref.FolderPath != "":
+		galleries, err = i.GalleryFinder.FindByPath(ctx, ref.FolderPath)
+	case len(ref.ZipFiles) > 0:
+		for _, p := range ref.ZipFiles {
+			galleries, err = i.GalleryFinder.FindByPath(ctx, p)
+			if err != nil {
+				break
+			}
+
+			if len(galleries) > 0 {
+				break
+			}
+		}
+	case ref.Title != "":
+		galleries, err = i.GalleryFinder.FindUserGalleryByTitle(ctx, ref.Title)
+	}
+
+	var ret *models.Gallery
+	if len(galleries) > 0 {
+		ret = galleries[0]
+	}
+
+	return ret, err
+}
+
 func (i *Importer) populateGalleries(ctx context.Context) error {
-	if len(i.Input.Galleries) > 0 {
-		checksums := i.Input.Galleries
-		galleries, err := i.GalleryWriter.FindByChecksums(ctx, checksums)
+	for _, ref := range i.Input.Galleries {
+		gallery, err := i.locateGallery(ctx, ref)
 		if err != nil {
 			return err
 		}

-		var pluckedChecksums []string
-		for _, gallery := range galleries {
-			pluckedChecksums = append(pluckedChecksums, gallery.Checksum())
-		}
-
-		missingGalleries := stringslice.StrFilter(checksums, func(checksum string) bool {
-			return !stringslice.StrInclude(pluckedChecksums, checksum)
-		})
-
-		if len(missingGalleries) > 0 {
+		if gallery == nil {
 			if i.MissingRefBehaviour == models.ImportMissingRefEnumFail {
-				return fmt.Errorf("scene galleries [%s] not found", strings.Join(missingGalleries, ", "))
+				return fmt.Errorf("scene gallery '%s' not found", ref.String())
 			}

 			// we don't create galleries - just ignore
-		}
-
-		for _, o := range galleries {
-			i.scene.GalleryIDs.Add(o.ID)
+		} else {
+			i.scene.GalleryIDs.Add(gallery.ID)
 		}
 	}
@ -366,37 +359,42 @@ func (i *Importer) PostImport(ctx context.Context, id int) error {
 }

 func (i *Importer) Name() string {
-	return i.Path
+	if i.Input.Title != "" {
+		return i.Input.Title
+	}
+
+	if len(i.Input.Files) > 0 {
+		return i.Input.Files[0]
+	}
+
+	return ""
 }

 func (i *Importer) FindExistingID(ctx context.Context) (*int, error) {
-	// TODO
-	// var existing []*models.Scene
-	// var err error
-	// switch i.FileNamingAlgorithm {
-	// case models.HashAlgorithmMd5:
-	// 	existing, err = i.ReaderWriter.FindByChecksum(ctx, i.Input.Checksum)
-	// case models.HashAlgorithmOshash:
-	// 	existing, err = i.ReaderWriter.FindByOSHash(ctx, i.Input.OSHash)
-	// default:
-	// 	panic("unknown file naming algorithm")
-	// }
-
-	// if err != nil {
-	// 	return nil, err
-	// }
-
-	// if len(existing) > 0 {
-	// 	id := existing[0].ID
-	// 	return &id, nil
-	// }
+	var existing []*models.Scene
+	var err error
+
+	for _, f := range i.scene.Files {
+		existing, err = i.ReaderWriter.FindByFileID(ctx, f.ID)
+		if err != nil {
+			return nil, err
+		}
+
+		if len(existing) > 0 {
+			id := existing[0].ID
+			return &id, nil
+		}
+	}

 	return nil, nil
 }

 func (i *Importer) Create(ctx context.Context) (*int, error) {
-	if err := i.ReaderWriter.Create(ctx, &i.scene, nil); err != nil {
+	var fileIDs []file.ID
+	for _, f := range i.scene.Files {
+		fileIDs = append(fileIDs, f.Base().ID)
+	}
+
+	if err := i.ReaderWriter.Create(ctx, &i.scene, fileIDs); err != nil {
 		return nil, fmt.Errorf("error creating scene: %v", err)
 	}
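
Putting the importer pieces together, a minimal scene JSON it can resolve might look like the following sketch. All values are hypothetical, and the path in `files` must already exist in the file store (i.e. the file/folder JSON is imported first), since `populateFiles` only looks paths up and never creates them:

```json
{
  "title": "Example Scene",
  "studio": "Example Studio",
  "galleries": [
    { "folder_path": "/media/galleries/example" }
  ],
  "files": [
    "/media/scenes/example.mp4"
  ],
  "created_at": "2022-08-30T12:00:00+10:00",
  "updated_at": "2022-08-30T12:00:00+10:00"
}
```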

File diff suppressed because it is too large

View file

@ -173,8 +173,8 @@ type fileQueryRow struct {
 	ParentFolderID null.Int  `db:"parent_folder_id"`
 	Size           null.Int  `db:"size"`
 	ModTime        null.Time `db:"mod_time"`
-	CreatedAt      null.Time `db:"created_at"`
-	UpdatedAt      null.Time `db:"updated_at"`
+	CreatedAt      null.Time `db:"file_created_at"`
+	UpdatedAt      null.Time `db:"file_updated_at"`

 	ZipBasename   null.String `db:"zip_basename"`
 	ZipFolderPath null.String `db:"zip_folder_path"`
@ -445,8 +445,8 @@ func (qb *FileStore) selectDataset() *goqu.SelectDataset {
 		table.Col("parent_folder_id"),
 		table.Col("size"),
 		table.Col("mod_time"),
-		table.Col("created_at"),
-		table.Col("updated_at"),
+		table.Col("created_at").As("file_created_at"),
+		table.Col("updated_at").As("file_updated_at"),
 		folderTable.Col("path").As("parent_folder_path"),
 		fingerprintTable.Col("type").As("fingerprint_type"),
 		fingerprintTable.Col("fingerprint"),
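
A plausible reason for the aliases: `selectDataset` joins `files` against `folders`, and more than one joined table carries `created_at`/`updated_at` columns, so unqualified names would collide in the scanned row. The generated SQL presumably takes a shape along these lines (a sketch only, not the exact goqu output):

```sql
SELECT
    files.parent_folder_id,
    files.size,
    files.mod_time,
    files.created_at AS file_created_at,
    files.updated_at AS file_updated_at,
    folders.path AS parent_folder_path
FROM files
LEFT JOIN folders ON folders.id = files.parent_folder_id;
```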

View file

@ -443,7 +443,7 @@ func (qb *GalleryStore) FindByPath(ctx context.Context, p string) ([]*models.Gal
 	sq := dialect.From(table).LeftJoin(
 		galleriesFilesJoinTable,
 		goqu.On(galleriesFilesJoinTable.Col(galleryIDColumn).Eq(table.Col(idColumn))),
-	).InnerJoin(
+	).LeftJoin(
 		filesTable,
 		goqu.On(filesTable.Col(idColumn).Eq(galleriesFilesJoinTable.Col(fileIDColumn))),
 	).LeftJoin(
@ -518,6 +518,26 @@ func (qb *GalleryStore) CountByImageID(ctx context.Context, imageID int) (int, e
 	return count(ctx, q)
 }

+func (qb *GalleryStore) FindUserGalleryByTitle(ctx context.Context, title string) ([]*models.Gallery, error) {
+	table := qb.table()
+
+	sq := dialect.From(table).LeftJoin(
+		galleriesFilesJoinTable,
+		goqu.On(galleriesFilesJoinTable.Col(galleryIDColumn).Eq(table.Col(idColumn))),
+	).Select(table.Col(idColumn)).Where(
+		table.Col("folder_id").IsNull(),
+		galleriesFilesJoinTable.Col("file_id").IsNull(),
+		table.Col("title").Eq(title),
+	)
+
+	ret, err := qb.findBySubquery(ctx, sq)
+	if err != nil {
+		return nil, fmt.Errorf("getting user galleries for title %s: %w", title, err)
+	}
+
+	return ret, nil
+}
+
 func (qb *GalleryStore) Count(ctx context.Context) (int, error) {
 	q := dialect.Select(goqu.COUNT("*")).From(qb.table())
 	return count(ctx, q)
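
For reference, the subquery built here should translate to SQL roughly as below: a gallery counts as user-created when it has neither a backing folder nor any associated files. Table and column names follow the identifiers used above; the exact goqu output may differ:

```sql
SELECT galleries.id
FROM galleries
LEFT JOIN galleries_files ON galleries_files.gallery_id = galleries.id
WHERE galleries.folder_id IS NULL
  AND galleries_files.file_id IS NULL
  AND galleries.title = ?;
```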

View file

@ -6,9 +6,9 @@ After migrating, please run a scan on your entire library to populate missing da
 Please report all issues to the following Github issue: https://github.com/stashapp/stash/issues/2737

-### 💥 Known issues
-* Import/export functionality is currently disabled. Needs further design.
+### 💥 Known issues and other changes
 * Missing covers are not currently regenerated. Need to consider further, especially around scene cover redesign.
+* Import/export schema has changed and is incompatible with the previous version.

 ### ✨ New Features
 * Added support for identical files. Identical files are assigned to the same scene/gallery/image and can be viewed in File Info. ([#2676](https://github.com/stashapp/stash/pull/2676))

View file

@ -2,16 +2,29 @@
 The metadata given to Stash can be exported into the JSON format. This structure can be modified, or replicated by other means. The resulting data can then be imported again, giving the possibility for automatic scraping of all kinds. The format of this metadata bulk is a folder structure, containing the following folders:

-* `downloads`
+* `files`
 * `galleries`
+* `images`
 * `performers`
 * `scenes`
 * `studios`
 * `movies`

-Additionally, it contains a `mappings.json` file.
-The mappings file contains a reference to all files within the folders, by including their checksum. All files in the aforementioned folders are named by their checksum (like `967ddf2e028f10fc8d36901833c25732.json`), which (at least in the case of galleries and scenes) is generated from the file that this metadata relates to. The algorithm for the checksum is MD5.
+# File naming
+
+When exported, files are named with different formats depending on the object type:
+
+| Type | Format |
+|------|--------|
+| Files/Folders | `<path depth in hex, two character width>.<basename>.<hash>.json` |
+| Galleries | `<first zip filename>.<path hash>.json` or `<folder basename>.<path hash>.json` or `<title>.json` |
+| Images | `<title or first file basename>.<hash>.json` |
+| Performers | `<name>.json` |
+| Scenes | `<title or first file basename>.<hash>.json` |
+| Studios | `<name>.json` |
+| Movies | `<name>.json` |
+
+Note that the file naming is not significant when importing. All json files will be read from the subdirectories.
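
For instance, an export under this scheme might contain names like the following (paths and hashes are made up for illustration):

```
files/02.archives.5a105e8b9d40e1329780d62ea2265d8a.json
files/03.pack.zip.ad0234829205b9033196ba818f7a872b.json
scenes/My Scene.0cc175b9c0f1b6a831c399e269772661.json
performers/Jane Example.json
```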
 # Content of the json files
@ -19,7 +32,7 @@ In the following, the values of the according jsons will be shown. If the value
 The json values are given as strings, if not stated otherwise. Every new line will stand for a new value in the json. If the value is a list of objects, the values of that object will be shown indented.

-If a value is empty in any but the `mappings.json` file, it can be left out of the file entirely. In the `mappings.json` however, all values must be present, if there are no objects of a type (for example, no performers), the value is simply null.
+If a value is empty in any file, it can be left out of the file entirely.

 Many files have an `created_at` and `updated_at`, both are kept in the following format:
 ```
 YYYY-MM-DDThh:mm:ssTZD
@ -29,22 +42,6 @@ Example:
 "created_at": "2019-05-03T21:36:58+01:00"
 ```

-## `mappings.json`
-```
-performers
-	name
-	checksum
-studios
-	name
-	checksum
-galleries
-	path
-	checksum
-scenes
-	path
-	checksum
-```
-
 ## Performer
 ```
 name
@ -112,100 +109,110 @@ created_at
 updated_at
 ```
-## Gallery
-No files of this kind are generated yet.
+## Image
+```
+title
+studio
+rating (integer)
+performers (list of strings, performers name)
+tags (list of strings)
+files (list of path strings)
+galleries
+	zip_files (list of path strings)
+	folder_path
+	title (for user-created gallery)
+created_at
+updated_at
+```
+
+## Gallery
+```
+title
+studio
+url
+date
+rating (integer)
+details
+performers (list of strings, performers name)
+tags (list of strings)
+zip_files (list of path strings)
+folder_path
+created_at
+updated_at
+```
+
+## Files
+
+### Folder
+```
+zip_file (path to containing zip file)
+mod_time
+type (= folder)
+path
+created_at
+updated_at
+```
+
+### Video file
+```
+zip_file (path to containing zip file)
+mod_time
+type (= video)
+path
+fingerprints
+	type
+	fingerprint
+size
+format
+width
+height
+duration
+video_codec
+audio_codec
+frame_rate
+bitrate
+interactive (bool)
+interactive_speed (integer)
+created_at
+updated_at
+```
+
+### Image file
+```
+zip_file (path to containing zip file)
+mod_time
+type (= image)
+path
+fingerprints
+	type
+	fingerprint
+size
+format
+width
+height
+created_at
+updated_at
+```
+
+### Other files
+```
+zip_file (path to containing zip file)
+mod_time
+type (= file)
+path
+fingerprints
+	type
+	fingerprint
+size
+created_at
+updated_at
+```
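
As a concrete illustration, a video file entry under `files/` would then look something like this (all values hypothetical):

```json
{
  "mod_time": "2022-08-30T12:00:00+10:00",
  "type": "video",
  "path": "/media/scenes/example.mp4",
  "fingerprints": [
    { "type": "oshash", "fingerprint": "a1b2c3d4e5f60708" },
    { "type": "md5", "fingerprint": "0cc175b9c0f1b6a831c399e269772661" }
  ],
  "size": 123456789,
  "format": "mp4",
  "width": 1920,
  "height": 1080,
  "duration": 123.45,
  "video_codec": "h264",
  "audio_codec": "aac",
  "frame_rate": 29.97,
  "bitrate": 4000000,
  "interactive": false,
  "created_at": "2022-08-30T12:00:00+10:00",
  "updated_at": "2022-08-30T12:00:00+10:00"
}
```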
 # In JSON format

 For those preferring the json-format, defined [here](https://json-schema.org/), the following format may be more interesting:

-## mappings.json
-```json
-{
-    "$schema": "http://json-schema.org/draft-07/schema#",
-    "$id": "https://github.com/stashapp/stash/wiki/JSON-Specification/mappings.json",
-    "title": "mappings",
-    "description": "The base file for the metadata. Referring to all other files with names, as well as providing the path to files.",
-    "type": "object",
-    "properties": {
-        "performers": {
-            "description": "Link to the performers files along with names",
-            "type": "array",
-            "items": {
-                "type": "object",
-                "properties": {
-                    "name": {
-                        "type": "string"
-                    },
-                    "checksum": {
-                        "type": "string"
-                    }
-                },
-                "required": ["name", "checksum"]
-            },
-            "minItems": 0,
-            "uniqueItems": true
-        },
-        "studios": {
-            "description": "Link to the studio files along with names",
-            "type": "array",
-            "items": {
-                "type": "object",
-                "properties": {
-                    "name": {
-                        "type": "string"
-                    },
-                    "checksum": {
-                        "type": "string"
-                    }
-                },
-                "required": ["name", "checksum"]
-            },
-            "minItems": 0,
-            "uniqueItems": true
-        },
-        "galleries": {
-            "description": "Link to the gallery files along with the path to the content",
-            "type": "array",
-            "items": {
-                "type": "object",
-                "properties": {
-                    "path": {
-                        "type": "string"
-                    },
-                    "checksum": {
-                        "type": "string"
-                    }
-                },
-                "required": ["path", "checksum"]
-            },
-            "minItems": 0,
-            "uniqueItems": true
-        },
-        "scenes": {
-            "description": "Link to the scene files along with the path to the content",
-            "type": "array",
-            "items": {
-                "type": "object",
-                "properties": {
-                    "path": {
-                        "type": "string"
-                    },
-                    "checksum": {
-                        "type": "string"
-                    }
-                },
-                "required": ["path", "checksum"]
-            },
-            "minItems": 0,
-            "uniqueItems": true
-        }
-    },
-    "required": ["performers", "studios", "galleries", "scenes"]
-}
-```
-
 ## performer.json
 ``` json
@ -439,45 +446,14 @@ For those preferring the json-format, defined [here](https://json-schema.org/),
             "minItems": 1,
             "uniqueItems": true
         },
-        "file": {
-            "description": "Some technical data about the scenes file.",
-            "type": "object",
-            "properties": {
-                "size": {
-                    "description": "The size of the file in bytes",
-                    "type": "string"
-                },
-                "duration": {
-                    "description": "Duration of the scene in seconds. It is given with after comma values, such as 10.0 or 17.5",
-                    "type": "string"
-                },
-                "video_codec": {
-                    "description": "The coding of the video part of the scene file. An example would be h264",
-                    "type": "string"
-                },
-                "audio_codec": {
-                    "description": "The coding of the audio part of the scene file. An example would be aac",
-                    "type": "string"
-                },
-                "width": {
-                    "description": "The width of the scene in pixels",
-                    "type": "integer"
-                },
-                "height": {
-                    "description": "The height of the scene in pixels",
-                    "type": "integer"
-                },
-                "framerate": {
-                    "description": "Framerate of the scene. It is given with after comma values, such as 29.95",
-                    "type": "string"
-                },
-                "bitrate": {
-                    "description": "The bitrate of the video, in bits",
-                    "type": "integer"
-                }
-            },
-            "required": ["size", "duration", "video_codec", "audio_codec", "height", "width", "framerate", "bitrate"]
+        "files": {
+            "description": "A list of paths of the files for this scene",
+            "type": "array",
+            "items": {
+                "type": "string"
+            },
+            "minItems": 1,
+            "uniqueItems": true
         },
         "created_at": {
             "description": "The time this studios data was added to the database. Format is YYYY-MM-DDThh:mm:ssTZD",
@ -491,7 +467,3 @@ For those preferring the json-format, defined [here](https://json-schema.org/),
     "required": ["files", "created_at", "updated_at"]
 }
 ```
-
-## Gallery
-No files of this kind are created here yet

View file

@ -9,10 +9,10 @@ Please report all issues to the following Github issue: https://github.com/stash
 ### **Warning:** if you are upgrading from an older `files-refactor` build, you will need to re-migrate your system from a schema version 31 database.

 ### 💥 Known issues
-* Import/export functionality is currently disabled. Needs further design.
 * Missing covers are not currently regenerated. Need to consider further, especially around scene cover redesign.

 ### Other changes:
+* Import/export schema has changed and is incompatible with the previous version.
 * Added support for filtering and sorting by file count. ([#2744](https://github.com/stashapp/stash/pull/2744))
 * Changelog has been moved from the stats page to a section in the Settings page.
 * Object titles are now displayed as the file basename if the title is not explicitly set. The `Don't include file extension as part of the title` scan flag is no longer supported.