Mirror of https://github.com/stashapp/stash.git (synced 2025-12-06 08:26:00 +01:00)
[Files Refactor] Performance tuning (#2784)

* Improve image query performance
* Tune queries
* Fix db generator
* Don't show release notes in setup
* Further tune indexes
* Log when creating screenshot
parent abb574205a
commit bc47932343
20 changed files with 810 additions and 354 deletions
@@ -27,8 +27,8 @@ const testName = "Foo's Bar"
const existingStudioName = "ExistingStudio"
const existingStudioSceneName = testName + ".dontChangeStudio.mp4"
const existingStudioImageName = testName + ".dontChangeStudio.mp4"
const existingStudioGalleryName = testName + ".dontChangeStudio.mp4"
const existingStudioImageName = testName + ".dontChangeStudio.png"
const existingStudioGalleryName = testName + ".dontChangeStudio.zip"
var existingStudioID int
@@ -47,7 +47,7 @@ func (s *Cleaner) Clean(ctx context.Context, options CleanOptions, progress *job
}
if err := j.execute(ctx); err != nil {
logger.Errorf("error cleaning files: %w", err)
logger.Errorf("error cleaning files: %v", err)
return
}
}
@@ -97,7 +97,6 @@ type ImageReader interface {
FindByChecksum(ctx context.Context, checksum string) ([]*Image, error)
FindByGalleryID(ctx context.Context, galleryID int) ([]*Image, error)
CountByGalleryID(ctx context.Context, galleryID int) (int, error)
FindByPath(ctx context.Context, path string) ([]*Image, error)
Count(ctx context.Context) (int, error)
Size(ctx context.Context) (float64, error)
All(ctx context.Context) ([]*Image, error)
@@ -32,6 +32,8 @@ func (g Generator) Screenshot(ctx context.Context, input string, hash string, vi
}
}
logger.Infof("Creating screenshot for %s", input)
at := screenshotDurationProportion * videoDuration
if options.At != nil {
at = *options.At
@@ -738,7 +738,7 @@ func (qb *FileStore) makeFilter(ctx context.Context, fileFilter *models.FileFilt
query.not(qb.makeFilter(ctx, fileFilter.Not))
}
query.handleCriterion(ctx, pathCriterionHandler(fileFilter.Path, "folders.path", "files.basename"))
query.handleCriterion(ctx, pathCriterionHandler(fileFilter.Path, "folders.path", "files.basename", nil))
return query
}
@@ -155,7 +155,7 @@ func Test_fileFileStore_Create(t *testing.T) {
CreatedAt: createdAt,
UpdatedAt: updatedAt,
},
false,
true,
},
{
"empty basename",

@@ -376,14 +376,14 @@ func Test_fileStore_Update(t *testing.T) {
CreatedAt: createdAt,
UpdatedAt: updatedAt,
},
false,
true,
},
{
"clear zip",
&file.BaseFile{
ID: fileIDs[fileIdxInZip],
Path: getFilePath(folderIdxWithFiles, getFileBaseName(fileIdxZip)),
Basename: getFileBaseName(fileIdxZip),
Path: getFilePath(folderIdxWithFiles, getFileBaseName(fileIdxZip)+".renamed"),
Basename: getFileBaseName(fileIdxZip) + ".renamed",
ParentFolderID: folderIDs[folderIdxWithFiles],
},
false,
@@ -420,9 +420,12 @@ func stringCriterionHandler(c *models.StringCriterionInput, column string) crite
}
}
func pathCriterionHandler(c *models.StringCriterionInput, pathColumn string, basenameColumn string) criterionHandlerFunc {
func pathCriterionHandler(c *models.StringCriterionInput, pathColumn string, basenameColumn string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc {
return func(ctx context.Context, f *filterBuilder) {
if c != nil {
if addJoinFn != nil {
addJoinFn(f)
}
addWildcards := true
not := false
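Note: the recurring pattern in this commit is threading an optional join callback through criterion handlers (pathCriterionHandler above, and resolutionCriterionHandler later), so a store only joins the wide query views or the files/folders tables when the criterion is actually set. The sketch below is illustrative only and uses simplified stand-in types rather than the real stash filterBuilder; names such as addFoldersJoin are hypothetical.

package main

import (
	"context"
	"fmt"
)

// Simplified stand-ins for the real stash types; illustrative only.
type filterBuilder struct{ joins, whereClauses []string }

func (f *filterBuilder) addInnerJoin(table, on string) {
	f.joins = append(f.joins, fmt.Sprintf("INNER JOIN %s ON %s", table, on))
}

func (f *filterBuilder) addWhere(clause string) {
	f.whereClauses = append(f.whereClauses, clause)
}

type criterionHandlerFunc func(ctx context.Context, f *filterBuilder)

// pathCriterion mimics the new signature: the join is only added when the
// criterion is present and an addJoinFn was supplied, so unfiltered queries
// never pay for it.
func pathCriterion(value string, pathColumn string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc {
	return func(ctx context.Context, f *filterBuilder) {
		if value == "" {
			return // no criterion: no join, no where clause
		}
		if addJoinFn != nil {
			addJoinFn(f)
		}
		f.addWhere(fmt.Sprintf("%s LIKE '%%%s%%'", pathColumn, value))
	}
}

func main() {
	f := &filterBuilder{}
	// hypothetical store-side callback that joins the folders table on demand
	addFoldersJoin := func(f *filterBuilder) {
		f.addInnerJoin("folders", "files.parent_folder_id = folders.id")
	}
	pathCriterion("holiday", "folders.path", addFoldersJoin)(context.Background(), f)
	fmt.Println(f.joins, f.whereClauses)
}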
@@ -374,10 +374,8 @@ func (qb *GalleryStore) findBySubquery(ctx context.Context, sq *goqu.SelectDatas
}
func (qb *GalleryStore) FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Gallery, error) {
table := qb.queryTable()
sq := dialect.From(table).Select(table.Col(idColumn)).Where(
table.Col("file_id").Eq(fileID),
sq := dialect.From(galleriesFilesJoinTable).Select(galleriesFilesJoinTable.Col(galleryIDColumn)).Where(
galleriesFilesJoinTable.Col(fileIDColumn).Eq(fileID),
)
ret, err := qb.findBySubquery(ctx, sq)

@@ -396,18 +394,23 @@ func (qb *GalleryStore) CountByFileID(ctx context.Context, fileID file.ID) (int,
}
func (qb *GalleryStore) FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Gallery, error) {
table := qb.queryTable()
fingerprintTable := fingerprintTableMgr.table
var ex []exp.Expression
for _, v := range fp {
ex = append(ex, goqu.And(
table.Col("fingerprint_type").Eq(v.Type),
table.Col("fingerprint").Eq(v.Fingerprint),
fingerprintTable.Col("type").Eq(v.Type),
fingerprintTable.Col("fingerprint").Eq(v.Fingerprint),
))
}
sq := dialect.From(table).Select(table.Col(idColumn)).Where(goqu.Or(ex...))
sq := dialect.From(galleriesFilesJoinTable).
InnerJoin(
fingerprintTable,
goqu.On(fingerprintTable.Col(fileIDColumn).Eq(galleriesFilesJoinTable.Col(fileIDColumn))),
).
Select(galleriesFilesJoinTable.Col(galleryIDColumn)).Where(goqu.Or(ex...))
ret, err := qb.findBySubquery(ctx, sq)
if err != nil {

@@ -418,50 +421,55 @@ func (qb *GalleryStore) FindByFingerprints(ctx context.Context, fp []file.Finger
}
func (qb *GalleryStore) FindByChecksum(ctx context.Context, checksum string) ([]*models.Gallery, error) {
table := galleriesQueryTable
sq := dialect.From(table).Select(table.Col(idColumn)).Where(
table.Col("fingerprint_type").Eq(file.FingerprintTypeMD5),
table.Col("fingerprint").Eq(checksum),
)
ret, err := qb.findBySubquery(ctx, sq)
if err != nil {
return nil, fmt.Errorf("getting gallery by checksum %s: %w", checksum, err)
}
return ret, nil
return qb.FindByFingerprints(ctx, []file.Fingerprint{
{
Type: file.FingerprintTypeMD5,
Fingerprint: checksum,
},
})
}
func (qb *GalleryStore) FindByChecksums(ctx context.Context, checksums []string) ([]*models.Gallery, error) {
table := galleriesQueryTable
fingerprints := make([]file.Fingerprint, len(checksums))
sq := dialect.From(table).Select(table.Col(idColumn)).Where(
table.Col("fingerprint_type").Eq(file.FingerprintTypeMD5),
table.Col("fingerprint").In(checksums),
)
ret, err := qb.findBySubquery(ctx, sq)
if err != nil {
return nil, fmt.Errorf("getting gallery by checksums: %w", err)
for i, c := range checksums {
fingerprints[i] = file.Fingerprint{
Type: file.FingerprintTypeMD5,
Fingerprint: c,
}
}
return ret, nil
return qb.FindByFingerprints(ctx, fingerprints)
}
func (qb *GalleryStore) FindByPath(ctx context.Context, p string) ([]*models.Gallery, error) {
table := galleriesQueryTable
table := qb.table()
filesTable := fileTableMgr.table
fileFoldersTable := folderTableMgr.table.As("file_folders")
foldersTable := folderTableMgr.table
basename := filepath.Base(p)
dir, _ := path(filepath.Dir(p)).Value()
pp, _ := path(p).Value()
sq := dialect.From(table).Select(table.Col(idColumn)).Where(
sq := dialect.From(table).LeftJoin(
galleriesFilesJoinTable,
goqu.On(galleriesFilesJoinTable.Col(galleryIDColumn).Eq(table.Col(idColumn))),
).InnerJoin(
filesTable,
goqu.On(filesTable.Col(idColumn).Eq(galleriesFilesJoinTable.Col(fileIDColumn))),
).LeftJoin(
fileFoldersTable,
goqu.On(fileFoldersTable.Col(idColumn).Eq(filesTable.Col("parent_folder_id"))),
).LeftJoin(
foldersTable,
goqu.On(foldersTable.Col(idColumn).Eq(table.Col("folder_id"))),
).Select(table.Col(idColumn)).Where(
goqu.Or(
goqu.And(
table.Col("parent_folder_path").Eq(dir),
table.Col("basename").Eq(basename),
fileFoldersTable.Col("path").Eq(dir),
filesTable.Col("basename").Eq(basename),
),
table.Col("folder_path").Eq(pp),
foldersTable.Col("path").Eq(pp),
),
)

@@ -474,7 +482,7 @@ func (qb *GalleryStore) FindByPath(ctx context.Context, p string) ([]*models.Gal
}
func (qb *GalleryStore) FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Gallery, error) {
table := galleriesQueryTable
table := qb.table()
sq := dialect.From(table).Select(table.Col(idColumn)).Where(
table.Col("folder_id").Eq(folderID),

@@ -489,10 +497,8 @@ func (qb *GalleryStore) FindByFolderID(ctx context.Context, folderID file.Folder
}
func (qb *GalleryStore) FindBySceneID(ctx context.Context, sceneID int) ([]*models.Gallery, error) {
table := galleriesQueryTable
sq := dialect.From(table).Select(table.Col(idColumn)).Where(
table.Col("scene_id").Eq(sceneID),
sq := dialect.From(galleriesScenesJoinTable).Select(galleriesScenesJoinTable.Col(galleryIDColumn)).Where(
galleriesScenesJoinTable.Col(sceneIDColumn).Eq(sceneID),
)
ret, err := qb.findBySubquery(ctx, sq)

@@ -504,13 +510,8 @@ func (qb *GalleryStore) FindBySceneID(ctx context.Context, sceneID int) ([]*mode
}
func (qb *GalleryStore) FindByImageID(ctx context.Context, imageID int) ([]*models.Gallery, error) {
table := galleriesQueryTable
sq := dialect.From(table).Select(table.Col(idColumn)).InnerJoin(
galleriesImagesJoinTable,
goqu.On(table.Col(idColumn).Eq(galleriesImagesJoinTable.Col(galleryIDColumn))),
).Where(
galleriesImagesJoinTable.Col("image_id").Eq(imageID),
sq := dialect.From(galleriesImagesJoinTable).Select(galleriesImagesJoinTable.Col(galleryIDColumn)).Where(
galleriesImagesJoinTable.Col(imageIDColumn).Eq(imageID),
)
ret, err := qb.findBySubquery(ctx, sq)

@@ -602,7 +603,7 @@ func (qb *GalleryStore) makeFilter(ctx context.Context, galleryFilter *models.Ga
}
}))
query.handleCriterion(ctx, pathCriterionHandler(galleryFilter.Path, "galleries_query.parent_folder_path", "galleries_query.basename"))
query.handleCriterion(ctx, pathCriterionHandler(galleryFilter.Path, "galleries_query.parent_folder_path", "galleries_query.basename", nil))
query.handleCriterion(ctx, galleryFileCountCriterionHandler(qb, galleryFilter.FileCount))
query.handleCriterion(ctx, intCriterionHandler(galleryFilter.Rating, "galleries.rating"))
query.handleCriterion(ctx, stringCriterionHandler(galleryFilter.URL, "galleries.url"))
@@ -1182,6 +1182,108 @@ func Test_galleryQueryBuilder_CountByImageID(t *testing.T) {
}
}
func galleriesToIDs(i []*models.Gallery) []int {
var ret []int
for _, ii := range i {
ret = append(ret, ii.ID)
}
return ret
}
func Test_galleryStore_FindByFileID(t *testing.T) {
tests := []struct {
name string
fileID file.ID
include []int
exclude []int
}{
{
"valid",
galleryFileIDs[galleryIdx1WithImage],
[]int{galleryIdx1WithImage},
nil,
},
{
"invalid",
invalidFileID,
nil,
[]int{galleryIdx1WithImage},
},
}
qb := db.Gallery
for _, tt := range tests {
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
assert := assert.New(t)
got, err := qb.FindByFileID(ctx, tt.fileID)
if err != nil {
t.Errorf("GalleryStore.FindByFileID() error = %v", err)
return
}
for _, f := range got {
clearGalleryFileIDs(f)
}
ids := galleriesToIDs(got)
include := indexesToIDs(galleryIDs, tt.include)
exclude := indexesToIDs(galleryIDs, tt.exclude)
for _, i := range include {
assert.Contains(ids, i)
}
for _, e := range exclude {
assert.NotContains(ids, e)
}
})
}
}
func Test_galleryStore_FindByFolderID(t *testing.T) {
tests := []struct {
name string
folderID file.FolderID
include []int
exclude []int
}{
// TODO - add folder gallery
{
"invalid",
invalidFolderID,
nil,
[]int{galleryIdxWithImage},
},
}
qb := db.Gallery
for _, tt := range tests {
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
assert := assert.New(t)
got, err := qb.FindByFolderID(ctx, tt.folderID)
if err != nil {
t.Errorf("GalleryStore.FindByFolderID() error = %v", err)
return
}
for _, f := range got {
clearGalleryFileIDs(f)
}
ids := galleriesToIDs(got)
include := indexesToIDs(imageIDs, tt.include)
exclude := indexesToIDs(imageIDs, tt.exclude)
for _, i := range include {
assert.Contains(ids, i)
}
for _, e := range exclude {
assert.NotContains(ids, e)
}
})
}
}
func TestGalleryQueryQ(t *testing.T) {
withTxn(func(ctx context.Context) error {
const galleryIdx = 0
@@ -3,9 +3,7 @@ package sqlite
import (
"context"
"database/sql"
"errors"
"fmt"
"path/filepath"
"time"
"github.com/jmoiron/sqlx"

@@ -365,11 +363,14 @@ func (qb *ImageStore) findBySubquery(ctx context.Context, sq *goqu.SelectDataset
}
func (qb *ImageStore) FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Image, error) {
table := imagesQueryTable
table := qb.table()
sq := dialect.From(table).Select(table.Col(idColumn)).Where(
table.Col("file_id").Eq(fileID),
)
sq := dialect.From(table).
InnerJoin(
imagesFilesJoinTable,
goqu.On(table.Col(idColumn).Eq(imagesFilesJoinTable.Col(imageIDColumn))),
).
Select(table.Col(idColumn)).Where(imagesFilesJoinTable.Col(fileIDColumn).Eq(fileID))
ret, err := qb.findBySubquery(ctx, sq)
if err != nil {

@@ -387,18 +388,28 @@ func (qb *ImageStore) CountByFileID(ctx context.Context, fileID file.ID) (int, e
}
func (qb *ImageStore) FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Image, error) {
table := imagesQueryTable
table := qb.table()
fingerprintTable := fingerprintTableMgr.table
var ex []exp.Expression
for _, v := range fp {
ex = append(ex, goqu.And(
table.Col("fingerprint_type").Eq(v.Type),
table.Col("fingerprint").Eq(v.Fingerprint),
fingerprintTable.Col("type").Eq(v.Type),
fingerprintTable.Col("fingerprint").Eq(v.Fingerprint),
))
}
sq := dialect.From(table).Select(table.Col(idColumn)).Where(goqu.Or(ex...))
sq := dialect.From(table).
InnerJoin(
imagesFilesJoinTable,
goqu.On(table.Col(idColumn).Eq(imagesFilesJoinTable.Col(imageIDColumn))),
).
InnerJoin(
fingerprintTable,
goqu.On(fingerprintTable.Col(fileIDColumn).Eq(imagesFilesJoinTable.Col(fileIDColumn))),
).
Select(table.Col(idColumn)).Where(goqu.Or(ex...))
ret, err := qb.findBySubquery(ctx, sq)
if err != nil {

@@ -409,45 +420,32 @@ func (qb *ImageStore) FindByFingerprints(ctx context.Context, fp []file.Fingerpr
}
func (qb *ImageStore) FindByChecksum(ctx context.Context, checksum string) ([]*models.Image, error) {
table := imagesQueryTable
sq := dialect.From(table).Select(table.Col(idColumn)).Where(
table.Col("fingerprint_type").Eq(file.FingerprintTypeMD5),
table.Col("fingerprint").Eq(checksum),
)
ret, err := qb.findBySubquery(ctx, sq)
if err != nil {
return nil, fmt.Errorf("getting image by checksum %s: %w", checksum, err)
}
return ret, nil
}
func (qb *ImageStore) FindByPath(ctx context.Context, p string) ([]*models.Image, error) {
table := imagesQueryTable
basename := filepath.Base(p)
dir, _ := path(filepath.Dir(p)).Value()
sq := dialect.From(table).Select(table.Col(idColumn)).Where(
table.Col("parent_folder_path").Eq(dir),
table.Col("basename").Eq(basename),
)
ret, err := qb.findBySubquery(ctx, sq)
if err != nil && !errors.Is(err, sql.ErrNoRows) {
return nil, fmt.Errorf("getting image by path %s: %w", p, err)
}
return ret, nil
return qb.FindByFingerprints(ctx, []file.Fingerprint{
{
Type: file.FingerprintTypeMD5,
Fingerprint: checksum,
},
})
}
func (qb *ImageStore) FindByGalleryID(ctx context.Context, galleryID int) ([]*models.Image, error) {
table := qb.queryTable()
table := qb.table()
queryTable := qb.queryTable()
q := qb.selectDataset().Where(
table.Col("gallery_id").Eq(galleryID),
).GroupBy(table.Col(idColumn)).Order(table.Col("parent_folder_path").Asc(), table.Col("basename").Asc())
sq := dialect.From(table).
InnerJoin(
galleriesImagesJoinTable,
goqu.On(table.Col(idColumn).Eq(galleriesImagesJoinTable.Col(imageIDColumn))),
).
Select(table.Col(idColumn)).Where(
galleriesImagesJoinTable.Col("gallery_id").Eq(galleryID),
)
q := qb.selectDataset().Prepared(true).Where(
queryTable.Col(idColumn).Eq(
sq,
),
).Order(queryTable.Col("parent_folder_path").Asc(), queryTable.Col("basename").Asc())
ret, err := qb.getMany(ctx, q)
if err != nil {

@@ -465,8 +463,21 @@ func (qb *ImageStore) CountByGalleryID(ctx context.Context, galleryID int) (int,
}
func (qb *ImageStore) FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Image, error) {
table := qb.queryTable()
sq := dialect.From(table).Select(table.Col(idColumn)).Where(table.Col("parent_folder_id").Eq(folderID))
table := qb.table()
fileTable := goqu.T(fileTable)
sq := dialect.From(table).
InnerJoin(
imagesFilesJoinTable,
goqu.On(table.Col(idColumn).Eq(imagesFilesJoinTable.Col(imageIDColumn))),
).
InnerJoin(
fileTable,
goqu.On(imagesFilesJoinTable.Col(fileIDColumn).Eq(fileTable.Col(idColumn))),
).
Select(table.Col(idColumn)).Where(
fileTable.Col("parent_folder_id").Eq(folderID),
)
ret, err := qb.findBySubquery(ctx, sq)
if err != nil {

@@ -477,8 +488,21 @@ func (qb *ImageStore) FindByFolderID(ctx context.Context, folderID file.FolderID
}
func (qb *ImageStore) FindByZipFileID(ctx context.Context, zipFileID file.ID) ([]*models.Image, error) {
table := qb.queryTable()
sq := dialect.From(table).Select(table.Col(idColumn)).Where(table.Col("zip_file_id").Eq(zipFileID))
table := qb.table()
fileTable := goqu.T(fileTable)
sq := dialect.From(table).
InnerJoin(
imagesFilesJoinTable,
goqu.On(table.Col(idColumn).Eq(imagesFilesJoinTable.Col(imageIDColumn))),
).
InnerJoin(
fileTable,
goqu.On(imagesFilesJoinTable.Col(fileIDColumn).Eq(fileTable.Col(idColumn))),
).
Select(table.Col(idColumn)).Where(
fileTable.Col("zip_file_id").Eq(zipFileID),
)
ret, err := qb.findBySubquery(ctx, sq)
if err != nil {

@@ -563,20 +587,21 @@ func (qb *ImageStore) makeFilter(ctx context.Context, imageFilter *models.ImageF
query.handleCriterion(ctx, criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) {
if imageFilter.Checksum != nil {
f.addLeftJoin(fingerprintTable, "fingerprints_md5", "galleries_query.file_id = fingerprints_md5.file_id AND fingerprints_md5.type = 'md5'")
qb.addQueryTable(f)
f.addInnerJoin(fingerprintTable, "fingerprints_md5", "galleries_query.file_id = fingerprints_md5.file_id AND fingerprints_md5.type = 'md5'")
}
stringCriterionHandler(imageFilter.Checksum, "fingerprints_md5.fingerprint")(ctx, f)
}))
query.handleCriterion(ctx, stringCriterionHandler(imageFilter.Title, "images.title"))
query.handleCriterion(ctx, pathCriterionHandler(imageFilter.Path, "images_query.parent_folder_path", "images_query.basename"))
query.handleCriterion(ctx, pathCriterionHandler(imageFilter.Path, "images_query.parent_folder_path", "images_query.basename", qb.addQueryTable))
query.handleCriterion(ctx, imageFileCountCriterionHandler(qb, imageFilter.FileCount))
query.handleCriterion(ctx, intCriterionHandler(imageFilter.Rating, "images.rating"))
query.handleCriterion(ctx, intCriterionHandler(imageFilter.OCounter, "images.o_counter"))
query.handleCriterion(ctx, boolCriterionHandler(imageFilter.Organized, "images.organized"))
query.handleCriterion(ctx, resolutionCriterionHandler(imageFilter.Resolution, "images_query.image_height", "images_query.image_width"))
query.handleCriterion(ctx, resolutionCriterionHandler(imageFilter.Resolution, "images_query.image_height", "images_query.image_width", qb.addQueryTable))
query.handleCriterion(ctx, imageIsMissingCriterionHandler(qb, imageFilter.IsMissing))
query.handleCriterion(ctx, imageTagsCriterionHandler(qb, imageFilter.Tags))

@@ -591,6 +616,10 @@ func (qb *ImageStore) makeFilter(ctx context.Context, imageFilter *models.ImageF
return query
}
func (qb *ImageStore) addQueryTable(f *filterBuilder) {
f.addInnerJoin(imagesQueryTable.GetTable(), "", "images.id = images_query.id")
}
func (qb *ImageStore) makeQuery(ctx context.Context, imageFilter *models.ImageFilterType, findFilter *models.FindFilterType) (*queryBuilder, error) {
if imageFilter == nil {
imageFilter = &models.ImageFilterType{}

@@ -602,15 +631,27 @@ func (qb *ImageStore) makeQuery(ctx context.Context, imageFilter *models.ImageFi
query := qb.newQuery()
distinctIDs(&query, imageTable)
// for convenience, join with the query view
query.addJoins(join{
table: imagesQueryTable.GetTable(),
onClause: "images.id = images_query.id",
joinType: "INNER",
})
if q := findFilter.Q; q != nil && *q != "" {
searchColumns := []string{"images.title", "images_query.parent_folder_path", "images_query.basename", "images_query.fingerprint"}
query.addJoins(
join{
table: imagesFilesTable,
onClause: "images_files.image_id = images.id",
},
join{
table: fileTable,
onClause: "images_files.file_id = files.id",
},
join{
table: folderTable,
onClause: "files.parent_folder_id = folders.id",
},
join{
table: fingerprintTable,
onClause: "files_fingerprints.file_id = images_files.file_id",
},
)
searchColumns := []string{"images.title", "folders.path", "files.basename", "files_fingerprints.fingerprint"}
query.parseQueryString(searchColumns, *q)
}

@@ -621,7 +662,7 @@ func (qb *ImageStore) makeQuery(ctx context.Context, imageFilter *models.ImageFi
query.addFilter(filter)
query.sortAndPagination = qb.getImageSort(findFilter) + getPagination(findFilter)
qb.setImageSortAndPagination(&query, findFilter)
return &query, nil
}

@@ -769,8 +810,10 @@ func imageTagCountCriterionHandler(qb *ImageStore, tagCount *models.IntCriterion
func imageGalleriesCriterionHandler(qb *ImageStore, galleries *models.MultiCriterionInput) criterionHandlerFunc {
addJoinsFunc := func(f *filterBuilder) {
qb.galleriesRepository().join(f, "", "images.id")
f.addLeftJoin(galleryTable, "", "galleries_images.gallery_id = galleries.id")
if galleries.Modifier == models.CriterionModifierIncludes || galleries.Modifier == models.CriterionModifierIncludesAll {
f.addInnerJoin(galleriesImagesTable, "", "galleries_images.image_id = images.id")
f.addInnerJoin(galleryTable, "", "galleries_images.gallery_id = galleries.id")
}
}
h := qb.getMultiCriterionHandlerBuilder(galleryTable, galleriesImagesTable, galleryIDColumn, addJoinsFunc)

@@ -872,30 +915,54 @@ INNER JOIN (` + valuesClause + `) t ON t.column2 = pt.tag_id
}
}
func (qb *ImageStore) getImageSort(findFilter *models.FindFilterType) string {
if findFilter == nil || findFilter.Sort == nil || *findFilter.Sort == "" {
return ""
}
sort := findFilter.GetSort("title")
direction := findFilter.GetDirection()
func (qb *ImageStore) setImageSortAndPagination(q *queryBuilder, findFilter *models.FindFilterType) {
sortClause := ""
// translate sort field
if sort == "file_mod_time" {
sort = "mod_time"
if findFilter != nil && findFilter.Sort != nil && *findFilter.Sort != "" {
sort := findFilter.GetSort("title")
direction := findFilter.GetDirection()
// translate sort field
if sort == "file_mod_time" {
sort = "mod_time"
}
addFilesJoin := func() {
q.addJoins(
join{
table: imagesFilesTable,
onClause: "images_files.image_id = images.id",
},
join{
table: fileTable,
onClause: "images_files.file_id = files.id",
},
)
}
switch sort {
case "path":
addFilesJoin()
q.addJoins(join{
table: folderTable,
onClause: "files.parent_folder_id = folders.id",
})
sortClause = " ORDER BY folders.path " + direction + ", files.basename " + direction
case "file_count":
sortClause = getCountSort(imageTable, imagesFilesTable, imageIDColumn, direction)
case "tag_count":
sortClause = getCountSort(imageTable, imagesTagsTable, imageIDColumn, direction)
case "performer_count":
sortClause = getCountSort(imageTable, performersImagesTable, imageIDColumn, direction)
case "mod_time", "size":
addFilesJoin()
sortClause = getSort(sort, direction, "files")
default:
sortClause = getSort(sort, direction, "images")
}
}
switch sort {
case "path":
return " ORDER BY images_query.parent_folder_path " + direction + ", images_query.basename " + direction
case "file_count":
return getCountSort(imageTable, imagesFilesTable, imageIDColumn, direction)
case "tag_count":
return getCountSort(imageTable, imagesTagsTable, imageIDColumn, direction)
case "performer_count":
return getCountSort(imageTable, performersImagesTable, imageIDColumn, direction)
default:
return getSort(sort, direction, "images_query")
}
q.sortAndPagination = sortClause + getPagination(findFilter)
}
func (qb *ImageStore) galleriesRepository() *joinRepository {
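Note: the getImageSort → setImageSortAndPagination rewrite above stops sorting through the wide images_query view; the joins a sort needs (images_files, files, and folders for path sorting) are added only when the chosen sort actually requires them. The following is a condensed, self-contained restatement of that control flow using a plain string builder and placeholder names, not the real queryBuilder:

package main

import "fmt"

// sortPlan is a simplified stand-in for the real queryBuilder: it collects the
// joins a sort needs plus the ORDER BY clause itself.
type sortPlan struct {
	joins []string
	order string
}

func imageSortPlan(sort, direction string) sortPlan {
	p := sortPlan{}
	filesJoins := []string{
		"INNER JOIN images_files ON images_files.image_id = images.id",
		"INNER JOIN files ON images_files.file_id = files.id",
	}
	switch sort {
	case "path":
		// path sorting is the only case that also needs the folders table
		p.joins = append(p.joins, filesJoins...)
		p.joins = append(p.joins, "INNER JOIN folders ON files.parent_folder_id = folders.id")
		p.order = "ORDER BY folders.path " + direction + ", files.basename " + direction
	case "mod_time", "size":
		p.joins = append(p.joins, filesJoins...)
		p.order = "ORDER BY files." + sort + " " + direction
	default:
		// plain image columns need no extra joins at all
		p.order = "ORDER BY images." + sort + " " + direction
	}
	return p
}

func main() {
	fmt.Println(imageSortPlan("path", "ASC"))
	fmt.Println(imageSortPlan("title", "DESC"))
}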
@@ -1035,38 +1035,58 @@ func Test_imageQueryBuilder_FindByChecksum(t *testing.T) {
}
}
func Test_imageQueryBuilder_FindByPath(t *testing.T) {
getPath := func(index int) string {
return getFilePath(folderIdxWithImageFiles, getImageBasename(index))
func Test_imageQueryBuilder_FindByFingerprints(t *testing.T) {
getChecksum := func(index int) string {
return getImageStringValue(index, checksumField)
}
tests := []struct {
name string
path string
want []*models.Image
wantErr bool
name string
fingerprints []file.Fingerprint
want []*models.Image
wantErr bool
}{
{
"valid",
getPath(imageIdxWithGallery),
[]file.Fingerprint{
{
Type: file.FingerprintTypeMD5,
Fingerprint: getChecksum(imageIdxWithGallery),
},
},
[]*models.Image{makeImageWithID(imageIdxWithGallery)},
false,
},
{
"invalid",
"invalid path",
[]file.Fingerprint{
{
Type: file.FingerprintTypeMD5,
Fingerprint: "invalid checksum",
},
},
nil,
false,
},
{
"with performers",
getPath(imageIdxWithTwoPerformers),
[]file.Fingerprint{
{
Type: file.FingerprintTypeMD5,
Fingerprint: getChecksum(imageIdxWithTwoPerformers),
},
},
[]*models.Image{makeImageWithID(imageIdxWithTwoPerformers)},
false,
},
{
"with tags",
getPath(imageIdxWithTwoTags),
[]file.Fingerprint{
{
Type: file.FingerprintTypeMD5,
Fingerprint: getChecksum(imageIdxWithTwoTags),
},
},
[]*models.Image{makeImageWithID(imageIdxWithTwoTags)},
false,
},

@@ -1077,14 +1097,16 @@ func Test_imageQueryBuilder_FindByPath(t *testing.T) {
for _, tt := range tests {
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
assert := assert.New(t)
got, err := qb.FindByPath(ctx, tt.path)
got, err := qb.FindByFingerprints(ctx, tt.fingerprints)
if (err != nil) != tt.wantErr {
t.Errorf("imageQueryBuilder.FindByPath() error = %v, wantErr %v", err, tt.wantErr)
t.Errorf("imageQueryBuilder.FindByChecksum() error = %v, wantErr %v", err, tt.wantErr)
return
}
for _, f := range got {
clearImageFileIDs(f)
}
assert.Equal(tt.want, got)
})
}

@@ -1178,6 +1200,55 @@ func imagesToIDs(i []*models.Image) []int {
return ret
}
func Test_imageStore_FindByFileID(t *testing.T) {
tests := []struct {
name string
fileID file.ID
include []int
exclude []int
}{
{
"valid",
imageFileIDs[imageIdxWithGallery],
[]int{imageIdxWithGallery},
nil,
},
{
"invalid",
invalidFileID,
nil,
[]int{imageIdxWithGallery},
},
}
qb := db.Image
for _, tt := range tests {
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
assert := assert.New(t)
got, err := qb.FindByFileID(ctx, tt.fileID)
if err != nil {
t.Errorf("ImageStore.FindByFileID() error = %v", err)
return
}
for _, f := range got {
clearImageFileIDs(f)
}
ids := imagesToIDs(got)
include := indexesToIDs(imageIDs, tt.include)
exclude := indexesToIDs(imageIDs, tt.exclude)
for _, i := range include {
assert.Contains(ids, i)
}
for _, e := range exclude {
assert.NotContains(ids, e)
}
})
}
}
func Test_imageStore_FindByFolderID(t *testing.T) {
tests := []struct {
name string
@@ -10,6 +10,7 @@ CREATE TABLE `folders` (
);
CREATE INDEX `index_folders_on_parent_folder_id` on `folders` (`parent_folder_id`);
CREATE UNIQUE INDEX `index_folders_on_path_unique` on `folders` (`path`);
-- require reference folders/zip files to be deleted manually first
CREATE TABLE `files` (

@@ -26,12 +27,12 @@ CREATE TABLE `files` (
CHECK (`basename` != '')
);
CREATE UNIQUE INDEX `index_files_zip_basename_unique` ON `files` (`zip_file_id`, `parent_folder_id`, `basename`);
CREATE INDEX `index_files_on_parent_folder_id_basename` on `files` (`parent_folder_id`, `basename`);
CREATE UNIQUE INDEX `index_files_zip_basename_unique` ON `files` (`zip_file_id`, `parent_folder_id`, `basename`) WHERE `zip_file_id` IS NOT NULL;
CREATE UNIQUE INDEX `index_files_on_parent_folder_id_basename_unique` on `files` (`parent_folder_id`, `basename`);
CREATE INDEX `index_files_on_basename` on `files` (`basename`);
ALTER TABLE `folders` ADD COLUMN `zip_file_id` integer REFERENCES `files`(`id`);
CREATE UNIQUE INDEX `index_folders_path_unique` on `folders` (`zip_file_id`, `path`);
CREATE INDEX `index_folders_on_zip_file_id` on `folders` (`zip_file_id`) WHERE `zip_file_id` IS NOT NULL;
CREATE TABLE `files_fingerprints` (
`file_id` integer NOT NULL,

@@ -84,7 +85,7 @@ CREATE TABLE `images_files` (
PRIMARY KEY(`image_id`, `file_id`)
);
CREATE INDEX `index_images_files_file_id` ON `images_files` (`file_id`);
CREATE INDEX `index_images_files_on_file_id` on `images_files` (`file_id`);
CREATE TABLE `galleries_files` (
`gallery_id` integer NOT NULL,
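Note: the schema hunk above makes the zip-basename uniqueness index partial (`WHERE zip_file_id IS NOT NULL`), tightens the (parent_folder_id, basename) index into a unique one, and adds lookup indexes on basename and zip_file_id. The snippet below is a minimal, self-contained illustration of how a partial unique index behaves in SQLite; it uses a simplified two-column table rather than the real stash schema and assumes the mattn/go-sqlite3 driver.

package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/mattn/go-sqlite3"
)

func main() {
	db, err := sql.Open("sqlite3", ":memory:")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Simplified stand-in for the files table: only the columns the index needs.
	stmts := []string{
		"CREATE TABLE files (id INTEGER PRIMARY KEY, zip_file_id INTEGER, basename TEXT NOT NULL)",
		// Uniqueness is only enforced for rows that actually live inside a zip file.
		"CREATE UNIQUE INDEX files_zip_basename ON files (zip_file_id, basename) WHERE zip_file_id IS NOT NULL",
	}
	for _, s := range stmts {
		if _, err := db.Exec(s); err != nil {
			log.Fatal(err)
		}
	}

	// Two loose files with the same basename are fine: rows with a NULL
	// zip_file_id are excluded from the partial index.
	_, err = db.Exec("INSERT INTO files (zip_file_id, basename) VALUES (NULL, 'a.jpg'), (NULL, 'a.jpg')")
	fmt.Println("duplicate loose basenames:", err)

	// The same basename twice inside the same zip violates the partial index.
	_, err = db.Exec("INSERT INTO files (zip_file_id, basename) VALUES (1, 'a.jpg'), (1, 'a.jpg')")
	fmt.Println("duplicate zipped basenames:", err)
}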
@@ -456,10 +456,8 @@ func (qb *SceneStore) find(ctx context.Context, id int) (*models.Scene, error) {
}
func (qb *SceneStore) FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Scene, error) {
table := qb.queryTable()
sq := dialect.From(table).Select(table.Col(idColumn)).Where(
table.Col("file_id").Eq(fileID),
sq := dialect.From(scenesFilesJoinTable).Select(scenesFilesJoinTable.Col(sceneIDColumn)).Where(
scenesFilesJoinTable.Col(fileIDColumn).Eq(fileID),
)
ret, err := qb.findBySubquery(ctx, sq)

@@ -478,18 +476,23 @@ func (qb *SceneStore) CountByFileID(ctx context.Context, fileID file.ID) (int, e
}
func (qb *SceneStore) FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Scene, error) {
table := qb.queryTable()
fingerprintTable := fingerprintTableMgr.table
var ex []exp.Expression
for _, v := range fp {
ex = append(ex, goqu.And(
table.Col("fingerprint_type").Eq(v.Type),
table.Col("fingerprint").Eq(v.Fingerprint),
fingerprintTable.Col("type").Eq(v.Type),
fingerprintTable.Col("fingerprint").Eq(v.Fingerprint),
))
}
sq := dialect.From(table).Select(table.Col(idColumn)).Where(goqu.Or(ex...))
sq := dialect.From(scenesFilesJoinTable).
InnerJoin(
fingerprintTable,
goqu.On(fingerprintTable.Col(fileIDColumn).Eq(scenesFilesJoinTable.Col(fileIDColumn))),
).
Select(scenesFilesJoinTable.Col(sceneIDColumn)).Where(goqu.Or(ex...))
ret, err := qb.findBySubquery(ctx, sq)
if err != nil {

@@ -500,39 +503,26 @@ func (qb *SceneStore) FindByFingerprints(ctx context.Context, fp []file.Fingerpr
}
func (qb *SceneStore) FindByChecksum(ctx context.Context, checksum string) ([]*models.Scene, error) {
table := qb.queryTable()
sq := dialect.From(table).Select(table.Col(idColumn)).Where(
table.Col("fingerprint_type").Eq(file.FingerprintTypeMD5),
table.Col("fingerprint").Eq(checksum),
)
ret, err := qb.findBySubquery(ctx, sq)
if err != nil {
return nil, fmt.Errorf("getting scenes by checksum %s: %w", checksum, err)
}
return ret, nil
return qb.FindByFingerprints(ctx, []file.Fingerprint{
{
Type: file.FingerprintTypeMD5,
Fingerprint: checksum,
},
})
}
func (qb *SceneStore) FindByOSHash(ctx context.Context, oshash string) ([]*models.Scene, error) {
table := qb.queryTable()
sq := dialect.From(table).Select(table.Col(idColumn)).Where(
table.Col("fingerprint_type").Eq(file.FingerprintTypeOshash),
table.Col("fingerprint").Eq(oshash),
)
ret, err := qb.findBySubquery(ctx, sq)
if err != nil {
return nil, fmt.Errorf("getting scenes by oshash %s: %w", oshash, err)
}
return ret, nil
return qb.FindByFingerprints(ctx, []file.Fingerprint{
{
Type: file.FingerprintTypeOshash,
Fingerprint: oshash,
},
})
}
func (qb *SceneStore) FindByPath(ctx context.Context, p string) ([]*models.Scene, error) {
table := scenesQueryTable
filesTable := fileTableMgr.table
foldersTable := folderTableMgr.table
basename := filepath.Base(p)
dirStr := filepath.Dir(p)

@@ -542,9 +532,15 @@ func (qb *SceneStore) FindByPath(ctx context.Context, p string) ([]*models.Scene
dir, _ := path(dirStr).Value()
sq := dialect.From(table).Select(table.Col(idColumn)).Where(
table.Col("parent_folder_path").Like(dir),
table.Col("basename").Like(basename),
sq := dialect.From(scenesFilesJoinTable).InnerJoin(
filesTable,
goqu.On(filesTable.Col(idColumn).Eq(scenesFilesJoinTable.Col(fileIDColumn))),
).InnerJoin(
foldersTable,
goqu.On(foldersTable.Col(idColumn).Eq(filesTable.Col("parent_folder_id"))),
).Select(scenesFilesJoinTable.Col(sceneIDColumn)).Where(
foldersTable.Col("path").Like(dir),
filesTable.Col("basename").Like(basename),
)
ret, err := qb.findBySubquery(ctx, sq)

@@ -670,18 +666,16 @@ func (qb *SceneStore) CountByTagID(ctx context.Context, tagID int) (int, error)
}
func (qb *SceneStore) countMissingFingerprints(ctx context.Context, fpType string) (int, error) {
table := qb.queryTable()
fpTable := fingerprintTableMgr.table.As("fingerprints_temp")
q := dialect.Select(goqu.COUNT(goqu.DISTINCT(table.Col(idColumn)))).From(table).LeftJoin(
q := dialect.From(scenesFilesJoinTable).LeftJoin(
fpTable,
goqu.On(
table.Col("file_id").Eq(fpTable.Col("file_id")),
scenesFilesJoinTable.Col(fileIDColumn).Eq(fpTable.Col(fileIDColumn)),
fpTable.Col("type").Eq(fpType),
),
)
).Select(goqu.COUNT(goqu.DISTINCT(scenesFilesJoinTable.Col(sceneIDColumn)))).Where(fpTable.Col("fingerprint").IsNull())
q.Where(fpTable.Col("fingerprint").IsNull())
return count(ctx, q)
}

@@ -762,7 +756,7 @@ func (qb *SceneStore) makeFilter(ctx context.Context, sceneFilter *models.SceneF
query.not(qb.makeFilter(ctx, sceneFilter.Not))
}
query.handleCriterion(ctx, pathCriterionHandler(sceneFilter.Path, "scenes_query.parent_folder_path", "scenes_query.basename"))
query.handleCriterion(ctx, pathCriterionHandler(sceneFilter.Path, "scenes_query.parent_folder_path", "scenes_query.basename", nil))
query.handleCriterion(ctx, sceneFileCountCriterionHandler(qb, sceneFilter.FileCount))
query.handleCriterion(ctx, stringCriterionHandler(sceneFilter.Title, "scenes.title"))
query.handleCriterion(ctx, stringCriterionHandler(sceneFilter.Details, "scenes.details"))

@@ -799,7 +793,7 @@ func (qb *SceneStore) makeFilter(ctx context.Context, sceneFilter *models.SceneF
query.handleCriterion(ctx, boolCriterionHandler(sceneFilter.Organized, "scenes.organized"))
query.handleCriterion(ctx, durationCriterionHandler(sceneFilter.Duration, "scenes_query.duration"))
query.handleCriterion(ctx, resolutionCriterionHandler(sceneFilter.Resolution, "scenes_query.video_height", "scenes_query.video_width"))
query.handleCriterion(ctx, resolutionCriterionHandler(sceneFilter.Resolution, "scenes_query.video_height", "scenes_query.video_width", nil))
query.handleCriterion(ctx, hasMarkersCriterionHandler(sceneFilter.HasMarkers))
query.handleCriterion(ctx, sceneIsMissingCriterionHandler(qb, sceneFilter.IsMissing))

@@ -958,9 +952,13 @@ func durationCriterionHandler(durationFilter *models.IntCriterionInput, column s
}
}
func resolutionCriterionHandler(resolution *models.ResolutionCriterionInput, heightColumn string, widthColumn string) criterionHandlerFunc {
func resolutionCriterionHandler(resolution *models.ResolutionCriterionInput, heightColumn string, widthColumn string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc {
return func(ctx context.Context, f *filterBuilder) {
if resolution != nil && resolution.Value.IsValid() {
if addJoinFn != nil {
addJoinFn(f)
}
min := resolution.Value.GetMinResolution()
max := resolution.Value.GetMaxResolution()
@@ -1655,6 +1655,152 @@ func TestSceneCountByPerformerID(t *testing.T) {
})
}
func scenesToIDs(i []*models.Scene) []int {
var ret []int
for _, ii := range i {
ret = append(ret, ii.ID)
}
return ret
}
func Test_sceneStore_FindByFileID(t *testing.T) {
tests := []struct {
name string
fileID file.ID
include []int
exclude []int
}{
{
"valid",
sceneFileIDs[sceneIdx1WithPerformer],
[]int{sceneIdx1WithPerformer},
nil,
},
{
"invalid",
invalidFileID,
nil,
[]int{sceneIdx1WithPerformer},
},
}
qb := db.Scene
for _, tt := range tests {
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
assert := assert.New(t)
got, err := qb.FindByFileID(ctx, tt.fileID)
if err != nil {
t.Errorf("SceneStore.FindByFileID() error = %v", err)
return
}
for _, f := range got {
clearSceneFileIDs(f)
}
ids := scenesToIDs(got)
include := indexesToIDs(galleryIDs, tt.include)
exclude := indexesToIDs(galleryIDs, tt.exclude)
for _, i := range include {
assert.Contains(ids, i)
}
for _, e := range exclude {
assert.NotContains(ids, e)
}
})
}
}
func Test_sceneStore_CountByFileID(t *testing.T) {
tests := []struct {
name string
fileID file.ID
want int
}{
{
"valid",
sceneFileIDs[sceneIdxWithTwoPerformers],
1,
},
{
"invalid",
invalidFileID,
0,
},
}
qb := db.Scene
for _, tt := range tests {
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
assert := assert.New(t)
got, err := qb.CountByFileID(ctx, tt.fileID)
if err != nil {
t.Errorf("SceneStore.CountByFileID() error = %v", err)
return
}
assert.Equal(tt.want, got)
})
}
}
func Test_sceneStore_CountMissingChecksum(t *testing.T) {
tests := []struct {
name string
want int
}{
{
"valid",
0,
},
}
qb := db.Scene
for _, tt := range tests {
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
assert := assert.New(t)
got, err := qb.CountMissingChecksum(ctx)
if err != nil {
t.Errorf("SceneStore.CountMissingChecksum() error = %v", err)
return
}
assert.Equal(tt.want, got)
})
}
}
func Test_sceneStore_CountMissingOshash(t *testing.T) {
tests := []struct {
name string
want int
}{
{
"valid",
0,
},
}
qb := db.Scene
for _, tt := range tests {
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
assert := assert.New(t)
got, err := qb.CountMissingOSHash(ctx)
if err != nil {
t.Errorf("SceneStore.CountMissingOSHash() error = %v", err)
return
}
assert.Equal(tt.want, got)
})
}
}
func TestSceneWall(t *testing.T) {
withTxn(func(ctx context.Context) error {
sqb := db.Scene
@@ -1,5 +1,5 @@
//go:build ignore
// +build ignore
//go:build tools
// +build tools
package main
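Note: switching the generator's build constraint from `ignore` to `tools` keeps this file out of ordinary builds while still letting it compile when the tag is supplied explicitly (for example `go run -tags tools <path-to-generator>`; the actual package path is not shown in this excerpt). A minimal illustration of how such a constraint behaves, using a hypothetical file name:

// generate_db.go (hypothetical file name, for illustration only)
//
// The build constraint below excludes this file from `go build ./...`;
// it is only compiled when the "tools" tag is passed, e.g.:
//
//	go run -tags tools ./path/to/this/package
//
//go:build tools
// +build tools

package main

import "fmt"

func main() {
	fmt.Println("only built with -tags tools")
}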
@ -14,12 +14,12 @@ import (
|
|||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/stashapp/stash/pkg/database"
|
||||
"github.com/stashapp/stash/pkg/file"
|
||||
"github.com/stashapp/stash/pkg/hash/md5"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/sliceutil/intslice"
|
||||
"github.com/stashapp/stash/pkg/sqlite"
|
||||
"github.com/stashapp/stash/pkg/txn"
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
|
|
@ -40,8 +40,12 @@ type config struct {
|
|||
Naming namingConfig `yaml:"naming"`
|
||||
}
|
||||
|
||||
var txnManager models.TransactionManager
|
||||
var c *config
|
||||
var (
|
||||
repo models.Repository
|
||||
c *config
|
||||
db *sqlite.Database
|
||||
folderID file.FolderID
|
||||
)
|
||||
|
||||
func main() {
|
||||
rand.Seed(time.Now().UnixNano())
|
||||
|
|
@ -54,9 +58,17 @@ func main() {
|
|||
|
||||
initNaming(*c)
|
||||
|
||||
if err = database.Initialize(c.Database); err != nil {
|
||||
db = sqlite.NewDatabase()
|
||||
repo = db.TxnRepository()
|
||||
|
||||
logf("Initializing database...")
|
||||
if err = db.Open(c.Database); err != nil {
|
||||
log.Fatalf("couldn't initialize database: %v", err)
|
||||
}
|
||||
logf("Populating database...")
|
||||
if err = makeFolder(); err != nil {
|
||||
log.Fatalf("couldn't create folder: %v", err)
|
||||
}
|
||||
populateDB()
|
||||
}
|
||||
|
||||
|
|
@ -89,12 +101,8 @@ func populateDB() {
|
|||
makeMarkers(c.Markers)
|
||||
}
|
||||
|
||||
func withTxn(f func(r models.Repository) error) error {
|
||||
if txnManager == nil {
|
||||
txnManager = sqlite.NewTransactionManager()
|
||||
}
|
||||
|
||||
return txnManager.WithTxn(context.TODO(), f)
|
||||
func withTxn(f func(ctx context.Context) error) error {
|
||||
return txn.WithTxn(context.Background(), db, f)
|
||||
}
|
||||
|
||||
func retry(attempts int, fn func() error) error {
|
||||
|
|
@ -109,28 +117,43 @@ func retry(attempts int, fn func() error) error {
|
|||
return err
|
||||
}
|
||||
|
||||
func makeFolder() error {
|
||||
return withTxn(func(ctx context.Context) error {
|
||||
f := file.Folder{
|
||||
Path: ".",
|
||||
}
|
||||
if err := repo.Folder.Create(ctx, &f); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
folderID = f.ID
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
func makeTags(n int) {
|
||||
logf("creating %d tags...", n)
|
||||
for i := 0; i < n; i++ {
|
||||
if err := retry(100, func() error {
|
||||
return withTxn(func(r models.Repository) error {
|
||||
return withTxn(func(ctx context.Context) error {
|
||||
name := names[c.Naming.Tags].generateName(1)
|
||||
tag := models.Tag{
|
||||
Name: name,
|
||||
}
|
||||
|
||||
created, err := r.Tag().Create(tag)
|
||||
created, err := repo.Tag.Create(ctx, tag)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if rand.Intn(100) > 5 {
|
||||
t, _, err := r.Tag().Query(nil, getRandomFilter(1))
|
||||
t, _, err := repo.Tag.Query(ctx, nil, getRandomFilter(1))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(t) > 0 && t[0].ID != created.ID {
|
||||
if err := r.Tag().UpdateParentTags(created.ID, []int{t[0].ID}); err != nil {
|
||||
if err := repo.Tag.UpdateParentTags(ctx, created.ID, []int{t[0].ID}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
|
@ -145,9 +168,10 @@ func makeTags(n int) {
|
|||
}
|
||||
|
||||
func makeStudios(n int) {
|
||||
logf("creating %d studios...", n)
|
||||
for i := 0; i < n; i++ {
|
||||
if err := retry(100, func() error {
|
||||
return withTxn(func(r models.Repository) error {
|
||||
return withTxn(func(ctx context.Context) error {
|
||||
name := names[c.Naming.Tags].generateName(rand.Intn(5) + 1)
|
||||
studio := models.Studio{
|
||||
Name: sql.NullString{String: name, Valid: true},
|
||||
|
|
@ -155,7 +179,7 @@ func makeStudios(n int) {
|
|||
}
|
||||
|
||||
if rand.Intn(100) > 5 {
|
||||
ss, _, err := r.Studio().Query(nil, getRandomFilter(1))
|
||||
ss, _, err := repo.Studio.Query(ctx, nil, getRandomFilter(1))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
@ -168,7 +192,7 @@ func makeStudios(n int) {
|
|||
}
|
||||
}
|
||||
|
||||
_, err := r.Studio().Create(studio)
|
||||
_, err := repo.Studio.Create(ctx, studio)
|
||||
return err
|
||||
})
|
||||
}); err != nil {
|
||||
|
|
@ -178,9 +202,10 @@ func makeStudios(n int) {
|
|||
}
|
||||
|
||||
func makePerformers(n int) {
|
||||
logf("creating %d performers...", n)
|
||||
for i := 0; i < n; i++ {
|
||||
if err := retry(100, func() error {
|
||||
return withTxn(func(r models.Repository) error {
|
||||
return withTxn(func(ctx context.Context) error {
|
||||
name := generatePerformerName()
|
||||
performer := models.Performer{
|
||||
Name: sql.NullString{String: name, Valid: true},
|
||||
|
|
@ -193,7 +218,7 @@ func makePerformers(n int) {
|
|||
|
||||
// TODO - set tags
|
||||
|
||||
_, err := r.Performer().Create(performer)
|
||||
_, err := repo.Performer.Create(ctx, performer)
|
||||
if err != nil {
|
||||
err = fmt.Errorf("error creating performer with name: %s: %s", performer.Name.String, err.Error())
|
||||
}
|
||||
|
|
@ -205,23 +230,72 @@ func makePerformers(n int) {
|
|||
}
|
||||
}
|
||||
|
||||
func generateBaseFile(path string) *file.BaseFile {
|
||||
return &file.BaseFile{
|
||||
Path: path,
|
||||
Basename: path,
|
||||
ParentFolderID: folderID,
|
||||
Fingerprints: []file.Fingerprint{
|
||||
file.Fingerprint{
|
||||
Type: "md5",
|
||||
Fingerprint: md5.FromString(path),
|
||||
},
|
||||
file.Fingerprint{
|
||||
Type: "oshash",
|
||||
Fingerprint: md5.FromString(path),
|
||||
},
|
||||
},
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
}
|
||||
}
|
||||
|
||||
func generateVideoFile(path string) file.File {
|
||||
w, h := getResolution()
|
||||
|
||||
return &file.VideoFile{
|
||||
BaseFile: generateBaseFile(path),
|
||||
Duration: rand.Float64() * 14400,
|
||||
Height: h,
|
||||
Width: w,
|
||||
}
|
||||
}
|
||||
|
||||
func makeVideoFile(ctx context.Context, path string) (file.File, error) {
|
||||
f := generateVideoFile(path)
|
||||
|
||||
if err := repo.File.Create(ctx, f); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return f, nil
|
||||
}
|
||||
|
||||
func logf(f string, args ...interface{}) {
|
||||
log.Printf(f+"\n", args...)
|
||||
}
|
||||
|
||||
func makeScenes(n int) {
|
||||
logger.Infof("creating %d scenes...", n)
|
||||
logf("creating %d scenes...", n)
|
||||
for i := 0; i < n; {
|
||||
// do in batches of 1000
|
||||
batch := i + batchSize
|
||||
|
||||
if err := withTxn(func(r models.Repository) error {
|
||||
if err := withTxn(func(ctx context.Context) error {
|
||||
for ; i < batch && i < n; i++ {
|
||||
scene := generateScene(i)
|
||||
scene.StudioID = getRandomStudioID(r)
|
||||
scene.StudioID = getRandomStudioID(ctx)
|
||||
makeSceneRelationships(ctx, &scene)
|
||||
|
||||
created, err := r.Scene().Create(scene)
|
||||
path := md5.FromString("scene/" + strconv.Itoa(i))
|
||||
f, err := makeVideoFile(ctx, path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
makeSceneRelationships(r, created.ID)
|
||||
if err := repo.Scene.Create(ctx, &scene, []file.ID{f.Base().ID}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
|
|
@ -229,14 +303,14 @@ func makeScenes(n int) {
|
|||
panic(err)
|
||||
}
|
||||
|
||||
logger.Infof("... created %d scenes", i)
|
||||
logf("... created %d scenes", i)
|
||||
}
|
||||
}
|
||||
|
||||
func getResolution() (int64, int64) {
|
||||
func getResolution() (int, int) {
|
||||
res := models.AllResolutionEnum[rand.Intn(len(models.AllResolutionEnum))]
|
||||
h := int64(res.GetMaxResolution())
|
||||
var w int64
|
||||
h := res.GetMaxResolution()
|
||||
var w int
|
||||
if h == 240 || h == 480 || rand.Intn(10) == 9 {
|
||||
w = h * 4 / 3
|
||||
} else {
|
||||
|
|
@ -250,54 +324,69 @@ func getResolution() (int64, int64) {
|
|||
return w, h
|
||||
}
|
||||
|
||||
func getDate() string {
|
||||
func getDate() time.Time {
|
||||
s := rand.Int63n(time.Now().Unix())
|
||||
|
||||
d := time.Unix(s, 0)
|
||||
return d.Format("2006-01-02")
|
||||
return time.Unix(s, 0)
|
||||
}
|
||||
|
||||
func generateScene(i int) models.Scene {
|
||||
path := md5.FromString("scene/" + strconv.Itoa(i))
|
||||
w, h := getResolution()
|
||||
|
||||
return models.Scene{
|
||||
Path: path,
|
||||
Title: sql.NullString{String: names[c.Naming.Scenes].generateName(rand.Intn(7) + 1), Valid: true},
|
||||
Checksum: sql.NullString{String: md5.FromString(path), Valid: true},
|
||||
OSHash: sql.NullString{String: md5.FromString(path), Valid: true},
|
||||
Duration: sql.NullFloat64{
|
||||
Float64: rand.Float64() * 14400,
|
||||
Valid: true,
|
||||
},
|
||||
Height: models.NullInt64(h),
|
||||
Width: models.NullInt64(w),
|
||||
Date: models.SQLiteDate{
|
||||
String: getDate(),
|
||||
Valid: true,
|
||||
Title: names[c.Naming.Scenes].generateName(rand.Intn(7) + 1),
|
||||
Date: &models.Date{
|
||||
Time: getDate(),
|
||||
},
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
}
|
||||
}
|
||||
|
||||
func generateImageFile(path string) file.File {
|
||||
w, h := getResolution()
|
||||
|
||||
return &file.ImageFile{
|
||||
BaseFile: generateBaseFile(path),
|
||||
Height: h,
|
||||
Width: w,
|
||||
}
|
||||
}
|
||||
|
||||
func makeImageFile(ctx context.Context, path string) (file.File, error) {
|
||||
f := generateImageFile(path)
|
||||
|
||||
if err := repo.File.Create(ctx, f); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return f, nil
|
||||
}
|
||||
|
||||
func makeImages(n int) {
|
||||
logger.Infof("creating %d images...", n)
|
||||
logf("creating %d images...", n)
|
||||
for i := 0; i < n; {
|
||||
// do in batches of 1000
|
||||
batch := i + batchSize
|
||||
if err := withTxn(func(r models.Repository) error {
|
||||
if err := withTxn(func(ctx context.Context) error {
|
||||
for ; i < batch && i < n; i++ {
|
||||
image := generateImage(i)
|
||||
image.StudioID = getRandomStudioID(r)
|
||||
image.StudioID = getRandomStudioID(ctx)
|
||||
makeImageRelationships(ctx, &image)
|
||||
|
||||
created, err := r.Image().Create(image)
|
||||
path := md5.FromString("image/" + strconv.Itoa(i))
|
||||
f, err := makeImageFile(ctx, path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
makeImageRelationships(r, created.ID)
|
||||
if err := repo.Image.Create(ctx, &models.ImageCreateInput{
|
||||
Image: &image,
|
||||
FileIDs: []file.ID{f.Base().ID},
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
logger.Infof("... created %d images", i)
|
||||
logf("... created %d images", i)
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
|
|
@ -307,36 +396,37 @@ func makeImages(n int) {
}

func generateImage(i int) models.Image {
path := md5.FromString("image/" + strconv.Itoa(i))

w, h := getResolution()

return models.Image{
Title: sql.NullString{String: names[c.Naming.Images].generateName(rand.Intn(7) + 1), Valid: true},
Path: path,
Checksum: md5.FromString(path),
Height: models.NullInt64(h),
Width: models.NullInt64(w),
Title: names[c.Naming.Images].generateName(rand.Intn(7) + 1),
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
}
}

func makeGalleries(n int) {
logger.Infof("creating %d galleries...", n)
logf("creating %d galleries...", n)
for i := 0; i < n; {
// do in batches of 1000
batch := i + batchSize

if err := withTxn(func(r models.Repository) error {
if err := withTxn(func(ctx context.Context) error {
for ; i < batch && i < n; i++ {
gallery := generateGallery(i)
gallery.StudioID = getRandomStudioID(r)
gallery.StudioID = getRandomStudioID(ctx)
gallery.TagIDs = getRandomTags(ctx, 0, 15)
gallery.PerformerIDs = getRandomPerformers(ctx)

created, err := r.Gallery().Create(gallery)
path := md5.FromString("gallery/" + strconv.Itoa(i))
f, err := makeZipFile(ctx, path)
if err != nil {
return err
}

makeGalleryRelationships(r, created.ID)
if err := repo.Gallery.Create(ctx, &gallery, []file.ID{f.Base().ID}); err != nil {
return err
}

makeGalleryRelationships(ctx, &gallery)
}

return nil
@ -344,49 +434,60 @@ func makeGalleries(n int) {
panic(err)
}

logger.Infof("... created %d galleries", i)
logf("... created %d galleries", i)
}
}

func generateGallery(i int) models.Gallery {
path := md5.FromString("gallery/" + strconv.Itoa(i))
func generateZipFile(path string) file.File {
return generateBaseFile(path)
}

func makeZipFile(ctx context.Context, path string) (file.File, error) {
f := generateZipFile(path)

if err := repo.File.Create(ctx, f); err != nil {
return nil, err
}

return f, nil
}

func generateGallery(i int) models.Gallery {
return models.Gallery{
Title: sql.NullString{String: names[c.Naming.Galleries].generateName(rand.Intn(7) + 1), Valid: true},
Path: sql.NullString{String: path, Valid: true},
Checksum: md5.FromString(path),
Date: models.SQLiteDate{
String: getDate(),
Valid: true,
Title: names[c.Naming.Galleries].generateName(rand.Intn(7) + 1),
Date: &models.Date{
Time: getDate(),
},
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
}
}

func makeMarkers(n int) {
logger.Infof("creating %d markers...", n)
logf("creating %d markers...", n)
for i := 0; i < n; {
// do in batches of 1000
batch := i + batchSize
if err := withTxn(func(r models.Repository) error {
if err := withTxn(func(ctx context.Context) error {
for ; i < batch && i < n; i++ {
marker := generateMarker(i)
marker.SceneID = models.NullInt64(int64(getRandomScene()))
marker.PrimaryTagID = getRandomTags(r, 1, 1)[0]
marker.PrimaryTagID = getRandomTags(ctx, 1, 1)[0]

created, err := r.SceneMarker().Create(marker)
created, err := repo.SceneMarker.Create(ctx, marker)
if err != nil {
return err
}

tags := getRandomTags(r, 0, 5)
tags := getRandomTags(ctx, 0, 5)
// remove primary tag
tags = intslice.IntExclude(tags, []int{marker.PrimaryTagID})
if err := r.SceneMarker().UpdateTags(created.ID, tags); err != nil {
if err := repo.SceneMarker.UpdateTags(ctx, created.ID, tags); err != nil {
return err
}
}

logger.Infof("... created %d markers", i)
logf("... created %d markers", i)

return nil
}); err != nil {
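makeImages, makeGalleries, and makeMarkers all share the same loop shape: rows are committed in batches of `batchSize` per transaction, avoiding both a single huge transaction and per-row commit overhead. A rough sketch of that pattern, with a hypothetical `withTxn` stub standing in for the real transaction helper:

```go
package main

import (
	"context"
	"fmt"
)

const batchSize = 1000

// withTxn is a hypothetical stand-in for the real transaction helper:
// it would begin a transaction, run fn, and commit or roll back.
func withTxn(fn func(ctx context.Context) error) error {
	return fn(context.Background())
}

func makeThings(n int) error {
	for i := 0; i < n; {
		// commit in batches of batchSize
		batch := i + batchSize
		if err := withTxn(func(ctx context.Context) error {
			for ; i < batch && i < n; i++ {
				// create object i inside the current transaction
			}
			return nil
		}); err != nil {
			return err
		}
		fmt.Printf("... created %d things\n", i)
	}
	return nil
}

func main() {
	if err := makeThings(2500); err != nil {
		panic(err)
	}
}
```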
@ -410,9 +511,9 @@ func getRandomFilter(n int) *models.FindFilterType {
}
}

func getRandomStudioID(r models.Repository) sql.NullInt64 {
func getRandomStudioID(ctx context.Context) *int {
if rand.Intn(10) == 0 {
return sql.NullInt64{}
return nil
}

// s, _, err := r.Studio().Query(nil, getRandomFilter(1))
@ -420,82 +521,44 @@ func getRandomStudioID(r models.Repository) sql.NullInt64 {
// panic(err)
// }

return sql.NullInt64{
Int64: int64(rand.Int63n(int64(c.Studios)) + 1),
Valid: true,
}
v := rand.Intn(c.Studios) + 1
return &v
}

func makeSceneRelationships(r models.Repository, id int) {
func makeSceneRelationships(ctx context.Context, s *models.Scene) {
// add tags
tagIDs := getRandomTags(r, 0, 15)
if len(tagIDs) > 0 {
if err := r.Scene().UpdateTags(id, tagIDs); err != nil {
panic(err)
}
}
s.TagIDs = getRandomTags(ctx, 0, 15)

// add performers
performerIDs := getRandomPerformers(r)
if len(tagIDs) > 0 {
if err := r.Scene().UpdatePerformers(id, performerIDs); err != nil {
panic(err)
}
}
s.PerformerIDs = getRandomPerformers(ctx)
}

func makeImageRelationships(r models.Repository, id int) {
func makeImageRelationships(ctx context.Context, i *models.Image) {
// there are typically many more images. For performance reasons
// only a small proportion should have tags/performers

// add tags
if rand.Intn(100) == 0 {
tagIDs := getRandomTags(r, 1, 15)
if len(tagIDs) > 0 {
if err := r.Image().UpdateTags(id, tagIDs); err != nil {
panic(err)
}
}
i.TagIDs = getRandomTags(ctx, 1, 15)
}

// add performers
if rand.Intn(100) <= 1 {
performerIDs := getRandomPerformers(r)
if len(performerIDs) > 0 {
if err := r.Image().UpdatePerformers(id, performerIDs); err != nil {
panic(err)
}
}
i.PerformerIDs = getRandomPerformers(ctx)
}
}

func makeGalleryRelationships(r models.Repository, id int) {
// add tags
tagIDs := getRandomTags(r, 0, 15)
if len(tagIDs) > 0 {
if err := r.Gallery().UpdateTags(id, tagIDs); err != nil {
panic(err)
}
}

// add performers
performerIDs := getRandomPerformers(r)
if len(tagIDs) > 0 {
if err := r.Gallery().UpdatePerformers(id, performerIDs); err != nil {
panic(err)
}
}

func makeGalleryRelationships(ctx context.Context, g *models.Gallery) {
// add images
imageIDs := getRandomImages(r)
if len(tagIDs) > 0 {
if err := r.Gallery().UpdateImages(id, imageIDs); err != nil {
imageIDs := getRandomImages(ctx)
if len(imageIDs) > 0 {
if err := repo.Gallery.UpdateImages(ctx, g.ID, imageIDs); err != nil {
panic(err)
}
}
}

func getRandomPerformers(r models.Repository) []int {
func getRandomPerformers(ctx context.Context) []int {
n := rand.Intn(5)

var ret []int
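The relationship helpers above change shape: instead of creating an object and then issuing separate UpdateTags/UpdatePerformers calls against a repository, related IDs are now set on the model before it is created. A minimal sketch of the two shapes, using hypothetical types and store interfaces rather than the real stash repositories:

```go
package sketch

// Scene is a hypothetical, simplified model for illustration only.
type Scene struct {
	ID           int
	TagIDs       []int
	PerformerIDs []int
}

// Old shape: create first, then patch relationships with follow-up calls.
type sceneStoreOld interface {
	Create(s Scene) (Scene, error)
	UpdateTags(id int, tagIDs []int) error
	UpdatePerformers(id int, performerIDs []int) error
}

// New shape: relationships travel with the model into a single Create.
type sceneStoreNew interface {
	Create(s *Scene) error
}

func makeSceneOld(store sceneStoreOld, tagIDs, performerIDs []int) error {
	created, err := store.Create(Scene{})
	if err != nil {
		return err
	}
	if err := store.UpdateTags(created.ID, tagIDs); err != nil {
		return err
	}
	return store.UpdatePerformers(created.ID, performerIDs)
}

func makeSceneNew(store sceneStoreNew, tagIDs, performerIDs []int) error {
	s := &Scene{TagIDs: tagIDs, PerformerIDs: performerIDs}
	return store.Create(s)
}
```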
@ -521,7 +584,7 @@ func getRandomScene() int {
return rand.Intn(c.Scenes) + 1
}

func getRandomTags(r models.Repository, min, max int) []int {
func getRandomTags(ctx context.Context, min, max int) []int {
var n int
if min == max {
n = min
@ -548,7 +611,7 @@ func getRandomTags(r models.Repository, min, max int) []int {
return ret
}

func getRandomImages(r models.Repository) []int {
func getRandomImages(ctx context.Context) []int {
n := rand.Intn(500)

var ret []int
@ -1,4 +1,5 @@
// +build ignore
//go:build tools
// +build tools

package main
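This hunk swaps the old `// +build ignore` constraint for a `tools` build tag, written in both the newer `//go:build` form and the legacy `// +build` form so older toolchains still honour it. As a generic illustration of how such a tag keeps a helper `package main` file out of normal builds (the file name and the blank import below are hypothetical placeholders, not necessarily what stash uses):

```go
//go:build tools
// +build tools

// This file is never compiled into the binary because of the build tag
// above; it exists so `go mod tidy` keeps tool dependencies in go.mod.
package main

import (
	_ "golang.org/x/tools/cmd/goimports" // example tool import; placeholder only
)
```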
@ -171,6 +171,10 @@ export const App: React.FC = () => {
}

function maybeRenderReleaseNotes() {
if (setupMatch) {
return;
}

const lastNoteSeen = (config.data?.configuration.ui as IUIConfig)
?.lastNoteSeen;
const notes = releaseNotes.filter((n) => {
@ -9,7 +9,6 @@ Please report all issues to the following Github issue: https://github.com/stash
### 💥 Known issues
* Import/export functionality is currently disabled. Needs further design.
* Missing covers are not currently regenerated. Need to consider further, especially around scene cover redesign.
* Deleting galleries is currently slow.

### ✨ New Features
* Added support for identical files. Identical files are assigned to the same scene/gallery/image and can be viewed in File Info. ([#2676](https://github.com/stashapp/stash/pull/2676))
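The release note above describes identical-file support: files with the same content fingerprint map to a single logical scene/gallery/image instead of creating duplicates. A toy sketch of the underlying idea, grouping paths by fingerprint — the hashing and types here are illustrative, not stash's actual implementation:

```go
package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
)

// fingerprint returns a content hash; a real implementation would hash
// file contents (stash also supports other fingerprints such as oshash).
func fingerprint(contents []byte) string {
	sum := md5.Sum(contents)
	return hex.EncodeToString(sum[:])
}

func main() {
	files := map[string][]byte{
		"/library/a/clip.mp4":      []byte("same bytes"),
		"/library/b/clip-copy.mp4": []byte("same bytes"),
		"/library/c/other.mp4":     []byte("different bytes"),
	}

	// Identical files collapse onto one logical object keyed by fingerprint;
	// each object keeps the list of paths it was seen at.
	objects := map[string][]string{}
	for path, contents := range files {
		fp := fingerprint(contents)
		objects[fp] = append(objects[fp], path)
	}

	for fp, paths := range objects {
		fmt.Println(fp, paths)
	}
}
```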
@ -10,7 +10,7 @@ interface IReleaseNotes {

export const releaseNotes: IReleaseNotes[] = [
{
date: 20220715,
date: 20220801,
content: v0170,
},
];
@ -6,14 +6,13 @@ After migrating, please run a scan on your entire library to populate missing da

Please report all issues to the following Github issue: https://github.com/stashapp/stash/issues/2737

### **Warning:** if you are upgrading from an older `files-refactor` build, you will need to re-migrate your system from a schema version 31 database.

### 💥 Known issues
* Import/export functionality is currently disabled. Needs further design.
* Missing covers are not currently regenerated. Need to consider further, especially around scene cover redesign.
* Deleting galleries is currently slow.

### Other changes:

* Added support for filtering and sorting by file count. ([#2744](https://github.com/stashapp/stash/pull/2744))
* Changelog has been moved from the stats page to a section in the Settings page.
* Object titles are now displayed as the file basename if the title is not explicitly set. The `Don't include file extension as part of the title` scan flag is no longer supported.
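The last bullet above describes the new title fallback: when no explicit title is set, the file's basename is shown, extension included. A minimal sketch of that display rule; the function name is illustrative, not stash's actual helper:

```go
package main

import (
	"fmt"
	"path/filepath"
)

// displayTitle is a hypothetical helper: prefer the explicit title,
// otherwise fall back to the file basename, extension and all.
func displayTitle(title, path string) string {
	if title != "" {
		return title
	}
	return filepath.Base(path)
}

func main() {
	fmt.Println(displayTitle("", "/library/scenes/holiday-2021.mp4")) // holiday-2021.mp4
	fmt.Println(displayTitle("Holiday", "/library/scenes/holiday-2021.mp4"))
}
```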