Handle zip file modification (#877)
* Rescan zip if updating mod time
* Use inequality for mod time comparison
* Add sort by file_mod_time (fixes #469)
This commit is contained in:
parent 9ec762ae9a
commit 5f482b7b8a
20 changed files with 612 additions and 163 deletions
@@ -19,7 +19,7 @@ import (
 var DB *sqlx.DB
 var dbPath string
-var appSchemaVersion uint = 14
+var appSchemaVersion uint = 15
 var databaseSchemaVersion uint

 const sqlite3Driver = "sqlite3ex"
pkg/database/migrations/15_file_mod_time.up.sql (new file, 3 lines)
@@ -0,0 +1,3 @@
ALTER TABLE `scenes` ADD COLUMN `file_mod_time` datetime;
ALTER TABLE `images` ADD COLUMN `file_mod_time` datetime;
ALTER TABLE `galleries` ADD COLUMN `file_mod_time` datetime;
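The new column is what backs the file_mod_time sort option added at the end of this diff. A minimal, illustrative sketch of the kind of query it enables, written against sqlx and a standard SQLite driver rather than stash's own query builders (the database file name and driver choice are placeholders, not stash's actual configuration):

package main

import (
    "log"

    "github.com/jmoiron/sqlx"
    _ "github.com/mattn/go-sqlite3"
)

// scenesByModTime returns scene paths ordered by file modification time,
// newest first. Rows whose file_mod_time is still NULL (not rescanned since
// the migration) sort last.
func scenesByModTime(db *sqlx.DB) ([]string, error) {
    var paths []string
    err := db.Select(&paths,
        `SELECT path FROM scenes
         ORDER BY file_mod_time IS NULL, file_mod_time DESC`)
    return paths, err
}

func main() {
    db, err := sqlx.Open("sqlite3", "stash.sqlite") // placeholder path
    if err != nil {
        log.Fatal(err)
    }
    defer db.Close()

    paths, err := scenesByModTime(db)
    if err != nil {
        log.Fatal(err)
    }
    for _, p := range paths {
        log.Println(p)
    }
}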
pkg/database/transaction.go (new file, 33 lines)
@@ -0,0 +1,33 @@
package database

import (
    "context"

    "github.com/jmoiron/sqlx"
)

// WithTxn executes the provided function within a transaction. It rolls back
// the transaction if the function returns an error, otherwise the transaction
// is committed.
func WithTxn(fn func(tx *sqlx.Tx) error) error {
    ctx := context.TODO()
    tx := DB.MustBeginTx(ctx, nil)

    var err error
    defer func() {
        if p := recover(); p != nil {
            // a panic occurred, rollback and repanic
            tx.Rollback()
            panic(p)
        } else if err != nil {
            // something went wrong, rollback
            tx.Rollback()
        } else {
            // all good, commit
            err = tx.Commit()
        }
    }()

    err = fn(tx)
    return err
}
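WithTxn centralizes the begin/commit/rollback bookkeeping that the scan task below relies on. A minimal usage sketch built only from functions appearing elsewhere in this diff (the wrapper function name and its arguments are illustrative, not part of the commit):

// setGalleryModTime is an illustrative wrapper, not part of this commit.
func setGalleryModTime(galleryID int, modTime time.Time) error {
    qb := models.NewGalleryQueryBuilder()
    return database.WithTxn(func(tx *sqlx.Tx) error {
        return qb.UpdateFileModTime(galleryID, models.NullSQLiteTimestamp{
            Timestamp: modTime,
            Valid:     true,
        }, tx)
    })
}

If the callback returns an error the transaction is rolled back; if it panics, the deferred handler rolls back and re-panics, so partially applied updates never commit.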
@@ -20,6 +20,10 @@ func ToBasicJSON(gallery *models.Gallery) (*jsonschema.Gallery, error) {
        newGalleryJSON.Path = gallery.Path.String
    }

    if gallery.FileModTime.Valid {
        newGalleryJSON.FileModTime = models.JSONTime{Time: gallery.FileModTime.Timestamp}
    }

    if gallery.Title.Valid {
        newGalleryJSON.Title = gallery.Title.String
    }
@@ -33,6 +33,10 @@ func ToBasicJSON(image *models.Image) *jsonschema.Image {
func getImageFileJSON(image *models.Image) *jsonschema.ImageFile {
    ret := &jsonschema.ImageFile{}

    if image.FileModTime.Valid {
        ret.ModTime = models.JSONTime{Time: image.FileModTime.Timestamp}
    }

    if image.Size.Valid {
        ret.Size = int(image.Size.Int64)
    }
@@ -11,6 +11,7 @@ import (
    "os"
    "path/filepath"
    "strings"
    "time"

    "github.com/stashapp/stash/pkg/models"
    "github.com/stashapp/stash/pkg/utils"
@@ -120,6 +121,21 @@ func getFilePath(path string) (zipFilename, filename string) {
    return
}

// GetFileDetails returns a pointer to an Image object with the
// width, height and size populated.
func GetFileDetails(path string) (*models.Image, error) {
    i := &models.Image{
        Path: path,
    }

    err := SetFileDetails(i)
    if err != nil {
        return nil, err
    }

    return i, nil
}

func SetFileDetails(i *models.Image) error {
    f, err := stat(i.Path)
    if err != nil {
@@ -147,6 +163,20 @@ func SetFileDetails(i *models.Image) error {
    return nil
}

// GetFileModTime gets the file modification time, handling files in zip files.
func GetFileModTime(path string) (time.Time, error) {
    fi, err := stat(path)
    if err != nil {
        return time.Time{}, fmt.Errorf("error performing stat on %s: %s", path, err.Error())
    }

    ret := fi.ModTime()
    // truncate to seconds, since we don't store beyond that in the database
    ret = ret.Truncate(time.Second)

    return ret, nil
}

func stat(path string) (os.FileInfo, error) {
    // may need to read from a zip file
    zipFilename, filename := getFilePath(path)
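Truncating to whole seconds matters because the database column only stores second precision and the scanner compares the stat'd time against the stored one with Equal: without truncation, a sub-second component would make every file look modified on each scan. A small standard-library illustration:

package main

import (
    "fmt"
    "time"
)

func main() {
    // On-disk mod time as returned by os.Stat, with nanosecond precision.
    onDisk := time.Date(2020, 10, 12, 8, 30, 15, 123456789, time.UTC)
    // The same instant after a round trip through the database (whole seconds).
    stored := time.Date(2020, 10, 12, 8, 30, 15, 0, time.UTC)

    fmt.Println(onDisk.Equal(stored))                       // false: nanoseconds differ
    fmt.Println(onDisk.Truncate(time.Second).Equal(stored)) // true: compared at second precision
}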
@@ -9,19 +9,20 @@ import (
 )
 
 type Gallery struct {
-	Path       string          `json:"path,omitempty"`
-	Checksum   string          `json:"checksum,omitempty"`
-	Zip        bool            `json:"zip,omitempty"`
-	Title      string          `json:"title,omitempty"`
-	URL        string          `json:"url,omitempty"`
-	Date       string          `json:"date,omitempty"`
-	Details    string          `json:"details,omitempty"`
-	Rating     int             `json:"rating,omitempty"`
-	Studio     string          `json:"studio,omitempty"`
-	Performers []string        `json:"performers,omitempty"`
-	Tags       []string        `json:"tags,omitempty"`
-	CreatedAt  models.JSONTime `json:"created_at,omitempty"`
-	UpdatedAt  models.JSONTime `json:"updated_at,omitempty"`
+	Path        string          `json:"path,omitempty"`
+	Checksum    string          `json:"checksum,omitempty"`
+	Zip         bool            `json:"zip,omitempty"`
+	Title       string          `json:"title,omitempty"`
+	URL         string          `json:"url,omitempty"`
+	Date        string          `json:"date,omitempty"`
+	Details     string          `json:"details,omitempty"`
+	Rating      int             `json:"rating,omitempty"`
+	Studio      string          `json:"studio,omitempty"`
+	Performers  []string        `json:"performers,omitempty"`
+	Tags        []string        `json:"tags,omitempty"`
+	FileModTime models.JSONTime `json:"file_mod_time,omitempty"`
+	CreatedAt   models.JSONTime `json:"created_at,omitempty"`
+	UpdatedAt   models.JSONTime `json:"updated_at,omitempty"`
 }
 
 func LoadGalleryFile(filePath string) (*Gallery, error) {
@@ -9,9 +9,10 @@ import (
 )
 
 type ImageFile struct {
-	Size   int `json:"size"`
-	Width  int `json:"width"`
-	Height int `json:"height"`
+	ModTime models.JSONTime `json:"mod_time,omitempty"`
+	Size    int             `json:"size"`
+	Width   int             `json:"width"`
+	Height  int             `json:"height"`
 }
 
 type Image struct {
@@ -18,15 +18,16 @@ type SceneMarker struct {
 }
 
 type SceneFile struct {
-	Size       string `json:"size"`
-	Duration   string `json:"duration"`
-	VideoCodec string `json:"video_codec"`
-	AudioCodec string `json:"audio_codec"`
-	Format     string `json:"format"`
-	Width      int    `json:"width"`
-	Height     int    `json:"height"`
-	Framerate  string `json:"framerate"`
-	Bitrate    int    `json:"bitrate"`
+	ModTime    models.JSONTime `json:"mod_time,omitempty"`
+	Size       string          `json:"size"`
+	Duration   string          `json:"duration"`
+	VideoCodec string          `json:"video_codec"`
+	AudioCodec string          `json:"audio_codec"`
+	Format     string          `json:"format"`
+	Width      int             `json:"width"`
+	Height     int             `json:"height"`
+	Framerate  string          `json:"framerate"`
+	Bitrate    int             `json:"bitrate"`
 }
 
 type SceneMovie struct {
@@ -4,6 +4,7 @@ import (
    "archive/zip"
    "context"
    "database/sql"
    "fmt"
    "os"
    "path/filepath"
    "strconv"
@@ -47,9 +48,65 @@ func (t *ScanTask) scanGallery() {
    qb := models.NewGalleryQueryBuilder()
    gallery, _ := qb.FindByPath(t.FilePath)

    fileModTime, err := t.getFileModTime()
    if err != nil {
        logger.Error(err.Error())
        return
    }

    if gallery != nil {
        // We already have this item in the database, keep going

        // if file mod time is not set, set it now
        // we will also need to rescan the zip contents
        updateModTime := false
        if !gallery.FileModTime.Valid {
            updateModTime = true
            t.updateFileModTime(gallery.ID, fileModTime, &qb)

            // update our copy of the gallery
            var err error
            gallery, err = qb.Find(gallery.ID, nil)
            if err != nil {
                logger.Error(err.Error())
                return
            }
        }

        // if the mod time of the zip file is different than that of the associated
        // gallery, then recalculate the checksum
        modified := t.isFileModified(fileModTime, gallery.FileModTime)
        if modified {
            logger.Infof("%s has been updated: rescanning", t.FilePath)

            // update the checksum and the modification time
            checksum, err := t.calculateChecksum()
            if err != nil {
                logger.Error(err.Error())
                return
            }

            currentTime := time.Now()
            galleryPartial := models.GalleryPartial{
                ID:       gallery.ID,
                Checksum: &checksum,
                FileModTime: &models.NullSQLiteTimestamp{
                    Timestamp: fileModTime,
                    Valid:     true,
                },
                UpdatedAt: &models.SQLiteTimestamp{Timestamp: currentTime},
            }

            err = database.WithTxn(func(tx *sqlx.Tx) error {
                _, err := qb.UpdatePartial(galleryPartial, tx)
                return err
            })
            if err != nil {
                logger.Error(err.Error())
                return
            }
        }

        // scan the zip files if the gallery has no images
        iqb := models.NewImageQueryBuilder()
        images, err := iqb.CountByGalleryID(gallery.ID)
@@ -57,7 +114,7 @@ func (t *ScanTask) scanGallery() {
            logger.Errorf("error getting images for zip gallery %s: %s", t.FilePath, err.Error())
        }

-       if images == 0 {
+       if images == 0 || modified || updateModTime {
            t.scanZipImages(gallery)
        } else {
            // in case thumbnails have been deleted, regenerate them
@@ -85,7 +142,6 @@ func (t *ScanTask) scanGallery() {
        if exists {
            logger.Infof("%s already exists. Duplicate of %s ", t.FilePath, gallery.Path.String)
        } else {

            logger.Infof("%s already exists. Updating path...", t.FilePath)
            gallery.Path = sql.NullString{
                String: t.FilePath,
@@ -103,6 +159,10 @@ func (t *ScanTask) scanGallery() {
                String: t.FilePath,
                Valid:  true,
            },
            FileModTime: models.NullSQLiteTimestamp{
                Timestamp: fileModTime,
                Valid:     true,
            },
            CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
            UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
        }
@@ -138,6 +198,44 @@ func (t *ScanTask) scanGallery() {
    }
}

type fileModTimeUpdater interface {
    UpdateFileModTime(id int, modTime models.NullSQLiteTimestamp, tx *sqlx.Tx) error
}

func (t *ScanTask) updateFileModTime(id int, fileModTime time.Time, updater fileModTimeUpdater) error {
    logger.Infof("setting file modification time on %s", t.FilePath)

    err := database.WithTxn(func(tx *sqlx.Tx) error {
        return updater.UpdateFileModTime(id, models.NullSQLiteTimestamp{
            Timestamp: fileModTime,
            Valid:     true,
        }, tx)
    })

    if err != nil {
        return err
    }

    return nil
}

func (t *ScanTask) getFileModTime() (time.Time, error) {
    fi, err := os.Stat(t.FilePath)
    if err != nil {
        return time.Time{}, fmt.Errorf("error performing stat on %s: %s", t.FilePath, err.Error())
    }

    ret := fi.ModTime()
    // truncate to seconds, since we don't store beyond that in the database
    ret = ret.Truncate(time.Second)

    return ret, nil
}

func (t *ScanTask) isFileModified(fileModTime time.Time, modTime models.NullSQLiteTimestamp) bool {
    return !modTime.Timestamp.Equal(fileModTime)
}

// associates a gallery to a scene with the same basename
func (t *ScanTask) associateGallery(wg *sync.WaitGroup) {
    qb := models.NewGalleryQueryBuilder()
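Note that isFileModified uses inequality rather than a newer-than check (the second bullet in the commit message), so a file whose modification time moves backwards, say after restoring an older copy, still triggers a rescan. A small standalone illustration of the difference:

package main

import (
    "fmt"
    "time"
)

func main() {
    stored := time.Date(2020, 10, 12, 8, 30, 15, 0, time.UTC)
    // The file on disk was replaced by an older copy.
    onDisk := stored.Add(-48 * time.Hour)

    fmt.Println(onDisk.After(stored))  // false: a newer-than check would skip the rescan
    fmt.Println(!onDisk.Equal(stored)) // true: the inequality check still detects the change
}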
@@ -196,7 +294,38 @@ func (t *ScanTask) associateGallery(wg *sync.WaitGroup) {
func (t *ScanTask) scanScene() {
    qb := models.NewSceneQueryBuilder()
    scene, _ := qb.FindByPath(t.FilePath)

    fileModTime, err := t.getFileModTime()
    if err != nil {
        logger.Error(err.Error())
        return
    }

    if scene != nil {
        // if file mod time is not set, set it now
        if !scene.FileModTime.Valid {
            t.updateFileModTime(scene.ID, fileModTime, &qb)

            // update our copy of the scene
            var err error
            scene, err = qb.Find(scene.ID)
            if err != nil {
                logger.Error(err.Error())
                return
            }
        }

        // if the mod time of the file is different than that of the associated
        // scene, then recalculate the checksum and regenerate the thumbnail
        modified := t.isFileModified(fileModTime, scene.FileModTime)
        if modified {
            scene, err = t.rescanScene(scene, fileModTime)
            if err != nil {
                logger.Error(err.Error())
                return
            }
        }

        // We already have this item in the database
        // check for thumbnails,screenshots
        t.makeScreenshots(nil, scene.GetHash(t.fileNamingAlgorithm))
@@ -362,8 +491,12 @@ func (t *ScanTask) scanScene() {
            Framerate:  sql.NullFloat64{Float64: videoFile.FrameRate, Valid: true},
            Bitrate:    sql.NullInt64{Int64: videoFile.Bitrate, Valid: true},
            Size:       sql.NullString{String: strconv.Itoa(int(videoFile.Size)), Valid: true},
-           CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
-           UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
+           FileModTime: models.NullSQLiteTimestamp{
+               Timestamp: fileModTime,
+               Valid:     true,
+           },
+           CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
+           UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
        }

        if t.UseFileMetadata {
@@ -381,6 +514,77 @@ func (t *ScanTask) scanScene() {
    }
}

func (t *ScanTask) rescanScene(scene *models.Scene, fileModTime time.Time) (*models.Scene, error) {
    logger.Infof("%s has been updated: rescanning", t.FilePath)

    // update the oshash/checksum and the modification time
    logger.Infof("Calculating oshash for existing file %s ...", t.FilePath)
    oshash, err := utils.OSHashFromFilePath(t.FilePath)
    if err != nil {
        return nil, err
    }

    var checksum *sql.NullString
    if t.calculateMD5 {
        cs, err := t.calculateChecksum()
        if err != nil {
            return nil, err
        }

        checksum = &sql.NullString{
            String: cs,
            Valid:  true,
        }
    }

    // regenerate the file details as well
    videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.FilePath)
    if err != nil {
        return nil, err
    }
    container := ffmpeg.MatchContainer(videoFile.Container, t.FilePath)

    currentTime := time.Now()
    scenePartial := models.ScenePartial{
        ID:       scene.ID,
        Checksum: checksum,
        OSHash: &sql.NullString{
            String: oshash,
            Valid:  true,
        },
        Duration:   &sql.NullFloat64{Float64: videoFile.Duration, Valid: true},
        VideoCodec: &sql.NullString{String: videoFile.VideoCodec, Valid: true},
        AudioCodec: &sql.NullString{String: videoFile.AudioCodec, Valid: true},
        Format:     &sql.NullString{String: string(container), Valid: true},
        Width:      &sql.NullInt64{Int64: int64(videoFile.Width), Valid: true},
        Height:     &sql.NullInt64{Int64: int64(videoFile.Height), Valid: true},
        Framerate:  &sql.NullFloat64{Float64: videoFile.FrameRate, Valid: true},
        Bitrate:    &sql.NullInt64{Int64: videoFile.Bitrate, Valid: true},
        Size:       &sql.NullString{String: strconv.Itoa(int(videoFile.Size)), Valid: true},
        FileModTime: &models.NullSQLiteTimestamp{
            Timestamp: fileModTime,
            Valid:     true,
        },
        UpdatedAt: &models.SQLiteTimestamp{Timestamp: currentTime},
    }

    var ret *models.Scene
    err = database.WithTxn(func(tx *sqlx.Tx) error {
        qb := models.NewSceneQueryBuilder()
        var txnErr error
        ret, txnErr = qb.Update(scenePartial, tx)
        return txnErr
    })
    if err != nil {
        logger.Error(err.Error())
        return nil, err
    }

    // leave the generated files as is - the scene file may have been moved
    // elsewhere

    return ret, nil
}

func (t *ScanTask) makeScreenshots(probeResult *ffmpeg.VideoFile, checksum string) {
    thumbPath := instance.Paths.Scene.GetThumbnailScreenshotPath(checksum)
    normalPath := instance.Paths.Scene.GetScreenshotPath(checksum)
@@ -453,7 +657,38 @@ func (t *ScanTask) regenerateZipImages(zipGallery *models.Gallery) {
func (t *ScanTask) scanImage() {
    qb := models.NewImageQueryBuilder()
    i, _ := qb.FindByPath(t.FilePath)

    fileModTime, err := image.GetFileModTime(t.FilePath)
    if err != nil {
        logger.Error(err.Error())
        return
    }

    if i != nil {
        // if file mod time is not set, set it now
        if !i.FileModTime.Valid {
            t.updateFileModTime(i.ID, fileModTime, &qb)

            // update our copy of the gallery
            var err error
            i, err = qb.Find(i.ID)
            if err != nil {
                logger.Error(err.Error())
                return
            }
        }

        // if the mod time of the file is different than that of the associated
        // image, then recalculate the checksum and regenerate the thumbnail
        modified := t.isFileModified(fileModTime, i.FileModTime)
        if modified {
            i, err = t.rescanImage(i, fileModTime)
            if err != nil {
                logger.Error(err.Error())
                return
            }
        }

        // We already have this item in the database
        // check for thumbnails
        t.generateThumbnail(i)
@@ -469,7 +704,7 @@ func (t *ScanTask) scanImage() {
        var checksum string

        logger.Infof("%s not found. Calculating checksum...", t.FilePath)
-       checksum, err := t.calculateImageChecksum()
+       checksum, err = t.calculateImageChecksum()
        if err != nil {
            logger.Errorf("error calculating checksum for %s: %s", t.FilePath, err.Error())
            return
@@ -497,8 +732,12 @@ func (t *ScanTask) scanImage() {
        logger.Infof("%s doesn't exist. Creating new item...", image.PathDisplayName(t.FilePath))
        currentTime := time.Now()
        newImage := models.Image{
-           Checksum: checksum,
-           Path:     t.FilePath,
+           Checksum: checksum,
+           Path:     t.FilePath,
+           FileModTime: models.NullSQLiteTimestamp{
+               Timestamp: fileModTime,
+               Valid:     true,
+           },
            CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
            UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
        }
@@ -532,6 +771,59 @@ func (t *ScanTask) scanImage() {
    t.generateThumbnail(i)
}

func (t *ScanTask) rescanImage(i *models.Image, fileModTime time.Time) (*models.Image, error) {
    logger.Infof("%s has been updated: rescanning", t.FilePath)

    oldChecksum := i.Checksum

    // update the checksum and the modification time
    checksum, err := t.calculateImageChecksum()
    if err != nil {
        return nil, err
    }

    // regenerate the file details as well
    fileDetails, err := image.GetFileDetails(t.FilePath)
    if err != nil {
        return nil, err
    }

    currentTime := time.Now()
    imagePartial := models.ImagePartial{
        ID:       i.ID,
        Checksum: &checksum,
        Width:    &fileDetails.Width,
        Height:   &fileDetails.Height,
        Size:     &fileDetails.Size,
        FileModTime: &models.NullSQLiteTimestamp{
            Timestamp: fileModTime,
            Valid:     true,
        },
        UpdatedAt: &models.SQLiteTimestamp{Timestamp: currentTime},
    }

    var ret *models.Image
    err = database.WithTxn(func(tx *sqlx.Tx) error {
        qb := models.NewImageQueryBuilder()
        var txnErr error
        ret, txnErr = qb.Update(imagePartial, tx)
        return txnErr
    })
    if err != nil {
        return nil, err
    }

    // remove the old thumbnail if the checksum changed - we'll regenerate it
    if oldChecksum != checksum {
        err = os.Remove(GetInstance().Paths.Generated.GetThumbnailPath(oldChecksum, models.DefaultGthumbWidth)) // remove cache dir of gallery
        if err != nil {
            logger.Errorf("Error deleting thumbnail image: %s", err)
        }
    }

    return ret, nil
}

func (t *ScanTask) associateImageWithFolderGallery(imageID int, tx *sqlx.Tx) error {
    // find a gallery with the path specified
    path := filepath.Dir(t.FilePath)
@@ -574,7 +866,6 @@ func (t *ScanTask) generateThumbnail(i *models.Image) {
    thumbPath := GetInstance().Paths.Generated.GetThumbnailPath(i.Checksum, models.DefaultGthumbWidth)
    exists, _ := utils.FileExists(thumbPath)
    if exists {
        logger.Debug("Thumbnail already exists for this path... skipping")
        return
    }
@@ -5,36 +5,38 @@ import (
 )
 
 type Gallery struct {
-	ID        int             `db:"id" json:"id"`
-	Path      sql.NullString  `db:"path" json:"path"`
-	Checksum  string          `db:"checksum" json:"checksum"`
-	Zip       bool            `db:"zip" json:"zip"`
-	Title     sql.NullString  `db:"title" json:"title"`
-	URL       sql.NullString  `db:"url" json:"url"`
-	Date      SQLiteDate      `db:"date" json:"date"`
-	Details   sql.NullString  `db:"details" json:"details"`
-	Rating    sql.NullInt64   `db:"rating" json:"rating"`
-	StudioID  sql.NullInt64   `db:"studio_id,omitempty" json:"studio_id"`
-	SceneID   sql.NullInt64   `db:"scene_id,omitempty" json:"scene_id"`
-	CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
-	UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
+	ID          int                 `db:"id" json:"id"`
+	Path        sql.NullString      `db:"path" json:"path"`
+	Checksum    string              `db:"checksum" json:"checksum"`
+	Zip         bool                `db:"zip" json:"zip"`
+	Title       sql.NullString      `db:"title" json:"title"`
+	URL         sql.NullString      `db:"url" json:"url"`
+	Date        SQLiteDate          `db:"date" json:"date"`
+	Details     sql.NullString      `db:"details" json:"details"`
+	Rating      sql.NullInt64       `db:"rating" json:"rating"`
+	StudioID    sql.NullInt64       `db:"studio_id,omitempty" json:"studio_id"`
+	SceneID     sql.NullInt64       `db:"scene_id,omitempty" json:"scene_id"`
+	FileModTime NullSQLiteTimestamp `db:"file_mod_time" json:"file_mod_time"`
+	CreatedAt   SQLiteTimestamp     `db:"created_at" json:"created_at"`
+	UpdatedAt   SQLiteTimestamp     `db:"updated_at" json:"updated_at"`
 }
 
 // GalleryPartial represents part of a Gallery object. It is used to update
 // the database entry. Only non-nil fields will be updated.
 type GalleryPartial struct {
-	ID        int              `db:"id" json:"id"`
-	Path      *sql.NullString  `db:"path" json:"path"`
-	Checksum  *string          `db:"checksum" json:"checksum"`
-	Title     *sql.NullString  `db:"title" json:"title"`
-	URL       *sql.NullString  `db:"url" json:"url"`
-	Date      *SQLiteDate      `db:"date" json:"date"`
-	Details   *sql.NullString  `db:"details" json:"details"`
-	Rating    *sql.NullInt64   `db:"rating" json:"rating"`
-	StudioID  *sql.NullInt64   `db:"studio_id,omitempty" json:"studio_id"`
-	SceneID   *sql.NullInt64   `db:"scene_id,omitempty" json:"scene_id"`
-	CreatedAt *SQLiteTimestamp `db:"created_at" json:"created_at"`
-	UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"`
+	ID          int                  `db:"id" json:"id"`
+	Path        *sql.NullString      `db:"path" json:"path"`
+	Checksum    *string              `db:"checksum" json:"checksum"`
+	Title       *sql.NullString      `db:"title" json:"title"`
+	URL         *sql.NullString      `db:"url" json:"url"`
+	Date        *SQLiteDate          `db:"date" json:"date"`
+	Details     *sql.NullString      `db:"details" json:"details"`
+	Rating      *sql.NullInt64       `db:"rating" json:"rating"`
+	StudioID    *sql.NullInt64       `db:"studio_id,omitempty" json:"studio_id"`
+	SceneID     *sql.NullInt64       `db:"scene_id,omitempty" json:"scene_id"`
+	FileModTime *NullSQLiteTimestamp `db:"file_mod_time" json:"file_mod_time"`
+	CreatedAt   *SQLiteTimestamp     `db:"created_at" json:"created_at"`
+	UpdatedAt   *SQLiteTimestamp     `db:"updated_at" json:"updated_at"`
 }
 
 const DefaultGthumbWidth int = 640
@@ -6,34 +6,36 @@ import (
 
 // Image stores the metadata for a single image.
 type Image struct {
-	ID        int             `db:"id" json:"id"`
-	Checksum  string          `db:"checksum" json:"checksum"`
-	Path      string          `db:"path" json:"path"`
-	Title     sql.NullString  `db:"title" json:"title"`
-	Rating    sql.NullInt64   `db:"rating" json:"rating"`
-	OCounter  int             `db:"o_counter" json:"o_counter"`
-	Size      sql.NullInt64   `db:"size" json:"size"`
-	Width     sql.NullInt64   `db:"width" json:"width"`
-	Height    sql.NullInt64   `db:"height" json:"height"`
-	StudioID  sql.NullInt64   `db:"studio_id,omitempty" json:"studio_id"`
-	CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
-	UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
+	ID          int                 `db:"id" json:"id"`
+	Checksum    string              `db:"checksum" json:"checksum"`
+	Path        string              `db:"path" json:"path"`
+	Title       sql.NullString      `db:"title" json:"title"`
+	Rating      sql.NullInt64       `db:"rating" json:"rating"`
+	OCounter    int                 `db:"o_counter" json:"o_counter"`
+	Size        sql.NullInt64       `db:"size" json:"size"`
+	Width       sql.NullInt64       `db:"width" json:"width"`
+	Height      sql.NullInt64       `db:"height" json:"height"`
+	StudioID    sql.NullInt64       `db:"studio_id,omitempty" json:"studio_id"`
+	FileModTime NullSQLiteTimestamp `db:"file_mod_time" json:"file_mod_time"`
+	CreatedAt   SQLiteTimestamp     `db:"created_at" json:"created_at"`
+	UpdatedAt   SQLiteTimestamp     `db:"updated_at" json:"updated_at"`
 }
 
 // ImagePartial represents part of a Image object. It is used to update
 // the database entry. Only non-nil fields will be updated.
 type ImagePartial struct {
-	ID        int              `db:"id" json:"id"`
-	Checksum  *string          `db:"checksum" json:"checksum"`
-	Path      *string          `db:"path" json:"path"`
-	Title     *sql.NullString  `db:"title" json:"title"`
-	Rating    *sql.NullInt64   `db:"rating" json:"rating"`
-	Size      *sql.NullInt64   `db:"size" json:"size"`
-	Width     *sql.NullInt64   `db:"width" json:"width"`
-	Height    *sql.NullInt64   `db:"height" json:"height"`
-	StudioID  *sql.NullInt64   `db:"studio_id,omitempty" json:"studio_id"`
-	CreatedAt *SQLiteTimestamp `db:"created_at" json:"created_at"`
-	UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"`
+	ID          int                  `db:"id" json:"id"`
+	Checksum    *string              `db:"checksum" json:"checksum"`
+	Path        *string              `db:"path" json:"path"`
+	Title       *sql.NullString      `db:"title" json:"title"`
+	Rating      *sql.NullInt64       `db:"rating" json:"rating"`
+	Size        *sql.NullInt64       `db:"size" json:"size"`
+	Width       *sql.NullInt64       `db:"width" json:"width"`
+	Height      *sql.NullInt64       `db:"height" json:"height"`
+	StudioID    *sql.NullInt64       `db:"studio_id,omitempty" json:"studio_id"`
+	FileModTime *NullSQLiteTimestamp `db:"file_mod_time" json:"file_mod_time"`
+	CreatedAt   *SQLiteTimestamp     `db:"created_at" json:"created_at"`
+	UpdatedAt   *SQLiteTimestamp     `db:"updated_at" json:"updated_at"`
 }
 
 // ImageFileType represents the file metadata for an image.
@@ -7,54 +7,57 @@ import (
 
 // Scene stores the metadata for a single video scene.
 type Scene struct {
-	ID         int             `db:"id" json:"id"`
-	Checksum   sql.NullString  `db:"checksum" json:"checksum"`
-	OSHash     sql.NullString  `db:"oshash" json:"oshash"`
-	Path       string          `db:"path" json:"path"`
-	Title      sql.NullString  `db:"title" json:"title"`
-	Details    sql.NullString  `db:"details" json:"details"`
-	URL        sql.NullString  `db:"url" json:"url"`
-	Date       SQLiteDate      `db:"date" json:"date"`
-	Rating     sql.NullInt64   `db:"rating" json:"rating"`
-	OCounter   int             `db:"o_counter" json:"o_counter"`
-	Size       sql.NullString  `db:"size" json:"size"`
-	Duration   sql.NullFloat64 `db:"duration" json:"duration"`
-	VideoCodec sql.NullString  `db:"video_codec" json:"video_codec"`
-	Format     sql.NullString  `db:"format" json:"format_name"`
-	AudioCodec sql.NullString  `db:"audio_codec" json:"audio_codec"`
-	Width      sql.NullInt64   `db:"width" json:"width"`
-	Height     sql.NullInt64   `db:"height" json:"height"`
-	Framerate  sql.NullFloat64 `db:"framerate" json:"framerate"`
-	Bitrate    sql.NullInt64   `db:"bitrate" json:"bitrate"`
-	StudioID   sql.NullInt64   `db:"studio_id,omitempty" json:"studio_id"`
-	CreatedAt  SQLiteTimestamp `db:"created_at" json:"created_at"`
-	UpdatedAt  SQLiteTimestamp `db:"updated_at" json:"updated_at"`
+	ID          int                 `db:"id" json:"id"`
+	Checksum    sql.NullString      `db:"checksum" json:"checksum"`
+	OSHash      sql.NullString      `db:"oshash" json:"oshash"`
+	Path        string              `db:"path" json:"path"`
+	Title       sql.NullString      `db:"title" json:"title"`
+	Details     sql.NullString      `db:"details" json:"details"`
+	URL         sql.NullString      `db:"url" json:"url"`
+	Date        SQLiteDate          `db:"date" json:"date"`
+	Rating      sql.NullInt64       `db:"rating" json:"rating"`
+	OCounter    int                 `db:"o_counter" json:"o_counter"`
+	Size        sql.NullString      `db:"size" json:"size"`
+	Duration    sql.NullFloat64     `db:"duration" json:"duration"`
+	VideoCodec  sql.NullString      `db:"video_codec" json:"video_codec"`
+	Format      sql.NullString      `db:"format" json:"format_name"`
+	AudioCodec  sql.NullString      `db:"audio_codec" json:"audio_codec"`
+	Width       sql.NullInt64       `db:"width" json:"width"`
+	Height      sql.NullInt64       `db:"height" json:"height"`
+	Framerate   sql.NullFloat64     `db:"framerate" json:"framerate"`
+	Bitrate     sql.NullInt64       `db:"bitrate" json:"bitrate"`
+	StudioID    sql.NullInt64       `db:"studio_id,omitempty" json:"studio_id"`
+	FileModTime NullSQLiteTimestamp `db:"file_mod_time" json:"file_mod_time"`
+	CreatedAt   SQLiteTimestamp     `db:"created_at" json:"created_at"`
+	UpdatedAt   SQLiteTimestamp     `db:"updated_at" json:"updated_at"`
 }
 
 // ScenePartial represents part of a Scene object. It is used to update
 // the database entry. Only non-nil fields will be updated.
 type ScenePartial struct {
-	ID         int              `db:"id" json:"id"`
-	Checksum   *sql.NullString  `db:"checksum" json:"checksum"`
-	OSHash     *sql.NullString  `db:"oshash" json:"oshash"`
-	Path       *string          `db:"path" json:"path"`
-	Title      *sql.NullString  `db:"title" json:"title"`
-	Details    *sql.NullString  `db:"details" json:"details"`
-	URL        *sql.NullString  `db:"url" json:"url"`
-	Date       *SQLiteDate      `db:"date" json:"date"`
-	Rating     *sql.NullInt64   `db:"rating" json:"rating"`
-	Size       *sql.NullString  `db:"size" json:"size"`
-	Duration   *sql.NullFloat64 `db:"duration" json:"duration"`
-	VideoCodec *sql.NullString  `db:"video_codec" json:"video_codec"`
-	AudioCodec *sql.NullString  `db:"audio_codec" json:"audio_codec"`
-	Width      *sql.NullInt64   `db:"width" json:"width"`
-	Height     *sql.NullInt64   `db:"height" json:"height"`
-	Framerate  *sql.NullFloat64 `db:"framerate" json:"framerate"`
-	Bitrate    *sql.NullInt64   `db:"bitrate" json:"bitrate"`
-	StudioID   *sql.NullInt64   `db:"studio_id,omitempty" json:"studio_id"`
-	MovieID    *sql.NullInt64   `db:"movie_id,omitempty" json:"movie_id"`
-	CreatedAt  *SQLiteTimestamp `db:"created_at" json:"created_at"`
-	UpdatedAt  *SQLiteTimestamp `db:"updated_at" json:"updated_at"`
+	ID          int                  `db:"id" json:"id"`
+	Checksum    *sql.NullString      `db:"checksum" json:"checksum"`
+	OSHash      *sql.NullString      `db:"oshash" json:"oshash"`
+	Path        *string              `db:"path" json:"path"`
+	Title       *sql.NullString      `db:"title" json:"title"`
+	Details     *sql.NullString      `db:"details" json:"details"`
+	URL         *sql.NullString      `db:"url" json:"url"`
+	Date        *SQLiteDate          `db:"date" json:"date"`
+	Rating      *sql.NullInt64       `db:"rating" json:"rating"`
+	Size        *sql.NullString      `db:"size" json:"size"`
+	Duration    *sql.NullFloat64     `db:"duration" json:"duration"`
+	VideoCodec  *sql.NullString      `db:"video_codec" json:"video_codec"`
+	Format      *sql.NullString      `db:"format" json:"format_name"`
+	AudioCodec  *sql.NullString      `db:"audio_codec" json:"audio_codec"`
+	Width       *sql.NullInt64       `db:"width" json:"width"`
+	Height      *sql.NullInt64       `db:"height" json:"height"`
+	Framerate   *sql.NullFloat64     `db:"framerate" json:"framerate"`
+	Bitrate     *sql.NullInt64       `db:"bitrate" json:"bitrate"`
+	StudioID    *sql.NullInt64       `db:"studio_id,omitempty" json:"studio_id"`
+	MovieID     *sql.NullInt64       `db:"movie_id,omitempty" json:"movie_id"`
+	FileModTime *NullSQLiteTimestamp `db:"file_mod_time" json:"file_mod_time"`
+	CreatedAt   *SQLiteTimestamp     `db:"created_at" json:"created_at"`
+	UpdatedAt   *SQLiteTimestamp     `db:"updated_at" json:"updated_at"`
 }
 
 // GetTitle returns the title of the scene. If the Title field is empty,
@@ -21,8 +21,8 @@ func NewGalleryQueryBuilder() GalleryQueryBuilder {
 func (qb *GalleryQueryBuilder) Create(newGallery Gallery, tx *sqlx.Tx) (*Gallery, error) {
 	ensureTx(tx)
 	result, err := tx.NamedExec(
-		`INSERT INTO galleries (path, checksum, zip, title, date, details, url, studio_id, rating, scene_id, created_at, updated_at)
-				VALUES (:path, :checksum, :zip, :title, :date, :details, :url, :studio_id, :rating, :scene_id, :created_at, :updated_at)
+		`INSERT INTO galleries (path, checksum, zip, title, date, details, url, studio_id, rating, scene_id, file_mod_time, created_at, updated_at)
+				VALUES (:path, :checksum, :zip, :title, :date, :details, :url, :studio_id, :rating, :scene_id, :file_mod_time, :created_at, :updated_at)
 		`,
 		newGallery,
 	)
@@ -68,6 +68,32 @@ func (qb *GalleryQueryBuilder) UpdatePartial(updatedGallery GalleryPartial, tx *
    return qb.Find(updatedGallery.ID, tx)
}

func (qb *GalleryQueryBuilder) UpdateChecksum(id int, checksum string, tx *sqlx.Tx) error {
    ensureTx(tx)
    _, err := tx.Exec(
        `UPDATE galleries SET checksum = ? WHERE galleries.id = ? `,
        checksum, id,
    )
    if err != nil {
        return err
    }

    return nil
}

func (qb *GalleryQueryBuilder) UpdateFileModTime(id int, modTime NullSQLiteTimestamp, tx *sqlx.Tx) error {
    ensureTx(tx)
    _, err := tx.Exec(
        `UPDATE galleries SET file_mod_time = ? WHERE galleries.id = ? `,
        modTime, id,
    )
    if err != nil {
        return err
    }

    return nil
}

func (qb *GalleryQueryBuilder) Destroy(id int, tx *sqlx.Tx) error {
    return executeDeleteQuery("galleries", strconv.Itoa(id), tx)
}
@@ -62,9 +62,9 @@ func (qb *ImageQueryBuilder) Create(newImage Image, tx *sqlx.Tx) (*Image, error)
 	ensureTx(tx)
 	result, err := tx.NamedExec(
 		`INSERT INTO images (checksum, path, title, rating, o_counter, size,
-			width, height, studio_id, created_at, updated_at)
+			width, height, studio_id, file_mod_time, created_at, updated_at)
 		VALUES (:checksum, :path, :title, :rating, :o_counter, :size,
-			:width, :height, :studio_id, :created_at, :updated_at)
+			:width, :height, :studio_id, :file_mod_time, :created_at, :updated_at)
 		`,
 		newImage,
 	)
@@ -107,6 +107,19 @@ func (qb *ImageQueryBuilder) UpdateFull(updatedImage Image, tx *sqlx.Tx) (*Image
    return qb.find(updatedImage.ID, tx)
}

func (qb *ImageQueryBuilder) UpdateFileModTime(id int, modTime NullSQLiteTimestamp, tx *sqlx.Tx) error {
    ensureTx(tx)
    _, err := tx.Exec(
        `UPDATE images SET file_mod_time = ? WHERE images.id = ? `,
        modTime, id,
    )
    if err != nil {
        return err
    }

    return nil
}

func (qb *ImageQueryBuilder) IncrementOCounter(id int, tx *sqlx.Tx) (int, error) {
    ensureTx(tx)
    _, err := tx.Exec(
@@ -60,9 +60,9 @@ func (qb *SceneQueryBuilder) Create(newScene Scene, tx *sqlx.Tx) (*Scene, error)
 	ensureTx(tx)
 	result, err := tx.NamedExec(
 		`INSERT INTO scenes (oshash, checksum, path, title, details, url, date, rating, o_counter, size, duration, video_codec,
-			audio_codec, format, width, height, framerate, bitrate, studio_id, created_at, updated_at)
+			audio_codec, format, width, height, framerate, bitrate, studio_id, file_mod_time, created_at, updated_at)
 		VALUES (:oshash, :checksum, :path, :title, :details, :url, :date, :rating, :o_counter, :size, :duration, :video_codec,
-			:audio_codec, :format, :width, :height, :framerate, :bitrate, :studio_id, :created_at, :updated_at)
+			:audio_codec, :format, :width, :height, :framerate, :bitrate, :studio_id, :file_mod_time, :created_at, :updated_at)
 		`,
 		newScene,
 	)
@@ -105,6 +105,19 @@ func (qb *SceneQueryBuilder) UpdateFull(updatedScene Scene, tx *sqlx.Tx) (*Scene
    return qb.find(updatedScene.ID, tx)
}

func (qb *SceneQueryBuilder) UpdateFileModTime(id int, modTime NullSQLiteTimestamp, tx *sqlx.Tx) error {
    ensureTx(tx)
    _, err := tx.Exec(
        `UPDATE scenes SET file_mod_time = ? WHERE scenes.id = ? `,
        modTime, id,
    )
    if err != nil {
        return err
    }

    return nil
}

func (qb *SceneQueryBuilder) IncrementOCounter(id int, tx *sqlx.Tx) (int, error) {
    ensureTx(tx)
    _, err := tx.Exec(
@@ -414,51 +414,39 @@ func sqlGenKeys(i interface{}, partial bool) string {
 		if key == "id" {
 			continue
 		}
 
+		var add bool
 		switch t := v.Field(i).Interface().(type) {
 		case string:
-			if partial || t != "" {
-				query = append(query, fmt.Sprintf("%s=:%s", key, key))
-			}
+			add = partial || t != ""
 		case int:
-			if partial || t != 0 {
-				query = append(query, fmt.Sprintf("%s=:%s", key, key))
-			}
+			add = partial || t != 0
 		case float64:
-			if partial || t != 0 {
-				query = append(query, fmt.Sprintf("%s=:%s", key, key))
-			}
+			add = partial || t != 0
 		case bool:
-			query = append(query, fmt.Sprintf("%s=:%s", key, key))
+			add = true
 		case SQLiteTimestamp:
-			if partial || !t.Timestamp.IsZero() {
-				query = append(query, fmt.Sprintf("%s=:%s", key, key))
-			}
+			add = partial || !t.Timestamp.IsZero()
+		case NullSQLiteTimestamp:
+			add = partial || t.Valid
 		case SQLiteDate:
-			if partial || t.Valid {
-				query = append(query, fmt.Sprintf("%s=:%s", key, key))
-			}
+			add = partial || t.Valid
 		case sql.NullString:
-			if partial || t.Valid {
-				query = append(query, fmt.Sprintf("%s=:%s", key, key))
-			}
+			add = partial || t.Valid
 		case sql.NullBool:
-			if partial || t.Valid {
-				query = append(query, fmt.Sprintf("%s=:%s", key, key))
-			}
+			add = partial || t.Valid
 		case sql.NullInt64:
-			if partial || t.Valid {
-				query = append(query, fmt.Sprintf("%s=:%s", key, key))
-			}
+			add = partial || t.Valid
 		case sql.NullFloat64:
-			if partial || t.Valid {
-				query = append(query, fmt.Sprintf("%s=:%s", key, key))
-			}
+			add = partial || t.Valid
 		default:
 			reflectValue := reflect.ValueOf(t)
 			isNil := reflectValue.IsNil()
-			if !isNil {
-				query = append(query, fmt.Sprintf("%s=:%s", key, key))
-			}
+			add = !isNil
 		}
+
+		if add {
+			query = append(query, fmt.Sprintf("%s=:%s", key, key))
+		}
 	}
 	return strings.Join(query, ", ")
@@ -19,3 +19,31 @@ func (t *SQLiteTimestamp) Scan(value interface{}) error {
func (t SQLiteTimestamp) Value() (driver.Value, error) {
    return t.Timestamp.Format(time.RFC3339), nil
}

type NullSQLiteTimestamp struct {
    Timestamp time.Time
    Valid     bool
}

// Scan implements the Scanner interface.
func (t *NullSQLiteTimestamp) Scan(value interface{}) error {
    var ok bool
    t.Timestamp, ok = value.(time.Time)
    if !ok {
        t.Timestamp = time.Time{}
        t.Valid = false
        return nil
    }

    t.Valid = true
    return nil
}

// Value implements the driver Valuer interface.
func (t NullSQLiteTimestamp) Value() (driver.Value, error) {
    if t.Timestamp.IsZero() {
        return nil, nil
    }

    return t.Timestamp.Format(time.RFC3339), nil
}
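NullSQLiteTimestamp behaves like the database/sql Null* types: a zero timestamp is written as NULL, anything else as an RFC3339 string, and Scan marks the value invalid whenever the driver does not hand back a time.Time. A small self-contained illustration of the Value side (the type is copied here so the snippet runs outside the models package):

package main

import (
    "database/sql/driver"
    "fmt"
    "time"
)

// Copy of the type above, for illustration only.
type NullSQLiteTimestamp struct {
    Timestamp time.Time
    Valid     bool
}

func (t NullSQLiteTimestamp) Value() (driver.Value, error) {
    if t.Timestamp.IsZero() {
        return nil, nil // stored as NULL
    }
    return t.Timestamp.Format(time.RFC3339), nil
}

func main() {
    var unset NullSQLiteTimestamp
    set := NullSQLiteTimestamp{Timestamp: time.Date(2020, 10, 12, 8, 30, 15, 0, time.UTC), Valid: true}

    fmt.Println(unset.Value()) // <nil> <nil>
    fmt.Println(set.Value())   // 2020-10-12T08:30:15Z <nil>
}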
@@ -66,6 +66,10 @@ func ToBasicJSON(reader models.SceneReader, scene *models.Scene) (*jsonschema.Sc
func getSceneFileJSON(scene *models.Scene) *jsonschema.SceneFile {
    ret := &jsonschema.SceneFile{}

    if scene.FileModTime.Valid {
        ret.ModTime = models.JSONTime{Time: scene.FileModTime.Timestamp}
    }

    if scene.Size.Valid {
        ret.Size = scene.Size.String
    }
@@ -118,6 +118,7 @@ export class ListFilterModel {
          "o_counter",
          "date",
          "filesize",
          "file_mod_time",
          "duration",
          "framerate",
          "bitrate",
@@ -152,6 +153,7 @@ export class ListFilterModel {
          "rating",
          "o_counter",
          "filesize",
          "file_mod_time",
          "random",
        ];
        this.displayModeOptions = [DisplayMode.Grid, DisplayMode.Wall];
@@ -226,7 +228,7 @@ export class ListFilterModel {
         break;
       case FilterMode.Galleries:
         this.sortBy = "path";
-        this.sortByOptions = ["path", "images_count"];
+        this.sortByOptions = ["path", "file_mod_time", "images_count"];
         this.displayModeOptions = [DisplayMode.Grid, DisplayMode.List];
         this.criterionOptions = [
           new NoneCriterionOption(),