Scene play and o-counter history view and editing (#4532)

Co-authored-by: randemgame <61895715+randemgame@users.noreply.github.com>
This commit is contained in:
WithoutPants 2024-02-22 11:28:18 +11:00 committed by GitHub
parent 0c2a2190e5
commit a303446bb7
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
51 changed files with 3581 additions and 564 deletions

View file

@ -237,9 +237,15 @@ type Mutation {
scenesUpdate(input: [SceneUpdateInput!]!): [Scene] scenesUpdate(input: [SceneUpdateInput!]!): [Scene]
"Increments the o-counter for a scene. Returns the new value" "Increments the o-counter for a scene. Returns the new value"
sceneIncrementO(id: ID!): Int! sceneIncrementO(id: ID!): Int! @deprecated(reason: "Use sceneAddO instead")
"Decrements the o-counter for a scene. Returns the new value" "Decrements the o-counter for a scene. Returns the new value"
sceneDecrementO(id: ID!): Int! sceneDecrementO(id: ID!): Int! @deprecated(reason: "Use sceneDeleteO instead")
"Increments the o-counter for a scene. Uses the current time if none provided."
sceneAddO(id: ID!, times: [Timestamp!]): HistoryMutationResult!
"Decrements the o-counter for a scene, removing the last recorded time if a specific time is not provided."
sceneDeleteO(id: ID!, times: [Timestamp!]): HistoryMutationResult!
"Resets the o-counter for a scene to 0. Returns the new value" "Resets the o-counter for a scene to 0. Returns the new value"
sceneResetO(id: ID!): Int! sceneResetO(id: ID!): Int!
@ -248,6 +254,14 @@ type Mutation {
"Increments the play count for the scene. Returns the new play count value." "Increments the play count for the scene. Returns the new play count value."
sceneIncrementPlayCount(id: ID!): Int! sceneIncrementPlayCount(id: ID!): Int!
@deprecated(reason: "Use sceneAddPlay instead")
"Increments the play count for the scene. Uses the current time if none provided."
sceneAddPlay(id: ID!, times: [Timestamp!]): HistoryMutationResult!
"Decrements the play count for the scene, removing the specific times or the last recorded time if not provided."
sceneDeletePlay(id: ID!, times: [Timestamp!]): HistoryMutationResult!
"Resets the play count for a scene to 0. Returns the new play count value."
sceneResetPlayCount(id: ID!): Int!
"Generates screenshot at specified time in seconds. Leave empty to generate default screenshot" "Generates screenshot at specified time in seconds. Leave empty to generate default screenshot"
sceneGenerateScreenshot(id: ID!, at: Float): String! sceneGenerateScreenshot(id: ID!, at: Float): String!

View file

@ -58,6 +58,11 @@ type Scene {
"The number of times a scene has been played" "The number of times a scene has been played"
play_count: Int play_count: Int
"Times a scene was played"
play_history: [Time!]!
"Times the o counter was incremented"
o_history: [Time!]!
files: [VideoFile!]! files: [VideoFile!]!
paths: ScenePathsType! # Resolver paths: ScenePathsType! # Resolver
scene_markers: [SceneMarker!]! scene_markers: [SceneMarker!]!
@ -118,6 +123,7 @@ input SceneUpdateInput {
# rating expressed as 1-100 # rating expressed as 1-100
rating100: Int rating100: Int
o_counter: Int o_counter: Int
@deprecated(reason: "Unsupported - Use sceneIncrementO/sceneDecrementO")
organized: Boolean organized: Boolean
studio_id: ID studio_id: ID
gallery_ids: [ID!] gallery_ids: [ID!]
@ -134,6 +140,9 @@ input SceneUpdateInput {
play_duration: Float play_duration: Float
"The number of times a scene has been played" "The number of times a scene has been played"
play_count: Int play_count: Int
@deprecated(
reason: "Unsupported - Use sceneAddPlay/sceneDeletePlay"
)
primary_file_id: ID primary_file_id: ID
} }
@ -251,4 +260,13 @@ input SceneMergeInput {
destination: ID! destination: ID!
# values defined here will override values in the destination # values defined here will override values in the destination
values: SceneUpdateInput values: SceneUpdateInput
# if true, the source history will be combined with the destination
play_history: Boolean
o_history: Boolean
}
type HistoryMutationResult {
count: Int!
history: [Time!]!
} }

View file

@ -9,7 +9,11 @@
//go:generate go run github.com/vektah/dataloaden SceneFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID //go:generate go run github.com/vektah/dataloaden SceneFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
//go:generate go run github.com/vektah/dataloaden ImageFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID //go:generate go run github.com/vektah/dataloaden ImageFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
//go:generate go run github.com/vektah/dataloaden GalleryFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID //go:generate go run github.com/vektah/dataloaden GalleryFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
//go:generate go run github.com/vektah/dataloaden SceneOCountLoader int int
//go:generate go run github.com/vektah/dataloaden ScenePlayCountLoader int int
//go:generate go run github.com/vektah/dataloaden SceneOHistoryLoader int []time.Time
//go:generate go run github.com/vektah/dataloaden ScenePlayHistoryLoader int []time.Time
//go:generate go run github.com/vektah/dataloaden SceneLastPlayedLoader int *time.Time
package loaders package loaders
import ( import (
@ -32,8 +36,14 @@ const (
) )
type Loaders struct { type Loaders struct {
SceneByID *SceneLoader SceneByID *SceneLoader
SceneFiles *SceneFileIDsLoader SceneFiles *SceneFileIDsLoader
ScenePlayCount *ScenePlayCountLoader
SceneOCount *SceneOCountLoader
ScenePlayHistory *ScenePlayHistoryLoader
SceneOHistory *SceneOHistoryLoader
SceneLastPlayed *SceneLastPlayedLoader
ImageFiles *ImageFileIDsLoader ImageFiles *ImageFileIDsLoader
GalleryFiles *GalleryFileIDsLoader GalleryFiles *GalleryFileIDsLoader
@ -109,6 +119,31 @@ func (m Middleware) Middleware(next http.Handler) http.Handler {
maxBatch: maxBatch, maxBatch: maxBatch,
fetch: m.fetchGalleriesFileIDs(ctx), fetch: m.fetchGalleriesFileIDs(ctx),
}, },
ScenePlayCount: &ScenePlayCountLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchScenesPlayCount(ctx),
},
SceneOCount: &SceneOCountLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchScenesOCount(ctx),
},
ScenePlayHistory: &ScenePlayHistoryLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchScenesPlayHistory(ctx),
},
SceneLastPlayed: &SceneLastPlayedLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchScenesLastPlayed(ctx),
},
SceneOHistory: &SceneOHistoryLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchScenesOHistory(ctx),
},
} }
newCtx := context.WithValue(r.Context(), loadersCtxKey, ldrs) newCtx := context.WithValue(r.Context(), loadersCtxKey, ldrs)
@ -251,3 +286,58 @@ func (m Middleware) fetchGalleriesFileIDs(ctx context.Context) func(keys []int)
return ret, toErrorSlice(err) return ret, toErrorSlice(err)
} }
} }
// fetchScenesOCount returns a batch-fetch function that resolves the o-counter
// for each of the given scene IDs in a single repository call.
// NOTE(review): the returned closure reuses the ctx captured here rather than
// one supplied at call time — confirm this matches the loader middleware's lifetime.
func (m Middleware) fetchScenesOCount(ctx context.Context) func(keys []int) ([]int, []error) {
	return func(keys []int) ([]int, []error) {
		var counts []int
		err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
			var dbErr error
			counts, dbErr = m.Repository.Scene.GetManyOCount(ctx, keys)
			return dbErr
		})
		return counts, toErrorSlice(err)
	}
}
// fetchScenesPlayCount returns a batch-fetch function that resolves the view
// (play) count for each of the given scene IDs in a single repository call.
func (m Middleware) fetchScenesPlayCount(ctx context.Context) func(keys []int) ([]int, []error) {
	return func(keys []int) ([]int, []error) {
		var counts []int
		err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
			var dbErr error
			counts, dbErr = m.Repository.Scene.GetManyViewCount(ctx, keys)
			return dbErr
		})
		return counts, toErrorSlice(err)
	}
}
// fetchScenesOHistory returns a batch-fetch function that resolves the list of
// o-counter timestamps for each of the given scene IDs in a single repository call.
func (m Middleware) fetchScenesOHistory(ctx context.Context) func(keys []int) ([][]time.Time, []error) {
	return func(keys []int) ([][]time.Time, []error) {
		var dates [][]time.Time
		err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
			var dbErr error
			dates, dbErr = m.Repository.Scene.GetManyODates(ctx, keys)
			return dbErr
		})
		return dates, toErrorSlice(err)
	}
}
// fetchScenesPlayHistory returns a batch-fetch function that resolves the list
// of view (play) timestamps for each of the given scene IDs in a single repository call.
func (m Middleware) fetchScenesPlayHistory(ctx context.Context) func(keys []int) ([][]time.Time, []error) {
	return func(keys []int) ([][]time.Time, []error) {
		var dates [][]time.Time
		err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
			var dbErr error
			dates, dbErr = m.Repository.Scene.GetManyViewDates(ctx, keys)
			return dbErr
		})
		return dates, toErrorSlice(err)
	}
}
// fetchScenesLastPlayed returns a batch-fetch function that resolves the most
// recent view timestamp (nil if never viewed) for each of the given scene IDs
// in a single repository call.
func (m Middleware) fetchScenesLastPlayed(ctx context.Context) func(keys []int) ([]*time.Time, []error) {
	return func(keys []int) ([]*time.Time, []error) {
		var lastViewed []*time.Time
		err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
			var dbErr error
			lastViewed, dbErr = m.Repository.Scene.GetManyLastViewed(ctx, keys)
			return dbErr
		})
		return lastViewed, toErrorSlice(err)
	}
}

View file

@ -0,0 +1,222 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.

package loaders

import (
	"sync"
	"time"
)

// SceneLastPlayedLoaderConfig captures the config to create a new SceneLastPlayedLoader
type SceneLastPlayedLoaderConfig struct {
	// Fetch is a method that provides the data for the loader
	Fetch func(keys []int) ([]*time.Time, []error)

	// Wait is how long to wait before sending a batch
	Wait time.Duration

	// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
	MaxBatch int
}

// NewSceneLastPlayedLoader creates a new SceneLastPlayedLoader given a fetch, wait, and maxBatch
func NewSceneLastPlayedLoader(config SceneLastPlayedLoaderConfig) *SceneLastPlayedLoader {
	return &SceneLastPlayedLoader{
		fetch:    config.Fetch,
		wait:     config.Wait,
		maxBatch: config.MaxBatch,
	}
}

// SceneLastPlayedLoader batches and caches requests
type SceneLastPlayedLoader struct {
	// this method provides the data for the loader
	fetch func(keys []int) ([]*time.Time, []error)

	// how long to wait before sending a batch
	wait time.Duration

	// this will limit the maximum number of keys to send in one batch, 0 = no limit
	maxBatch int

	// INTERNAL

	// lazily created cache
	cache map[int]*time.Time

	// the current batch. keys will continue to be collected until timeout is hit,
	// then everything will be sent to the fetch method and out to the listeners
	batch *sceneLastPlayedLoaderBatch

	// mutex to prevent races
	mu sync.Mutex
}

type sceneLastPlayedLoaderBatch struct {
	keys    []int
	data    []*time.Time
	error   []error
	closing bool
	done    chan struct{}
}

// Load a Time by key, batching and caching will be applied automatically
func (l *SceneLastPlayedLoader) Load(key int) (*time.Time, error) {
	return l.LoadThunk(key)()
}

// LoadThunk returns a function that when called will block waiting for a Time.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *SceneLastPlayedLoader) LoadThunk(key int) func() (*time.Time, error) {
	l.mu.Lock()
	if it, ok := l.cache[key]; ok {
		l.mu.Unlock()
		return func() (*time.Time, error) {
			return it, nil
		}
	}
	if l.batch == nil {
		l.batch = &sceneLastPlayedLoaderBatch{done: make(chan struct{})}
	}
	batch := l.batch
	pos := batch.keyIndex(l, key)
	l.mu.Unlock()
	return func() (*time.Time, error) {
		<-batch.done

		var data *time.Time
		if pos < len(batch.data) {
			data = batch.data[pos]
		}

		var err error
		// it's convenient to be able to return a single error for everything
		if len(batch.error) == 1 {
			err = batch.error[0]
		} else if batch.error != nil {
			err = batch.error[pos]
		}

		if err == nil {
			l.mu.Lock()
			l.unsafeSet(key, data)
			l.mu.Unlock()
		}

		return data, err
	}
}

// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *SceneLastPlayedLoader) LoadAll(keys []int) ([]*time.Time, []error) {
	results := make([]func() (*time.Time, error), len(keys))

	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}

	times := make([]*time.Time, len(keys))
	errors := make([]error, len(keys))
	for i, thunk := range results {
		times[i], errors[i] = thunk()
	}
	return times, errors
}

// LoadAllThunk returns a function that when called will block waiting for the Times.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *SceneLastPlayedLoader) LoadAllThunk(keys []int) func() ([]*time.Time, []error) {
	results := make([]func() (*time.Time, error), len(keys))
	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}
	return func() ([]*time.Time, []error) {
		times := make([]*time.Time, len(keys))
		errors := make([]error, len(keys))
		for i, thunk := range results {
			times[i], errors[i] = thunk()
		}
		return times, errors
	}
}

// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *SceneLastPlayedLoader) Prime(key int, value *time.Time) bool {
	l.mu.Lock()
	var found bool
	if _, found = l.cache[key]; !found {
		// make a copy when writing to the cache, it's easy to pass a pointer in from a loop var
		// and end up with the whole cache pointing to the same value.
		cpy := *value
		l.unsafeSet(key, &cpy)
	}
	l.mu.Unlock()
	return !found
}

// Clear the value at key from the cache, if it exists
func (l *SceneLastPlayedLoader) Clear(key int) {
	l.mu.Lock()
	delete(l.cache, key)
	l.mu.Unlock()
}

// unsafeSet writes to the cache; caller must hold l.mu.
func (l *SceneLastPlayedLoader) unsafeSet(key int, value *time.Time) {
	if l.cache == nil {
		l.cache = map[int]*time.Time{}
	}
	l.cache[key] = value
}

// keyIndex will return the location of the key in the batch, if its not found
// it will add the key to the batch
func (b *sceneLastPlayedLoaderBatch) keyIndex(l *SceneLastPlayedLoader, key int) int {
	for i, existingKey := range b.keys {
		if key == existingKey {
			return i
		}
	}

	pos := len(b.keys)
	b.keys = append(b.keys, key)
	if pos == 0 {
		// first key of this batch starts the flush timer
		go b.startTimer(l)
	}

	if l.maxBatch != 0 && pos >= l.maxBatch-1 {
		if !b.closing {
			b.closing = true
			l.batch = nil
			go b.end(l)
		}
	}

	return pos
}

func (b *sceneLastPlayedLoaderBatch) startTimer(l *SceneLastPlayedLoader) {
	time.Sleep(l.wait)
	l.mu.Lock()

	// we must have hit a batch limit and are already finalizing this batch
	if b.closing {
		l.mu.Unlock()
		return
	}

	l.batch = nil
	l.mu.Unlock()

	b.end(l)
}

// end runs the fetch and signals all waiting thunks via the done channel.
func (b *sceneLastPlayedLoaderBatch) end(l *SceneLastPlayedLoader) {
	b.data, b.error = l.fetch(b.keys)
	close(b.done)
}

View file

@ -0,0 +1,219 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.

package loaders

import (
	"sync"
	"time"
)

// SceneOCountLoaderConfig captures the config to create a new SceneOCountLoader
type SceneOCountLoaderConfig struct {
	// Fetch is a method that provides the data for the loader
	Fetch func(keys []int) ([]int, []error)

	// Wait is how long to wait before sending a batch
	Wait time.Duration

	// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
	MaxBatch int
}

// NewSceneOCountLoader creates a new SceneOCountLoader given a fetch, wait, and maxBatch
func NewSceneOCountLoader(config SceneOCountLoaderConfig) *SceneOCountLoader {
	return &SceneOCountLoader{
		fetch:    config.Fetch,
		wait:     config.Wait,
		maxBatch: config.MaxBatch,
	}
}

// SceneOCountLoader batches and caches requests
type SceneOCountLoader struct {
	// this method provides the data for the loader
	fetch func(keys []int) ([]int, []error)

	// how long to wait before sending a batch
	wait time.Duration

	// this will limit the maximum number of keys to send in one batch, 0 = no limit
	maxBatch int

	// INTERNAL

	// lazily created cache
	cache map[int]int

	// the current batch. keys will continue to be collected until timeout is hit,
	// then everything will be sent to the fetch method and out to the listeners
	batch *sceneOCountLoaderBatch

	// mutex to prevent races
	mu sync.Mutex
}

type sceneOCountLoaderBatch struct {
	keys    []int
	data    []int
	error   []error
	closing bool
	done    chan struct{}
}

// Load an int by key, batching and caching will be applied automatically
func (l *SceneOCountLoader) Load(key int) (int, error) {
	return l.LoadThunk(key)()
}

// LoadThunk returns a function that when called will block waiting for an int.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *SceneOCountLoader) LoadThunk(key int) func() (int, error) {
	l.mu.Lock()
	if it, ok := l.cache[key]; ok {
		l.mu.Unlock()
		return func() (int, error) {
			return it, nil
		}
	}
	if l.batch == nil {
		l.batch = &sceneOCountLoaderBatch{done: make(chan struct{})}
	}
	batch := l.batch
	pos := batch.keyIndex(l, key)
	l.mu.Unlock()
	return func() (int, error) {
		<-batch.done

		var data int
		if pos < len(batch.data) {
			data = batch.data[pos]
		}

		var err error
		// it's convenient to be able to return a single error for everything
		if len(batch.error) == 1 {
			err = batch.error[0]
		} else if batch.error != nil {
			err = batch.error[pos]
		}

		if err == nil {
			l.mu.Lock()
			l.unsafeSet(key, data)
			l.mu.Unlock()
		}

		return data, err
	}
}

// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *SceneOCountLoader) LoadAll(keys []int) ([]int, []error) {
	results := make([]func() (int, error), len(keys))

	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}

	ints := make([]int, len(keys))
	errors := make([]error, len(keys))
	for i, thunk := range results {
		ints[i], errors[i] = thunk()
	}
	return ints, errors
}

// LoadAllThunk returns a function that when called will block waiting for the ints.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *SceneOCountLoader) LoadAllThunk(keys []int) func() ([]int, []error) {
	results := make([]func() (int, error), len(keys))
	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}
	return func() ([]int, []error) {
		ints := make([]int, len(keys))
		errors := make([]error, len(keys))
		for i, thunk := range results {
			ints[i], errors[i] = thunk()
		}
		return ints, errors
	}
}

// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *SceneOCountLoader) Prime(key int, value int) bool {
	l.mu.Lock()
	var found bool
	if _, found = l.cache[key]; !found {
		l.unsafeSet(key, value)
	}
	l.mu.Unlock()
	return !found
}

// Clear the value at key from the cache, if it exists
func (l *SceneOCountLoader) Clear(key int) {
	l.mu.Lock()
	delete(l.cache, key)
	l.mu.Unlock()
}

// unsafeSet writes to the cache; caller must hold l.mu.
func (l *SceneOCountLoader) unsafeSet(key int, value int) {
	if l.cache == nil {
		l.cache = map[int]int{}
	}
	l.cache[key] = value
}

// keyIndex will return the location of the key in the batch, if its not found
// it will add the key to the batch
func (b *sceneOCountLoaderBatch) keyIndex(l *SceneOCountLoader, key int) int {
	for i, existingKey := range b.keys {
		if key == existingKey {
			return i
		}
	}

	pos := len(b.keys)
	b.keys = append(b.keys, key)
	if pos == 0 {
		// first key of this batch starts the flush timer
		go b.startTimer(l)
	}

	if l.maxBatch != 0 && pos >= l.maxBatch-1 {
		if !b.closing {
			b.closing = true
			l.batch = nil
			go b.end(l)
		}
	}

	return pos
}

func (b *sceneOCountLoaderBatch) startTimer(l *SceneOCountLoader) {
	time.Sleep(l.wait)
	l.mu.Lock()

	// we must have hit a batch limit and are already finalizing this batch
	if b.closing {
		l.mu.Unlock()
		return
	}

	l.batch = nil
	l.mu.Unlock()

	b.end(l)
}

// end runs the fetch and signals all waiting thunks via the done channel.
func (b *sceneOCountLoaderBatch) end(l *SceneOCountLoader) {
	b.data, b.error = l.fetch(b.keys)
	close(b.done)
}

View file

@ -0,0 +1,223 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.

package loaders

import (
	"sync"
	"time"
)

// SceneOHistoryLoaderConfig captures the config to create a new SceneOHistoryLoader
type SceneOHistoryLoaderConfig struct {
	// Fetch is a method that provides the data for the loader
	Fetch func(keys []int) ([][]time.Time, []error)

	// Wait is how long to wait before sending a batch
	Wait time.Duration

	// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
	MaxBatch int
}

// NewSceneOHistoryLoader creates a new SceneOHistoryLoader given a fetch, wait, and maxBatch
func NewSceneOHistoryLoader(config SceneOHistoryLoaderConfig) *SceneOHistoryLoader {
	return &SceneOHistoryLoader{
		fetch:    config.Fetch,
		wait:     config.Wait,
		maxBatch: config.MaxBatch,
	}
}

// SceneOHistoryLoader batches and caches requests
type SceneOHistoryLoader struct {
	// this method provides the data for the loader
	fetch func(keys []int) ([][]time.Time, []error)

	// how long to wait before sending a batch
	wait time.Duration

	// this will limit the maximum number of keys to send in one batch, 0 = no limit
	maxBatch int

	// INTERNAL

	// lazily created cache
	cache map[int][]time.Time

	// the current batch. keys will continue to be collected until timeout is hit,
	// then everything will be sent to the fetch method and out to the listeners
	batch *sceneOHistoryLoaderBatch

	// mutex to prevent races
	mu sync.Mutex
}

type sceneOHistoryLoaderBatch struct {
	keys    []int
	data    [][]time.Time
	error   []error
	closing bool
	done    chan struct{}
}

// Load a Time slice by key, batching and caching will be applied automatically
func (l *SceneOHistoryLoader) Load(key int) ([]time.Time, error) {
	return l.LoadThunk(key)()
}

// LoadThunk returns a function that when called will block waiting for a Time slice.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *SceneOHistoryLoader) LoadThunk(key int) func() ([]time.Time, error) {
	l.mu.Lock()
	if it, ok := l.cache[key]; ok {
		l.mu.Unlock()
		return func() ([]time.Time, error) {
			return it, nil
		}
	}
	if l.batch == nil {
		l.batch = &sceneOHistoryLoaderBatch{done: make(chan struct{})}
	}
	batch := l.batch
	pos := batch.keyIndex(l, key)
	l.mu.Unlock()
	return func() ([]time.Time, error) {
		<-batch.done

		var data []time.Time
		if pos < len(batch.data) {
			data = batch.data[pos]
		}

		var err error
		// it's convenient to be able to return a single error for everything
		if len(batch.error) == 1 {
			err = batch.error[0]
		} else if batch.error != nil {
			err = batch.error[pos]
		}

		if err == nil {
			l.mu.Lock()
			l.unsafeSet(key, data)
			l.mu.Unlock()
		}

		return data, err
	}
}

// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *SceneOHistoryLoader) LoadAll(keys []int) ([][]time.Time, []error) {
	results := make([]func() ([]time.Time, error), len(keys))

	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}

	times := make([][]time.Time, len(keys))
	errors := make([]error, len(keys))
	for i, thunk := range results {
		times[i], errors[i] = thunk()
	}
	return times, errors
}

// LoadAllThunk returns a function that when called will block waiting for the Times.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *SceneOHistoryLoader) LoadAllThunk(keys []int) func() ([][]time.Time, []error) {
	results := make([]func() ([]time.Time, error), len(keys))
	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}
	return func() ([][]time.Time, []error) {
		times := make([][]time.Time, len(keys))
		errors := make([]error, len(keys))
		for i, thunk := range results {
			times[i], errors[i] = thunk()
		}
		return times, errors
	}
}

// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *SceneOHistoryLoader) Prime(key int, value []time.Time) bool {
	l.mu.Lock()
	var found bool
	if _, found = l.cache[key]; !found {
		// make a copy when writing to the cache, it's easy to pass a pointer in from a loop var
		// and end up with the whole cache pointing to the same value.
		cpy := make([]time.Time, len(value))
		copy(cpy, value)
		l.unsafeSet(key, cpy)
	}
	l.mu.Unlock()
	return !found
}

// Clear the value at key from the cache, if it exists
func (l *SceneOHistoryLoader) Clear(key int) {
	l.mu.Lock()
	delete(l.cache, key)
	l.mu.Unlock()
}

// unsafeSet writes to the cache; caller must hold l.mu.
func (l *SceneOHistoryLoader) unsafeSet(key int, value []time.Time) {
	if l.cache == nil {
		l.cache = map[int][]time.Time{}
	}
	l.cache[key] = value
}

// keyIndex will return the location of the key in the batch, if its not found
// it will add the key to the batch
func (b *sceneOHistoryLoaderBatch) keyIndex(l *SceneOHistoryLoader, key int) int {
	for i, existingKey := range b.keys {
		if key == existingKey {
			return i
		}
	}

	pos := len(b.keys)
	b.keys = append(b.keys, key)
	if pos == 0 {
		// first key of this batch starts the flush timer
		go b.startTimer(l)
	}

	if l.maxBatch != 0 && pos >= l.maxBatch-1 {
		if !b.closing {
			b.closing = true
			l.batch = nil
			go b.end(l)
		}
	}

	return pos
}

func (b *sceneOHistoryLoaderBatch) startTimer(l *SceneOHistoryLoader) {
	time.Sleep(l.wait)
	l.mu.Lock()

	// we must have hit a batch limit and are already finalizing this batch
	if b.closing {
		l.mu.Unlock()
		return
	}

	l.batch = nil
	l.mu.Unlock()

	b.end(l)
}

// end runs the fetch and signals all waiting thunks via the done channel.
func (b *sceneOHistoryLoaderBatch) end(l *SceneOHistoryLoader) {
	b.data, b.error = l.fetch(b.keys)
	close(b.done)
}

View file

@ -0,0 +1,219 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.

package loaders

import (
	"sync"
	"time"
)

// ScenePlayCountLoaderConfig captures the config to create a new ScenePlayCountLoader
type ScenePlayCountLoaderConfig struct {
	// Fetch is a method that provides the data for the loader
	Fetch func(keys []int) ([]int, []error)

	// Wait is how long to wait before sending a batch
	Wait time.Duration

	// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
	MaxBatch int
}

// NewScenePlayCountLoader creates a new ScenePlayCountLoader given a fetch, wait, and maxBatch
func NewScenePlayCountLoader(config ScenePlayCountLoaderConfig) *ScenePlayCountLoader {
	return &ScenePlayCountLoader{
		fetch:    config.Fetch,
		wait:     config.Wait,
		maxBatch: config.MaxBatch,
	}
}

// ScenePlayCountLoader batches and caches requests
type ScenePlayCountLoader struct {
	// this method provides the data for the loader
	fetch func(keys []int) ([]int, []error)

	// how long to wait before sending a batch
	wait time.Duration

	// this will limit the maximum number of keys to send in one batch, 0 = no limit
	maxBatch int

	// INTERNAL

	// lazily created cache
	cache map[int]int

	// the current batch. keys will continue to be collected until timeout is hit,
	// then everything will be sent to the fetch method and out to the listeners
	batch *scenePlayCountLoaderBatch

	// mutex to prevent races
	mu sync.Mutex
}

type scenePlayCountLoaderBatch struct {
	keys    []int
	data    []int
	error   []error
	closing bool
	done    chan struct{}
}

// Load an int by key, batching and caching will be applied automatically
func (l *ScenePlayCountLoader) Load(key int) (int, error) {
	return l.LoadThunk(key)()
}

// LoadThunk returns a function that when called will block waiting for an int.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *ScenePlayCountLoader) LoadThunk(key int) func() (int, error) {
	l.mu.Lock()
	if it, ok := l.cache[key]; ok {
		l.mu.Unlock()
		return func() (int, error) {
			return it, nil
		}
	}
	if l.batch == nil {
		l.batch = &scenePlayCountLoaderBatch{done: make(chan struct{})}
	}
	batch := l.batch
	pos := batch.keyIndex(l, key)
	l.mu.Unlock()
	return func() (int, error) {
		<-batch.done

		var data int
		if pos < len(batch.data) {
			data = batch.data[pos]
		}

		var err error
		// it's convenient to be able to return a single error for everything
		if len(batch.error) == 1 {
			err = batch.error[0]
		} else if batch.error != nil {
			err = batch.error[pos]
		}

		if err == nil {
			l.mu.Lock()
			l.unsafeSet(key, data)
			l.mu.Unlock()
		}

		return data, err
	}
}

// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *ScenePlayCountLoader) LoadAll(keys []int) ([]int, []error) {
	results := make([]func() (int, error), len(keys))

	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}

	ints := make([]int, len(keys))
	errors := make([]error, len(keys))
	for i, thunk := range results {
		ints[i], errors[i] = thunk()
	}
	return ints, errors
}

// LoadAllThunk returns a function that when called will block waiting for the ints.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *ScenePlayCountLoader) LoadAllThunk(keys []int) func() ([]int, []error) {
	results := make([]func() (int, error), len(keys))
	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}
	return func() ([]int, []error) {
		ints := make([]int, len(keys))
		errors := make([]error, len(keys))
		for i, thunk := range results {
			ints[i], errors[i] = thunk()
		}
		return ints, errors
	}
}

// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *ScenePlayCountLoader) Prime(key int, value int) bool {
	l.mu.Lock()
	var found bool
	if _, found = l.cache[key]; !found {
		l.unsafeSet(key, value)
	}
	l.mu.Unlock()
	return !found
}

// Clear the value at key from the cache, if it exists
func (l *ScenePlayCountLoader) Clear(key int) {
	l.mu.Lock()
	delete(l.cache, key)
	l.mu.Unlock()
}

// unsafeSet writes to the cache; caller must hold l.mu.
func (l *ScenePlayCountLoader) unsafeSet(key int, value int) {
	if l.cache == nil {
		l.cache = map[int]int{}
	}
	l.cache[key] = value
}

// keyIndex will return the location of the key in the batch, if its not found
// it will add the key to the batch
func (b *scenePlayCountLoaderBatch) keyIndex(l *ScenePlayCountLoader, key int) int {
	for i, existingKey := range b.keys {
		if key == existingKey {
			return i
		}
	}

	pos := len(b.keys)
	b.keys = append(b.keys, key)
	if pos == 0 {
		// first key of this batch starts the flush timer
		go b.startTimer(l)
	}

	if l.maxBatch != 0 && pos >= l.maxBatch-1 {
		if !b.closing {
			b.closing = true
			l.batch = nil
			go b.end(l)
		}
	}

	return pos
}

func (b *scenePlayCountLoaderBatch) startTimer(l *ScenePlayCountLoader) {
	time.Sleep(l.wait)
	l.mu.Lock()

	// we must have hit a batch limit and are already finalizing this batch
	if b.closing {
		l.mu.Unlock()
		return
	}

	l.batch = nil
	l.mu.Unlock()

	b.end(l)
}

// end runs the fetch and signals all waiting thunks via the done channel.
func (b *scenePlayCountLoaderBatch) end(l *ScenePlayCountLoader) {
	b.data, b.error = l.fetch(b.keys)
	close(b.done)
}

View file

@ -0,0 +1,223 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
package loaders
import (
"sync"
"time"
)
// ScenePlayHistoryLoaderConfig captures the config to create a new ScenePlayHistoryLoader
type ScenePlayHistoryLoaderConfig struct {
// Fetch is a method that provides the data for the loader
Fetch func(keys []int) ([][]time.Time, []error)
// Wait is how long wait before sending a batch
Wait time.Duration
// MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit
MaxBatch int
}
// NewScenePlayHistoryLoader creates a new ScenePlayHistoryLoader given a fetch, wait, and maxBatch
func NewScenePlayHistoryLoader(config ScenePlayHistoryLoaderConfig) *ScenePlayHistoryLoader {
return &ScenePlayHistoryLoader{
fetch: config.Fetch,
wait: config.Wait,
maxBatch: config.MaxBatch,
}
}
// ScenePlayHistoryLoader batches and caches requests
type ScenePlayHistoryLoader struct {
// this method provides the data for the loader
fetch func(keys []int) ([][]time.Time, []error)
// how long to done before sending a batch
wait time.Duration
// this will limit the maximum number of keys to send in one batch, 0 = no limit
maxBatch int
// INTERNAL
// lazily created cache
cache map[int][]time.Time
// the current batch. keys will continue to be collected until timeout is hit,
// then everything will be sent to the fetch method and out to the listeners
batch *scenePlayHistoryLoaderBatch
// mutex to prevent races
mu sync.Mutex
}
type scenePlayHistoryLoaderBatch struct {
keys []int
data [][]time.Time
error []error
closing bool
done chan struct{}
}
// Load a Time by key, batching and caching will be applied automatically
func (l *ScenePlayHistoryLoader) Load(key int) ([]time.Time, error) {
return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for the
// recorded times for the key.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *ScenePlayHistoryLoader) LoadThunk(key int) func() ([]time.Time, error) {
	l.mu.Lock()
	// cache hit: return a thunk that resolves immediately
	if it, ok := l.cache[key]; ok {
		l.mu.Unlock()
		return func() ([]time.Time, error) {
			return it, nil
		}
	}
	// cache miss: register the key with the current (possibly new) batch
	if l.batch == nil {
		l.batch = &scenePlayHistoryLoaderBatch{done: make(chan struct{})}
	}
	batch := l.batch
	pos := batch.keyIndex(l, key)
	l.mu.Unlock()
	return func() ([]time.Time, error) {
		// block until the batch has been fetched
		<-batch.done
		var data []time.Time
		if pos < len(batch.data) {
			data = batch.data[pos]
		}
		var err error
		// its convenient to be able to return a single error for everything
		if len(batch.error) == 1 {
			err = batch.error[0]
		} else if batch.error != nil {
			err = batch.error[pos]
		}
		// only cache successful results
		if err == nil {
			l.mu.Lock()
			l.unsafeSet(key, data)
			l.mu.Unlock()
		}
		return data, err
	}
}
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *ScenePlayHistoryLoader) LoadAll(keys []int) ([][]time.Time, []error) {
	// register all keys first so they share batches, then resolve
	thunks := make([]func() ([]time.Time, error), len(keys))
	for i := range keys {
		thunks[i] = l.LoadThunk(keys[i])
	}
	times := make([][]time.Time, len(keys))
	errors := make([]error, len(keys))
	for i := range thunks {
		times[i], errors[i] = thunks[i]()
	}
	return times, errors
}
// LoadAllThunk returns a function that when called will block waiting for the
// results of all the given keys.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *ScenePlayHistoryLoader) LoadAllThunk(keys []int) func() ([][]time.Time, []error) {
	// register all keys up front so they can share a batch
	thunks := make([]func() ([]time.Time, error), len(keys))
	for i := range keys {
		thunks[i] = l.LoadThunk(keys[i])
	}
	return func() ([][]time.Time, []error) {
		times := make([][]time.Time, len(thunks))
		errors := make([]error, len(thunks))
		for i, thunk := range thunks {
			times[i], errors[i] = thunk()
		}
		return times, errors
	}
}
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *ScenePlayHistoryLoader) Prime(key int, value []time.Time) bool {
	l.mu.Lock()
	defer l.mu.Unlock()
	if _, ok := l.cache[key]; ok {
		return false
	}
	// store a private copy so later mutation of the caller's slice
	// cannot corrupt the cached value
	cpy := make([]time.Time, len(value))
	copy(cpy, value)
	l.unsafeSet(key, cpy)
	return true
}
// Clear the value at key from the cache, if it exists
func (l *ScenePlayHistoryLoader) Clear(key int) {
	l.mu.Lock()
	defer l.mu.Unlock()
	delete(l.cache, key)
}
// unsafeSet stores value under key, lazily creating the cache map.
// Caller must hold l.mu.
func (l *ScenePlayHistoryLoader) unsafeSet(key int, value []time.Time) {
	if l.cache == nil {
		l.cache = make(map[int][]time.Time)
	}
	l.cache[key] = value
}
// keyIndex will return the location of the key in the batch, if its not found
// it will add the key to the batch
// Caller must hold l.mu.
func (b *scenePlayHistoryLoaderBatch) keyIndex(l *ScenePlayHistoryLoader, key int) int {
	// deduplicate: a key already in the batch reuses its position
	for i, existingKey := range b.keys {
		if key == existingKey {
			return i
		}
	}
	pos := len(b.keys)
	b.keys = append(b.keys, key)
	// the first key added to a batch starts the dispatch timer
	if pos == 0 {
		go b.startTimer(l)
	}
	// dispatch early when the batch is full; detach it from the loader so
	// new keys go into a fresh batch
	if l.maxBatch != 0 && pos >= l.maxBatch-1 {
		if !b.closing {
			b.closing = true
			l.batch = nil
			go b.end(l)
		}
	}
	return pos
}
// startTimer dispatches the batch after the configured wait period, unless
// the batch was already dispatched by hitting the size limit.
func (b *scenePlayHistoryLoaderBatch) startTimer(l *ScenePlayHistoryLoader) {
	time.Sleep(l.wait)
	l.mu.Lock()
	// we must have hit a batch limit and are already finalizing this batch
	if b.closing {
		l.mu.Unlock()
		return
	}
	// detach the batch so new keys go into a fresh one, then fetch
	l.batch = nil
	l.mu.Unlock()
	b.end(l)
}
// end runs the batch fetch and signals completion to all waiting thunks.
// data/error must be assigned before done is closed so that goroutines
// unblocked by <-done observe fully-populated results.
func (b *scenePlayHistoryLoaderBatch) end(l *ScenePlayHistoryLoader) {
	b.data, b.error = l.fetch(b.keys)
	close(b.done)
}

View file

@ -228,7 +228,7 @@ func (r *queryResolver) Stats(ctx context.Context) (*StatsResultType, error) {
return err return err
} }
scenesTotalOCount, err := sceneQB.OCount(ctx) scenesTotalOCount, err := sceneQB.GetAllOCount(ctx)
if err != nil { if err != nil {
return err return err
} }
@ -243,12 +243,12 @@ func (r *queryResolver) Stats(ctx context.Context) (*StatsResultType, error) {
return err return err
} }
totalPlayCount, err := sceneQB.PlayCount(ctx) totalPlayCount, err := sceneQB.CountAllViews(ctx)
if err != nil { if err != nil {
return err return err
} }
uniqueScenePlayCount, err := sceneQB.UniqueScenePlayCount(ctx) uniqueScenePlayCount, err := sceneQB.CountUniqueViews(ctx)
if err != nil { if err != nil {
return err return err
} }

View file

@ -3,6 +3,7 @@ package api
import ( import (
"context" "context"
"fmt" "fmt"
"time"
"github.com/stashapp/stash/internal/api/loaders" "github.com/stashapp/stash/internal/api/loaders"
"github.com/stashapp/stash/internal/api/urlbuilders" "github.com/stashapp/stash/internal/api/urlbuilders"
@ -319,3 +320,62 @@ func (r *sceneResolver) Urls(ctx context.Context, obj *models.Scene) ([]string,
return obj.URLs.List(), nil return obj.URLs.List(), nil
} }
// OCounter resolves the scene's o-count via the batching dataloader.
func (r *sceneResolver) OCounter(ctx context.Context, obj *models.Scene) (*int, error) {
	count, err := loaders.From(ctx).SceneOCount.Load(obj.ID)
	if err != nil {
		return nil, err
	}
	return &count, nil
}
// LastPlayedAt resolves the scene's most recent view time via the batching dataloader.
func (r *sceneResolver) LastPlayedAt(ctx context.Context, obj *models.Scene) (*time.Time, error) {
	last, err := loaders.From(ctx).SceneLastPlayed.Load(obj.ID)
	if err != nil {
		return nil, err
	}
	return last, nil
}
// PlayCount resolves the scene's view count via the batching dataloader.
func (r *sceneResolver) PlayCount(ctx context.Context, obj *models.Scene) (*int, error) {
	count, err := loaders.From(ctx).ScenePlayCount.Load(obj.ID)
	if err != nil {
		return nil, err
	}
	return &count, nil
}
// PlayHistory resolves the scene's recorded view times via the batching dataloader.
func (r *sceneResolver) PlayHistory(ctx context.Context, obj *models.Scene) ([]*time.Time, error) {
	history, err := loaders.From(ctx).ScenePlayHistory.Load(obj.ID)
	if err != nil {
		return nil, err
	}

	// convert to pointer slice; copy each element so the returned pointers
	// do not alias the loader's cached slice
	ptrs := make([]*time.Time, len(history))
	for i := range history {
		t := history[i]
		ptrs[i] = &t
	}
	return ptrs, nil
}
// OHistory resolves the scene's recorded o-counter times via the batching dataloader.
func (r *sceneResolver) OHistory(ctx context.Context, obj *models.Scene) ([]*time.Time, error) {
	history, err := loaders.From(ctx).SceneOHistory.Load(obj.ID)
	if err != nil {
		return nil, err
	}

	// convert to pointer slice; copy each element so the returned pointers
	// do not alias the loader's cached slice
	ptrs := make([]*time.Time, len(history))
	for i := range history {
		t := history[i]
		ptrs[i] = &t
	}
	return ptrs, nil
}

View file

@ -5,9 +5,11 @@ import (
"errors" "errors"
"fmt" "fmt"
"strconv" "strconv"
"time"
"github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin" "github.com/stashapp/stash/pkg/plugin"
"github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/scene"
@ -169,8 +171,15 @@ func scenePartialFromInput(input models.SceneUpdateInput, translator changesetTr
updatedScene.Details = translator.optionalString(input.Details, "details") updatedScene.Details = translator.optionalString(input.Details, "details")
updatedScene.Director = translator.optionalString(input.Director, "director") updatedScene.Director = translator.optionalString(input.Director, "director")
updatedScene.Rating = translator.optionalInt(input.Rating100, "rating100") updatedScene.Rating = translator.optionalInt(input.Rating100, "rating100")
updatedScene.OCounter = translator.optionalInt(input.OCounter, "o_counter")
updatedScene.PlayCount = translator.optionalInt(input.PlayCount, "play_count") if input.OCounter != nil {
logger.Warnf("o_counter is deprecated and no longer supported, use sceneIncrementO/sceneDecrementO instead")
}
if input.PlayCount != nil {
logger.Warnf("play_count is deprecated and no longer supported, use sceneIncrementPlayCount/sceneDecrementPlayCount instead")
}
updatedScene.PlayDuration = translator.optionalFloat64(input.PlayDuration, "play_duration") updatedScene.PlayDuration = translator.optionalFloat64(input.PlayDuration, "play_duration")
updatedScene.Organized = translator.optionalBool(input.Organized, "organized") updatedScene.Organized = translator.optionalBool(input.Organized, "organized")
updatedScene.StashIDs = translator.updateStashIDs(input.StashIds, "stash_ids") updatedScene.StashIDs = translator.updateStashIDs(input.StashIds, "stash_ids")
@ -569,7 +578,11 @@ func (r *mutationResolver) SceneMerge(ctx context.Context, input SceneMergeInput
var ret *models.Scene var ret *models.Scene
if err := r.withTxn(ctx, func(ctx context.Context) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
if err := r.Resolver.sceneService.Merge(ctx, srcIDs, destID, *values, fileDeleter); err != nil { if err := r.Resolver.sceneService.Merge(ctx, srcIDs, destID, fileDeleter, scene.MergeOptions{
ScenePartial: *values,
IncludePlayHistory: utils.IsTrue(input.PlayHistory),
IncludeOHistory: utils.IsTrue(input.OHistory),
}); err != nil {
return err return err
} }
@ -811,16 +824,96 @@ func (r *mutationResolver) SceneSaveActivity(ctx context.Context, id string, res
return ret, nil return ret, nil
} }
// deprecated
func (r *mutationResolver) SceneIncrementPlayCount(ctx context.Context, id string) (ret int, err error) { func (r *mutationResolver) SceneIncrementPlayCount(ctx context.Context, id string) (ret int, err error) {
sceneID, err := strconv.Atoi(id) sceneID, err := strconv.Atoi(id)
if err != nil { if err != nil {
return 0, fmt.Errorf("converting id: %w", err) return 0, fmt.Errorf("converting id: %w", err)
} }
var updatedTimes []time.Time
if err := r.withTxn(ctx, func(ctx context.Context) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Scene qb := r.repository.Scene
ret, err = qb.IncrementWatchCount(ctx, sceneID) updatedTimes, err = qb.AddViews(ctx, sceneID, nil)
return err
}); err != nil {
return 0, err
}
return len(updatedTimes), nil
}
// SceneAddPlay records view times for a scene. A nil/empty time list is
// passed through so the store records the current time.
func (r *mutationResolver) SceneAddPlay(ctx context.Context, id string, t []*time.Time) (*HistoryMutationResult, error) {
	sceneID, err := strconv.Atoi(id)
	if err != nil {
		return nil, fmt.Errorf("converting id: %w", err)
	}

	// convert time to local time, so that sorting is consistent
	// NOTE: times stays nil when t is empty - do not pre-allocate
	var times []time.Time
	for _, tt := range t {
		times = append(times, tt.Local())
	}

	var history []time.Time
	if err := r.withTxn(ctx, func(ctx context.Context) error {
		history, err = r.repository.Scene.AddViews(ctx, sceneID, times)
		return err
	}); err != nil {
		return nil, err
	}

	return &HistoryMutationResult{
		Count:   len(history),
		History: sliceutil.ValuesToPtrs(history),
	}, nil
}
// SceneDeletePlay removes the given view times from a scene's play history.
// If no times are provided, the store removes the most recent view.
// Returns the remaining history and its count.
func (r *mutationResolver) SceneDeletePlay(ctx context.Context, id string, t []*time.Time) (*HistoryMutationResult, error) {
	sceneID, err := strconv.Atoi(id)
	if err != nil {
		// wrap with context, consistent with the other scene history mutations
		return nil, fmt.Errorf("converting id: %w", err)
	}

	var times []time.Time
	for _, tt := range t {
		times = append(times, *tt)
	}

	var updatedTimes []time.Time
	if err := r.withTxn(ctx, func(ctx context.Context) error {
		qb := r.repository.Scene
		updatedTimes, err = qb.DeleteViews(ctx, sceneID, times)
		return err
	}); err != nil {
		return nil, err
	}

	return &HistoryMutationResult{
		Count:   len(updatedTimes),
		History: sliceutil.ValuesToPtrs(updatedTimes),
	}, nil
}
func (r *mutationResolver) SceneResetPlayCount(ctx context.Context, id string) (ret int, err error) {
sceneID, err := strconv.Atoi(id)
if err != nil {
return 0, err
}
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Scene
ret, err = qb.DeleteAllViews(ctx, sceneID)
return err return err
}); err != nil { }); err != nil {
return 0, err return 0, err
@ -829,40 +922,46 @@ func (r *mutationResolver) SceneIncrementPlayCount(ctx context.Context, id strin
return ret, nil return ret, nil
} }
// deprecated
func (r *mutationResolver) SceneIncrementO(ctx context.Context, id string) (ret int, err error) { func (r *mutationResolver) SceneIncrementO(ctx context.Context, id string) (ret int, err error) {
sceneID, err := strconv.Atoi(id) sceneID, err := strconv.Atoi(id)
if err != nil { if err != nil {
return 0, fmt.Errorf("converting id: %w", err) return 0, fmt.Errorf("converting id: %w", err)
} }
var updatedTimes []time.Time
if err := r.withTxn(ctx, func(ctx context.Context) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Scene qb := r.repository.Scene
ret, err = qb.IncrementOCounter(ctx, sceneID) updatedTimes, err = qb.AddO(ctx, sceneID, nil)
return err return err
}); err != nil { }); err != nil {
return 0, err return 0, err
} }
return ret, nil return len(updatedTimes), nil
} }
// deprecated
func (r *mutationResolver) SceneDecrementO(ctx context.Context, id string) (ret int, err error) { func (r *mutationResolver) SceneDecrementO(ctx context.Context, id string) (ret int, err error) {
sceneID, err := strconv.Atoi(id) sceneID, err := strconv.Atoi(id)
if err != nil { if err != nil {
return 0, fmt.Errorf("converting id: %w", err) return 0, fmt.Errorf("converting id: %w", err)
} }
var updatedTimes []time.Time
if err := r.withTxn(ctx, func(ctx context.Context) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Scene qb := r.repository.Scene
ret, err = qb.DecrementOCounter(ctx, sceneID) updatedTimes, err = qb.DeleteO(ctx, sceneID, nil)
return err return err
}); err != nil { }); err != nil {
return 0, err return 0, err
} }
return ret, nil return len(updatedTimes), nil
} }
func (r *mutationResolver) SceneResetO(ctx context.Context, id string) (ret int, err error) { func (r *mutationResolver) SceneResetO(ctx context.Context, id string) (ret int, err error) {
@ -874,7 +973,7 @@ func (r *mutationResolver) SceneResetO(ctx context.Context, id string) (ret int,
if err := r.withTxn(ctx, func(ctx context.Context) error { if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Scene qb := r.repository.Scene
ret, err = qb.ResetOCounter(ctx, sceneID) ret, err = qb.ResetO(ctx, sceneID)
return err return err
}); err != nil { }); err != nil {
return 0, err return 0, err
@ -883,6 +982,65 @@ func (r *mutationResolver) SceneResetO(ctx context.Context, id string) (ret int,
return ret, nil return ret, nil
} }
// SceneAddO records o-counter times for a scene. A nil/empty time list is
// passed through so the store records the current time.
func (r *mutationResolver) SceneAddO(ctx context.Context, id string, t []*time.Time) (*HistoryMutationResult, error) {
	sceneID, err := strconv.Atoi(id)
	if err != nil {
		return nil, fmt.Errorf("converting id: %w", err)
	}

	// convert time to local time, so that sorting is consistent
	// NOTE: times stays nil when t is empty - do not pre-allocate
	var times []time.Time
	for _, tt := range t {
		times = append(times, tt.Local())
	}

	var history []time.Time
	if err := r.withTxn(ctx, func(ctx context.Context) error {
		history, err = r.repository.Scene.AddO(ctx, sceneID, times)
		return err
	}); err != nil {
		return nil, err
	}

	return &HistoryMutationResult{
		Count:   len(history),
		History: sliceutil.ValuesToPtrs(history),
	}, nil
}
// SceneDeleteO removes the given times from a scene's o-counter history.
// If no times are provided, the store removes the most recent entry.
func (r *mutationResolver) SceneDeleteO(ctx context.Context, id string, t []*time.Time) (*HistoryMutationResult, error) {
	sceneID, err := strconv.Atoi(id)
	if err != nil {
		return nil, fmt.Errorf("converting id: %w", err)
	}

	var times []time.Time
	for _, tt := range t {
		times = append(times, *tt)
	}

	var history []time.Time
	if err := r.withTxn(ctx, func(ctx context.Context) error {
		history, err = r.repository.Scene.DeleteO(ctx, sceneID, times)
		return err
	}); err != nil {
		return nil, err
	}

	return &HistoryMutationResult{
		Count:   len(history),
		History: sliceutil.ValuesToPtrs(history),
	}, nil
}
func (r *mutationResolver) SceneGenerateScreenshot(ctx context.Context, id string, at *float64) (string, error) { func (r *mutationResolver) SceneGenerateScreenshot(ctx context.Context, id string, at *float64) (string, error) {
if at != nil { if at != nil {
manager.GetInstance().GenerateScreenshot(ctx, id, *at) manager.GetInstance().GenerateScreenshot(ctx, id, *at)

View file

@ -2,14 +2,9 @@ package api
import ( import (
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/sliceutil"
) )
func stashIDsSliceToPtrSlice(v []models.StashID) []*models.StashID { func stashIDsSliceToPtrSlice(v []models.StashID) []*models.StashID {
ret := make([]*models.StashID, len(v)) return sliceutil.ValuesToPtrs(v)
for i, vv := range v {
c := vv
ret[i] = &c
}
return ret
} }

View file

@ -11,7 +11,7 @@ import (
type SceneService interface { type SceneService interface {
Create(ctx context.Context, input *models.Scene, fileIDs []models.FileID, coverImage []byte) (*models.Scene, error) Create(ctx context.Context, input *models.Scene, fileIDs []models.FileID, coverImage []byte) (*models.Scene, error)
AssignFile(ctx context.Context, sceneID int, fileID models.FileID) error AssignFile(ctx context.Context, sceneID int, fileID models.FileID) error
Merge(ctx context.Context, sourceIDs []int, destinationID int, values models.ScenePartial, fileDeleter *scene.FileDeleter) error Merge(ctx context.Context, sourceIDs []int, destinationID int, fileDeleter *scene.FileDeleter, options scene.MergeOptions) error
Destroy(ctx context.Context, scene *models.Scene, fileDeleter *scene.FileDeleter, deleteGenerated, deleteFile bool) error Destroy(ctx context.Context, scene *models.Scene, fileDeleter *scene.FileDeleter, deleteGenerated, deleteFile bool) error
} }

View file

@ -527,7 +527,6 @@ func (t *ExportTask) exportScene(ctx context.Context, wg *sync.WaitGroup, jobCha
newSceneJSON.Galleries = gallery.GetRefs(galleries) newSceneJSON.Galleries = gallery.GetRefs(galleries)
newSceneJSON.ResumeTime = s.ResumeTime newSceneJSON.ResumeTime = s.ResumeTime
newSceneJSON.PlayCount = s.PlayCount
newSceneJSON.PlayDuration = s.PlayDuration newSceneJSON.PlayDuration = s.PlayDuration
performers, err := performerReader.FindBySceneID(ctx, s.ID) performers, err := performerReader.FindBySceneID(ctx, s.ID)

View file

@ -46,25 +46,37 @@ type Scene struct {
// deprecated - for import only // deprecated - for import only
URL string `json:"url,omitempty"` URL string `json:"url,omitempty"`
URLs []string `json:"urls,omitempty"` URLs []string `json:"urls,omitempty"`
Date string `json:"date,omitempty"` Date string `json:"date,omitempty"`
Rating int `json:"rating,omitempty"` Rating int `json:"rating,omitempty"`
Organized bool `json:"organized,omitempty"` Organized bool `json:"organized,omitempty"`
OCounter int `json:"o_counter,omitempty"`
Details string `json:"details,omitempty"` // deprecated - for import only
Director string `json:"director,omitempty"` OCounter int `json:"o_counter,omitempty"`
Galleries []GalleryRef `json:"galleries,omitempty"`
Performers []string `json:"performers,omitempty"` Details string `json:"details,omitempty"`
Movies []SceneMovie `json:"movies,omitempty"` Director string `json:"director,omitempty"`
Tags []string `json:"tags,omitempty"` Galleries []GalleryRef `json:"galleries,omitempty"`
Markers []SceneMarker `json:"markers,omitempty"` Performers []string `json:"performers,omitempty"`
Files []string `json:"files,omitempty"` Movies []SceneMovie `json:"movies,omitempty"`
Cover string `json:"cover,omitempty"` Tags []string `json:"tags,omitempty"`
CreatedAt json.JSONTime `json:"created_at,omitempty"` Markers []SceneMarker `json:"markers,omitempty"`
UpdatedAt json.JSONTime `json:"updated_at,omitempty"` Files []string `json:"files,omitempty"`
LastPlayedAt json.JSONTime `json:"last_played_at,omitempty"` Cover string `json:"cover,omitempty"`
ResumeTime float64 `json:"resume_time,omitempty"` CreatedAt json.JSONTime `json:"created_at,omitempty"`
PlayCount int `json:"play_count,omitempty"` UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
// deprecated - for import only
LastPlayedAt json.JSONTime `json:"last_played_at,omitempty"`
ResumeTime float64 `json:"resume_time,omitempty"`
// deprecated - for import only
PlayCount int `json:"play_count,omitempty"`
PlayHistory []json.JSONTime `json:"play_history,omitempty"`
OHistory []json.JSONTime `json:"o_history,omitempty"`
PlayDuration float64 `json:"play_duration,omitempty"` PlayDuration float64 `json:"play_duration,omitempty"`
StashIDs []models.StashID `json:"stash_ids,omitempty"` StashIDs []models.StashID `json:"stash_ids,omitempty"`
} }

View file

@ -7,6 +7,8 @@ import (
models "github.com/stashapp/stash/pkg/models" models "github.com/stashapp/stash/pkg/models"
mock "github.com/stretchr/testify/mock" mock "github.com/stretchr/testify/mock"
time "time"
) )
// SceneReaderWriter is an autogenerated mock type for the SceneReaderWriter type // SceneReaderWriter is an autogenerated mock type for the SceneReaderWriter type
@ -42,6 +44,52 @@ func (_m *SceneReaderWriter) AddGalleryIDs(ctx context.Context, sceneID int, gal
return r0 return r0
} }
// AddO provides a mock function with given fields: ctx, id, dates
func (_m *SceneReaderWriter) AddO(ctx context.Context, id int, dates []time.Time) ([]time.Time, error) {
	ret := _m.Called(ctx, id, dates)
	// prefer a caller-registered return function; otherwise use the canned value
	var r0 []time.Time
	if rf, ok := ret.Get(0).(func(context.Context, int, []time.Time) []time.Time); ok {
		r0 = rf(ctx, id, dates)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([]time.Time)
		}
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, int, []time.Time) error); ok {
		r1 = rf(ctx, id, dates)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// AddViews provides a mock function with given fields: ctx, sceneID, dates
func (_m *SceneReaderWriter) AddViews(ctx context.Context, sceneID int, dates []time.Time) ([]time.Time, error) {
	ret := _m.Called(ctx, sceneID, dates)
	// prefer a caller-registered return function; otherwise use the canned value
	var r0 []time.Time
	if rf, ok := ret.Get(0).(func(context.Context, int, []time.Time) []time.Time); ok {
		r0 = rf(ctx, sceneID, dates)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([]time.Time)
		}
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, int, []time.Time) error); ok {
		r1 = rf(ctx, sceneID, dates)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// All provides a mock function with given fields: ctx // All provides a mock function with given fields: ctx
func (_m *SceneReaderWriter) All(ctx context.Context) ([]*models.Scene, error) { func (_m *SceneReaderWriter) All(ctx context.Context) ([]*models.Scene, error) {
ret := _m.Called(ctx) ret := _m.Called(ctx)
@ -100,6 +148,27 @@ func (_m *SceneReaderWriter) Count(ctx context.Context) (int, error) {
return r0, r1 return r0, r1
} }
// CountAllViews provides a mock function with given fields: ctx
func (_m *SceneReaderWriter) CountAllViews(ctx context.Context) (int, error) {
	ret := _m.Called(ctx)
	// prefer a caller-registered return function; otherwise use the canned value
	var r0 int
	if rf, ok := ret.Get(0).(func(context.Context) int); ok {
		r0 = rf(ctx)
	} else {
		// panics if the canned return value is not an int
		r0 = ret.Get(0).(int)
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context) error); ok {
		r1 = rf(ctx)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// CountByFileID provides a mock function with given fields: ctx, fileID // CountByFileID provides a mock function with given fields: ctx, fileID
func (_m *SceneReaderWriter) CountByFileID(ctx context.Context, fileID models.FileID) (int, error) { func (_m *SceneReaderWriter) CountByFileID(ctx context.Context, fileID models.FileID) (int, error) {
ret := _m.Called(ctx, fileID) ret := _m.Called(ctx, fileID)
@ -247,6 +316,48 @@ func (_m *SceneReaderWriter) CountMissingOSHash(ctx context.Context) (int, error
return r0, r1 return r0, r1
} }
// CountUniqueViews provides a mock function with given fields: ctx
func (_m *SceneReaderWriter) CountUniqueViews(ctx context.Context) (int, error) {
	ret := _m.Called(ctx)
	// prefer a caller-registered return function; otherwise use the canned value
	var r0 int
	if rf, ok := ret.Get(0).(func(context.Context) int); ok {
		r0 = rf(ctx)
	} else {
		// panics if the canned return value is not an int
		r0 = ret.Get(0).(int)
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context) error); ok {
		r1 = rf(ctx)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// CountViews provides a mock function with given fields: ctx, id
func (_m *SceneReaderWriter) CountViews(ctx context.Context, id int) (int, error) {
	ret := _m.Called(ctx, id)
	// prefer a caller-registered return function; otherwise use the canned value
	var r0 int
	if rf, ok := ret.Get(0).(func(context.Context, int) int); ok {
		r0 = rf(ctx, id)
	} else {
		// panics if the canned return value is not an int
		r0 = ret.Get(0).(int)
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
		r1 = rf(ctx, id)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// Create provides a mock function with given fields: ctx, newScene, fileIDs // Create provides a mock function with given fields: ctx, newScene, fileIDs
func (_m *SceneReaderWriter) Create(ctx context.Context, newScene *models.Scene, fileIDs []models.FileID) error { func (_m *SceneReaderWriter) Create(ctx context.Context, newScene *models.Scene, fileIDs []models.FileID) error {
ret := _m.Called(ctx, newScene, fileIDs) ret := _m.Called(ctx, newScene, fileIDs)
@ -261,8 +372,8 @@ func (_m *SceneReaderWriter) Create(ctx context.Context, newScene *models.Scene,
return r0 return r0
} }
// DecrementOCounter provides a mock function with given fields: ctx, id // DeleteAllViews provides a mock function with given fields: ctx, id
func (_m *SceneReaderWriter) DecrementOCounter(ctx context.Context, id int) (int, error) { func (_m *SceneReaderWriter) DeleteAllViews(ctx context.Context, id int) (int, error) {
ret := _m.Called(ctx, id) ret := _m.Called(ctx, id)
var r0 int var r0 int
@ -282,6 +393,52 @@ func (_m *SceneReaderWriter) DecrementOCounter(ctx context.Context, id int) (int
return r0, r1 return r0, r1
} }
// DeleteO provides a mock function with given fields: ctx, id, dates
func (_m *SceneReaderWriter) DeleteO(ctx context.Context, id int, dates []time.Time) ([]time.Time, error) {
	ret := _m.Called(ctx, id, dates)
	// prefer a caller-registered return function; otherwise use the canned value
	var r0 []time.Time
	if rf, ok := ret.Get(0).(func(context.Context, int, []time.Time) []time.Time); ok {
		r0 = rf(ctx, id, dates)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([]time.Time)
		}
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, int, []time.Time) error); ok {
		r1 = rf(ctx, id, dates)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// DeleteViews provides a mock function with given fields: ctx, id, dates
func (_m *SceneReaderWriter) DeleteViews(ctx context.Context, id int, dates []time.Time) ([]time.Time, error) {
	ret := _m.Called(ctx, id, dates)
	// prefer a caller-registered return function; otherwise use the canned value
	var r0 []time.Time
	if rf, ok := ret.Get(0).(func(context.Context, int, []time.Time) []time.Time); ok {
		r0 = rf(ctx, id, dates)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([]time.Time)
		}
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, int, []time.Time) error); ok {
		r1 = rf(ctx, id, dates)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// Destroy provides a mock function with given fields: ctx, id // Destroy provides a mock function with given fields: ctx, id
func (_m *SceneReaderWriter) Destroy(ctx context.Context, id int) error { func (_m *SceneReaderWriter) Destroy(ctx context.Context, id int) error {
ret := _m.Called(ctx, id) ret := _m.Called(ctx, id)
@ -593,6 +750,27 @@ func (_m *SceneReaderWriter) FindMany(ctx context.Context, ids []int) ([]*models
return r0, r1 return r0, r1
} }
// GetAllOCount provides a mock function with given fields: ctx
func (_m *SceneReaderWriter) GetAllOCount(ctx context.Context) (int, error) {
	ret := _m.Called(ctx)
	// prefer a caller-registered return function; otherwise use the canned value
	var r0 int
	if rf, ok := ret.Get(0).(func(context.Context) int); ok {
		r0 = rf(ctx)
	} else {
		// panics if the canned return value is not an int
		r0 = ret.Get(0).(int)
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context) error); ok {
		r1 = rf(ctx)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// GetCover provides a mock function with given fields: ctx, sceneID // GetCover provides a mock function with given fields: ctx, sceneID
func (_m *SceneReaderWriter) GetCover(ctx context.Context, sceneID int) ([]byte, error) { func (_m *SceneReaderWriter) GetCover(ctx context.Context, sceneID int) ([]byte, error) {
ret := _m.Called(ctx, sceneID) ret := _m.Called(ctx, sceneID)
@ -685,6 +863,121 @@ func (_m *SceneReaderWriter) GetManyFileIDs(ctx context.Context, ids []int) ([][
return r0, r1 return r0, r1
} }
// GetManyLastViewed provides a mock function with given fields: ctx, ids
func (_m *SceneReaderWriter) GetManyLastViewed(ctx context.Context, ids []int) ([]*time.Time, error) {
	ret := _m.Called(ctx, ids)
	// prefer a caller-registered return function; otherwise use the canned value
	var r0 []*time.Time
	if rf, ok := ret.Get(0).(func(context.Context, []int) []*time.Time); ok {
		r0 = rf(ctx, ids)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([]*time.Time)
		}
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok {
		r1 = rf(ctx, ids)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// GetManyOCount provides a mock function with given fields: ctx, ids
func (_m *SceneReaderWriter) GetManyOCount(ctx context.Context, ids []int) ([]int, error) {
	ret := _m.Called(ctx, ids)
	// prefer a caller-registered return function; otherwise use the canned value
	var r0 []int
	if rf, ok := ret.Get(0).(func(context.Context, []int) []int); ok {
		r0 = rf(ctx, ids)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([]int)
		}
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok {
		r1 = rf(ctx, ids)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// GetManyODates provides a mock function with given fields: ctx, ids
func (_m *SceneReaderWriter) GetManyODates(ctx context.Context, ids []int) ([][]time.Time, error) {
	ret := _m.Called(ctx, ids)
	// prefer a caller-registered return function; otherwise use the canned value
	var r0 [][]time.Time
	if rf, ok := ret.Get(0).(func(context.Context, []int) [][]time.Time); ok {
		r0 = rf(ctx, ids)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([][]time.Time)
		}
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok {
		r1 = rf(ctx, ids)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// GetManyViewCount provides a mock function with given fields: ctx, ids
func (_m *SceneReaderWriter) GetManyViewCount(ctx context.Context, ids []int) ([]int, error) {
	ret := _m.Called(ctx, ids)
	// prefer a caller-registered return function; otherwise use the canned value
	var r0 []int
	if rf, ok := ret.Get(0).(func(context.Context, []int) []int); ok {
		r0 = rf(ctx, ids)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([]int)
		}
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok {
		r1 = rf(ctx, ids)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// GetManyViewDates provides a mock function with given fields: ctx, ids
func (_m *SceneReaderWriter) GetManyViewDates(ctx context.Context, ids []int) ([][]time.Time, error) {
	ret := _m.Called(ctx, ids)
	// prefer a caller-registered return function; otherwise use the canned value
	var r0 [][]time.Time
	if rf, ok := ret.Get(0).(func(context.Context, []int) [][]time.Time); ok {
		r0 = rf(ctx, ids)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([][]time.Time)
		}
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok {
		r1 = rf(ctx, ids)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// GetMovies provides a mock function with given fields: ctx, id // GetMovies provides a mock function with given fields: ctx, id
func (_m *SceneReaderWriter) GetMovies(ctx context.Context, id int) ([]models.MoviesScenes, error) { func (_m *SceneReaderWriter) GetMovies(ctx context.Context, id int) ([]models.MoviesScenes, error) {
ret := _m.Called(ctx, id) ret := _m.Called(ctx, id)
@ -708,6 +1001,50 @@ func (_m *SceneReaderWriter) GetMovies(ctx context.Context, id int) ([]models.Mo
return r0, r1 return r0, r1
} }
// GetOCount provides a mock function with given fields: ctx, id
func (_m *SceneReaderWriter) GetOCount(ctx context.Context, id int) (int, error) {
	ret := _m.Called(ctx, id)
	// prefer a caller-registered return function; otherwise use the canned value
	var r0 int
	if rf, ok := ret.Get(0).(func(context.Context, int) int); ok {
		r0 = rf(ctx, id)
	} else {
		// panics if the canned return value is not an int
		r0 = ret.Get(0).(int)
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
		r1 = rf(ctx, id)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// GetODates provides a mock function with given fields: ctx, relatedID
func (_m *SceneReaderWriter) GetODates(ctx context.Context, relatedID int) ([]time.Time, error) {
	ret := _m.Called(ctx, relatedID)
	// prefer a caller-registered return function; otherwise use the canned value
	var r0 []time.Time
	if rf, ok := ret.Get(0).(func(context.Context, int) []time.Time); ok {
		r0 = rf(ctx, relatedID)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([]time.Time)
		}
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
		r1 = rf(ctx, relatedID)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// GetPerformerIDs provides a mock function with given fields: ctx, relatedID // GetPerformerIDs provides a mock function with given fields: ctx, relatedID
func (_m *SceneReaderWriter) GetPerformerIDs(ctx context.Context, relatedID int) ([]int, error) { func (_m *SceneReaderWriter) GetPerformerIDs(ctx context.Context, relatedID int) ([]int, error) {
ret := _m.Called(ctx, relatedID) ret := _m.Called(ctx, relatedID)
@ -800,6 +1137,29 @@ func (_m *SceneReaderWriter) GetURLs(ctx context.Context, relatedID int) ([]stri
return r0, r1 return r0, r1
} }
// GetViewDates provides a mock function with given fields: ctx, relatedID
func (_m *SceneReaderWriter) GetViewDates(ctx context.Context, relatedID int) ([]time.Time, error) {
	ret := _m.Called(ctx, relatedID)
	// prefer a caller-registered return function; otherwise use the canned value
	var r0 []time.Time
	if rf, ok := ret.Get(0).(func(context.Context, int) []time.Time); ok {
		r0 = rf(ctx, relatedID)
	} else {
		// nil guard: asserting a nil interface to a slice type would panic
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([]time.Time)
		}
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
		r1 = rf(ctx, relatedID)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// HasCover provides a mock function with given fields: ctx, sceneID // HasCover provides a mock function with given fields: ctx, sceneID
func (_m *SceneReaderWriter) HasCover(ctx context.Context, sceneID int) (bool, error) { func (_m *SceneReaderWriter) HasCover(ctx context.Context, sceneID int) (bool, error) {
ret := _m.Called(ctx, sceneID) ret := _m.Called(ctx, sceneID)
@ -821,69 +1181,6 @@ func (_m *SceneReaderWriter) HasCover(ctx context.Context, sceneID int) (bool, e
return r0, r1 return r0, r1
} }
// IncrementOCounter provides a mock function with given fields: ctx, id
func (_m *SceneReaderWriter) IncrementOCounter(ctx context.Context, id int) (int, error) {
	ret := _m.Called(ctx, id)
	// prefer a caller-registered return function; otherwise the canned value
	var r0 int
	if rf, ok := ret.Get(0).(func(context.Context, int) int); ok {
		r0 = rf(ctx, id)
	} else {
		r0 = ret.Get(0).(int)
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
		r1 = rf(ctx, id)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// IncrementWatchCount provides a mock function with given fields: ctx, sceneID
func (_m *SceneReaderWriter) IncrementWatchCount(ctx context.Context, sceneID int) (int, error) {
	ret := _m.Called(ctx, sceneID)
	// prefer a caller-registered return function; otherwise the canned value
	var r0 int
	if rf, ok := ret.Get(0).(func(context.Context, int) int); ok {
		r0 = rf(ctx, sceneID)
	} else {
		r0 = ret.Get(0).(int)
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
		r1 = rf(ctx, sceneID)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// OCount provides a mock function with given fields: ctx
func (_m *SceneReaderWriter) OCount(ctx context.Context) (int, error) {
	ret := _m.Called(ctx)
	// prefer a caller-registered return function; otherwise the canned value
	var r0 int
	if rf, ok := ret.Get(0).(func(context.Context) int); ok {
		r0 = rf(ctx)
	} else {
		r0 = ret.Get(0).(int)
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context) error); ok {
		r1 = rf(ctx)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// OCountByPerformerID provides a mock function with given fields: ctx, performerID // OCountByPerformerID provides a mock function with given fields: ctx, performerID
func (_m *SceneReaderWriter) OCountByPerformerID(ctx context.Context, performerID int) (int, error) { func (_m *SceneReaderWriter) OCountByPerformerID(ctx context.Context, performerID int) (int, error) {
ret := _m.Called(ctx, performerID) ret := _m.Called(ctx, performerID)
@ -905,27 +1202,6 @@ func (_m *SceneReaderWriter) OCountByPerformerID(ctx context.Context, performerI
return r0, r1 return r0, r1
} }
// PlayCount provides a mock function with given fields: ctx
func (_m *SceneReaderWriter) PlayCount(ctx context.Context) (int, error) {
	ret := _m.Called(ctx)
	// prefer a caller-registered return function; otherwise the canned value
	var r0 int
	if rf, ok := ret.Get(0).(func(context.Context) int); ok {
		r0 = rf(ctx)
	} else {
		r0 = ret.Get(0).(int)
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context) error); ok {
		r1 = rf(ctx)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// PlayDuration provides a mock function with given fields: ctx // PlayDuration provides a mock function with given fields: ctx
func (_m *SceneReaderWriter) PlayDuration(ctx context.Context) (float64, error) { func (_m *SceneReaderWriter) PlayDuration(ctx context.Context) (float64, error) {
ret := _m.Called(ctx) ret := _m.Called(ctx)
@ -991,8 +1267,8 @@ func (_m *SceneReaderWriter) QueryCount(ctx context.Context, sceneFilter *models
return r0, r1 return r0, r1
} }
// ResetOCounter provides a mock function with given fields: ctx, id // ResetO provides a mock function with given fields: ctx, id
func (_m *SceneReaderWriter) ResetOCounter(ctx context.Context, id int) (int, error) { func (_m *SceneReaderWriter) ResetO(ctx context.Context, id int) (int, error) {
ret := _m.Called(ctx, id) ret := _m.Called(ctx, id)
var r0 int var r0 int
@ -1054,27 +1330,6 @@ func (_m *SceneReaderWriter) Size(ctx context.Context) (float64, error) {
return r0, r1 return r0, r1
} }
// UniqueScenePlayCount provides a mock function with given fields: ctx
func (_m *SceneReaderWriter) UniqueScenePlayCount(ctx context.Context) (int, error) {
	ret := _m.Called(ctx)
	// prefer a caller-registered return function; otherwise the canned value
	var r0 int
	if rf, ok := ret.Get(0).(func(context.Context) int); ok {
		r0 = rf(ctx)
	} else {
		r0 = ret.Get(0).(int)
	}
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context) error); ok {
		r1 = rf(ctx)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// Update provides a mock function with given fields: ctx, updatedScene // Update provides a mock function with given fields: ctx, updatedScene
func (_m *SceneReaderWriter) Update(ctx context.Context, updatedScene *models.Scene) error { func (_m *SceneReaderWriter) Update(ctx context.Context, updatedScene *models.Scene) error {
ret := _m.Called(ctx, updatedScene) ret := _m.Called(ctx, updatedScene)

View file

@ -19,7 +19,6 @@ type Scene struct {
// Rating expressed in 1-100 scale // Rating expressed in 1-100 scale
Rating *int `json:"rating"` Rating *int `json:"rating"`
Organized bool `json:"organized"` Organized bool `json:"organized"`
OCounter int `json:"o_counter"`
StudioID *int `json:"studio_id"` StudioID *int `json:"studio_id"`
// transient - not persisted // transient - not persisted
@ -35,10 +34,8 @@ type Scene struct {
CreatedAt time.Time `json:"created_at"` CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"` UpdatedAt time.Time `json:"updated_at"`
LastPlayedAt *time.Time `json:"last_played_at"` ResumeTime float64 `json:"resume_time"`
ResumeTime float64 `json:"resume_time"` PlayDuration float64 `json:"play_duration"`
PlayDuration float64 `json:"play_duration"`
PlayCount int `json:"play_count"`
URLs RelatedStrings `json:"urls"` URLs RelatedStrings `json:"urls"`
GalleryIDs RelatedIDs `json:"gallery_ids"` GalleryIDs RelatedIDs `json:"gallery_ids"`
@ -67,14 +64,11 @@ type ScenePartial struct {
// Rating expressed in 1-100 scale // Rating expressed in 1-100 scale
Rating OptionalInt Rating OptionalInt
Organized OptionalBool Organized OptionalBool
OCounter OptionalInt
StudioID OptionalInt StudioID OptionalInt
CreatedAt OptionalTime CreatedAt OptionalTime
UpdatedAt OptionalTime UpdatedAt OptionalTime
ResumeTime OptionalFloat64 ResumeTime OptionalFloat64
PlayDuration OptionalFloat64 PlayDuration OptionalFloat64
PlayCount OptionalInt
LastPlayedAt OptionalTime
URLs *UpdateStrings URLs *UpdateStrings
GalleryIDs *UpdateIDs GalleryIDs *UpdateIDs

View file

@ -1,6 +1,8 @@
package models package models
import "context" import (
"context"
)
type SceneIDLoader interface { type SceneIDLoader interface {
GetSceneIDs(ctx context.Context, relatedID int) ([]int, error) GetSceneIDs(ctx context.Context, relatedID int) ([]int, error)

View file

@ -1,6 +1,9 @@
package models package models
import "context" import (
"context"
"time"
)
// SceneGetter provides methods to get scenes by ID. // SceneGetter provides methods to get scenes by ID.
type SceneGetter interface { type SceneGetter interface {
@ -40,10 +43,7 @@ type SceneCounter interface {
CountByTagID(ctx context.Context, tagID int) (int, error) CountByTagID(ctx context.Context, tagID int) (int, error)
CountMissingChecksum(ctx context.Context) (int, error) CountMissingChecksum(ctx context.Context) (int, error)
CountMissingOSHash(ctx context.Context) (int, error) CountMissingOSHash(ctx context.Context) (int, error)
OCount(ctx context.Context) (int, error)
OCountByPerformerID(ctx context.Context, performerID int) (int, error) OCountByPerformerID(ctx context.Context, performerID int) (int, error)
PlayCount(ctx context.Context) (int, error)
UniqueScenePlayCount(ctx context.Context) (int, error)
} }
// SceneCreator provides methods to create scenes. // SceneCreator provides methods to create scenes.
@ -68,6 +68,24 @@ type SceneCreatorUpdater interface {
SceneUpdater SceneUpdater
} }
type ViewDateReader interface {
CountViews(ctx context.Context, id int) (int, error)
CountAllViews(ctx context.Context) (int, error)
CountUniqueViews(ctx context.Context) (int, error)
GetManyViewCount(ctx context.Context, ids []int) ([]int, error)
GetViewDates(ctx context.Context, relatedID int) ([]time.Time, error)
GetManyViewDates(ctx context.Context, ids []int) ([][]time.Time, error)
GetManyLastViewed(ctx context.Context, ids []int) ([]*time.Time, error)
}
type ODateReader interface {
GetOCount(ctx context.Context, id int) (int, error)
GetManyOCount(ctx context.Context, ids []int) ([]int, error)
GetAllOCount(ctx context.Context) (int, error)
GetODates(ctx context.Context, relatedID int) ([]time.Time, error)
GetManyODates(ctx context.Context, ids []int) ([][]time.Time, error)
}
// SceneReader provides all methods to read scenes. // SceneReader provides all methods to read scenes.
type SceneReader interface { type SceneReader interface {
SceneFinder SceneFinder
@ -75,6 +93,8 @@ type SceneReader interface {
SceneCounter SceneCounter
URLLoader URLLoader
ViewDateReader
ODateReader
FileIDLoader FileIDLoader
GalleryIDLoader GalleryIDLoader
PerformerIDLoader PerformerIDLoader
@ -92,6 +112,18 @@ type SceneReader interface {
HasCover(ctx context.Context, sceneID int) (bool, error) HasCover(ctx context.Context, sceneID int) (bool, error)
} }
type OHistoryWriter interface {
AddO(ctx context.Context, id int, dates []time.Time) ([]time.Time, error)
DeleteO(ctx context.Context, id int, dates []time.Time) ([]time.Time, error)
ResetO(ctx context.Context, id int) (int, error)
}
type ViewHistoryWriter interface {
AddViews(ctx context.Context, sceneID int, dates []time.Time) ([]time.Time, error)
DeleteViews(ctx context.Context, id int, dates []time.Time) ([]time.Time, error)
DeleteAllViews(ctx context.Context, id int) (int, error)
}
// SceneWriter provides all methods to modify scenes. // SceneWriter provides all methods to modify scenes.
type SceneWriter interface { type SceneWriter interface {
SceneCreator SceneCreator
@ -101,11 +133,10 @@ type SceneWriter interface {
AddFileID(ctx context.Context, id int, fileID FileID) error AddFileID(ctx context.Context, id int, fileID FileID) error
AddGalleryIDs(ctx context.Context, sceneID int, galleryIDs []int) error AddGalleryIDs(ctx context.Context, sceneID int, galleryIDs []int) error
AssignFiles(ctx context.Context, sceneID int, fileID []FileID) error AssignFiles(ctx context.Context, sceneID int, fileID []FileID) error
IncrementOCounter(ctx context.Context, id int) (int, error)
DecrementOCounter(ctx context.Context, id int) (int, error) OHistoryWriter
ResetOCounter(ctx context.Context, id int) (int, error) ViewHistoryWriter
SaveActivity(ctx context.Context, sceneID int, resumeTime *float64, playDuration *float64) (bool, error) SaveActivity(ctx context.Context, sceneID int, resumeTime *float64, playDuration *float64) (bool, error)
IncrementWatchCount(ctx context.Context, sceneID int) (int, error)
} }
// SceneReaderWriter provides all scene methods. // SceneReaderWriter provides all scene methods.

View file

@ -14,7 +14,9 @@ import (
"github.com/stashapp/stash/pkg/utils" "github.com/stashapp/stash/pkg/utils"
) )
type CoverGetter interface { type ExportGetter interface {
models.ViewDateReader
models.ODateReader
GetCover(ctx context.Context, sceneID int) ([]byte, error) GetCover(ctx context.Context, sceneID int) ([]byte, error)
} }
@ -27,7 +29,7 @@ type TagFinder interface {
// ToBasicJSON converts a scene object into its JSON object equivalent. It // ToBasicJSON converts a scene object into its JSON object equivalent. It
// does not convert the relationships to other objects, with the exception // does not convert the relationships to other objects, with the exception
// of cover image. // of cover image.
func ToBasicJSON(ctx context.Context, reader CoverGetter, scene *models.Scene) (*jsonschema.Scene, error) { func ToBasicJSON(ctx context.Context, reader ExportGetter, scene *models.Scene) (*jsonschema.Scene, error) {
newSceneJSON := jsonschema.Scene{ newSceneJSON := jsonschema.Scene{
Title: scene.Title, Title: scene.Title,
Code: scene.Code, Code: scene.Code,
@ -47,7 +49,6 @@ func ToBasicJSON(ctx context.Context, reader CoverGetter, scene *models.Scene) (
} }
newSceneJSON.Organized = scene.Organized newSceneJSON.Organized = scene.Organized
newSceneJSON.OCounter = scene.OCounter
for _, f := range scene.Files.List() { for _, f := range scene.Files.List() {
newSceneJSON.Files = append(newSceneJSON.Files, f.Base().Path) newSceneJSON.Files = append(newSceneJSON.Files, f.Base().Path)
@ -73,6 +74,24 @@ func ToBasicJSON(ctx context.Context, reader CoverGetter, scene *models.Scene) (
newSceneJSON.StashIDs = ret newSceneJSON.StashIDs = ret
dates, err := reader.GetViewDates(ctx, scene.ID)
if err != nil {
return nil, fmt.Errorf("error getting view dates: %v", err)
}
for _, date := range dates {
newSceneJSON.PlayHistory = append(newSceneJSON.PlayHistory, json.JSONTime{Time: date})
}
odates, err := reader.GetODates(ctx, scene.ID)
if err != nil {
return nil, fmt.Errorf("error getting o dates: %v", err)
}
for _, date := range odates {
newSceneJSON.OHistory = append(newSceneJSON.OHistory, json.JSONTime{Time: date})
}
return &newSceneJSON, nil return &newSceneJSON, nil
} }

View file

@ -8,6 +8,7 @@ import (
"github.com/stashapp/stash/pkg/models/jsonschema" "github.com/stashapp/stash/pkg/models/jsonschema"
"github.com/stashapp/stash/pkg/models/mocks" "github.com/stashapp/stash/pkg/models/mocks"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
"testing" "testing"
"time" "time"
@ -40,7 +41,6 @@ var (
date = "2001-01-01" date = "2001-01-01"
dateObj, _ = models.ParseDate(date) dateObj, _ = models.ParseDate(date)
rating = 5 rating = 5
ocounter = 2
organized = true organized = true
details = "details" details = "details"
) )
@ -88,7 +88,6 @@ func createFullScene(id int) models.Scene {
Title: title, Title: title,
Date: &dateObj, Date: &dateObj,
Details: details, Details: details,
OCounter: ocounter,
Rating: &rating, Rating: &rating,
Organized: organized, Organized: organized,
URLs: models.NewRelatedStrings([]string{url}), URLs: models.NewRelatedStrings([]string{url}),
@ -130,7 +129,6 @@ func createFullJSONScene(image string) *jsonschema.Scene {
Files: []string{path}, Files: []string{path},
Date: date, Date: date,
Details: details, Details: details,
OCounter: ocounter,
Rating: rating, Rating: rating,
Organized: organized, Organized: organized,
URLs: []string{url}, URLs: []string{url},
@ -193,6 +191,8 @@ func TestToJSON(t *testing.T) {
db.Scene.On("GetCover", testCtx, sceneID).Return(imageBytes, nil).Once() db.Scene.On("GetCover", testCtx, sceneID).Return(imageBytes, nil).Once()
db.Scene.On("GetCover", testCtx, noImageID).Return(nil, nil).Once() db.Scene.On("GetCover", testCtx, noImageID).Return(nil, nil).Once()
db.Scene.On("GetCover", testCtx, errImageID).Return(nil, imageErr).Once() db.Scene.On("GetCover", testCtx, errImageID).Return(nil, imageErr).Once()
db.Scene.On("GetViewDates", testCtx, mock.Anything).Return(nil, nil)
db.Scene.On("GetODates", testCtx, mock.Anything).Return(nil, nil)
for i, s := range scenarios { for i, s := range scenarios {
scene := s.input scene := s.input

View file

@ -4,8 +4,10 @@ import (
"context" "context"
"fmt" "fmt"
"strings" "strings"
"time"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/json"
"github.com/stashapp/stash/pkg/models/jsonschema" "github.com/stashapp/stash/pkg/models/jsonschema"
"github.com/stashapp/stash/pkg/sliceutil" "github.com/stashapp/stash/pkg/sliceutil"
"github.com/stashapp/stash/pkg/utils" "github.com/stashapp/stash/pkg/utils"
@ -13,6 +15,8 @@ import (
type ImporterReaderWriter interface { type ImporterReaderWriter interface {
models.SceneCreatorUpdater models.SceneCreatorUpdater
models.ViewHistoryWriter
models.OHistoryWriter
FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error)
} }
@ -31,6 +35,8 @@ type Importer struct {
ID int ID int
scene models.Scene scene models.Scene
coverImageData []byte coverImageData []byte
viewHistory []time.Time
oHistory []time.Time
} }
func (i *Importer) PreImport(ctx context.Context) error { func (i *Importer) PreImport(ctx context.Context) error {
@ -68,6 +74,9 @@ func (i *Importer) PreImport(ctx context.Context) error {
} }
} }
i.populateViewHistory()
i.populateOHistory()
return nil return nil
} }
@ -101,20 +110,54 @@ func (i *Importer) sceneJSONToScene(sceneJSON jsonschema.Scene) models.Scene {
} }
newScene.Organized = sceneJSON.Organized newScene.Organized = sceneJSON.Organized
newScene.OCounter = sceneJSON.OCounter
newScene.CreatedAt = sceneJSON.CreatedAt.GetTime() newScene.CreatedAt = sceneJSON.CreatedAt.GetTime()
newScene.UpdatedAt = sceneJSON.UpdatedAt.GetTime() newScene.UpdatedAt = sceneJSON.UpdatedAt.GetTime()
if !sceneJSON.LastPlayedAt.IsZero() {
t := sceneJSON.LastPlayedAt.GetTime()
newScene.LastPlayedAt = &t
}
newScene.ResumeTime = sceneJSON.ResumeTime newScene.ResumeTime = sceneJSON.ResumeTime
newScene.PlayDuration = sceneJSON.PlayDuration newScene.PlayDuration = sceneJSON.PlayDuration
newScene.PlayCount = sceneJSON.PlayCount
return newScene return newScene
} }
// getHistory converts imported history data into a list of timestamps.
// Explicit history entries in historyJSON take precedence. Otherwise `count`
// synthetic entries are generated, all dated createdAt, except that the final
// entry uses `last` when a non-zero last-activity date is available.
func getHistory(historyJSON []json.JSONTime, count int, last json.JSONTime, createdAt json.JSONTime) []time.Time {
	// explicit dates win over a bare count
	if len(historyJSON) > 0 {
		ret := make([]time.Time, 0, len(historyJSON))
		for _, d := range historyJSON {
			ret = append(ret, d.GetTime())
		}
		return ret
	}

	if count <= 0 {
		return nil
	}

	// no per-event dates recorded: synthesize them from createdAt
	base := createdAt.GetTime()
	ret := make([]time.Time, count)
	for j := range ret {
		ret[j] = base
	}
	if !last.IsZero() {
		// most recent entry gets the recorded last-activity date
		ret[count-1] = last.GetTime()
	}
	return ret
}
// populateViewHistory derives the play (view) history for the imported scene
// from the JSON input, falling back to PlayCount/LastPlayedAt when no
// explicit play history is present.
func (i *Importer) populateViewHistory() {
	in := i.Input
	i.viewHistory = getHistory(in.PlayHistory, in.PlayCount, in.LastPlayedAt, in.CreatedAt)
}
// populateOHistory derives the o-date history for the imported scene from
// the JSON input: explicit OHistory dates if present, otherwise OCounter
// synthetic entries dated CreatedAt (the schema has no "last o" date, so
// CreatedAt is passed for both the last and created arguments).
func (i *Importer) populateOHistory() {
	// BUG FIX: this previously assigned to i.viewHistory, which both lost
	// the o-history (addOHistory reads i.oHistory, which stayed nil) and
	// clobbered the play history computed by populateViewHistory.
	i.oHistory = getHistory(
		i.Input.OHistory,
		i.Input.OCounter,
		i.Input.CreatedAt, // no last o count date
		i.Input.CreatedAt,
	)
}
func (i *Importer) populateFiles(ctx context.Context) error { func (i *Importer) populateFiles(ctx context.Context) error {
files := make([]*models.VideoFile, 0) files := make([]*models.VideoFile, 0)
@ -365,6 +408,28 @@ func (i *Importer) populateTags(ctx context.Context) error {
return nil return nil
} }
// addViewHistory persists the accumulated view history for the newly
// imported scene. A no-op when there is no history to add.
func (i *Importer) addViewHistory(ctx context.Context) error {
	if len(i.viewHistory) == 0 {
		return nil
	}
	if _, err := i.ReaderWriter.AddViews(ctx, i.ID, i.viewHistory); err != nil {
		return fmt.Errorf("error adding view date: %v", err)
	}
	return nil
}
// addOHistory persists the accumulated o-date history for the newly
// imported scene. A no-op when there is no history to add.
func (i *Importer) addOHistory(ctx context.Context) error {
	if len(i.oHistory) == 0 {
		return nil
	}
	if _, err := i.ReaderWriter.AddO(ctx, i.ID, i.oHistory); err != nil {
		return fmt.Errorf("error adding o date: %v", err)
	}
	return nil
}
func (i *Importer) PostImport(ctx context.Context, id int) error { func (i *Importer) PostImport(ctx context.Context, id int) error {
if len(i.coverImageData) > 0 { if len(i.coverImageData) > 0 {
if err := i.ReaderWriter.UpdateCover(ctx, id, i.coverImageData); err != nil { if err := i.ReaderWriter.UpdateCover(ctx, id, i.coverImageData); err != nil {
@ -372,6 +437,15 @@ func (i *Importer) PostImport(ctx context.Context, id int) error {
} }
} }
// add histories
if err := i.addViewHistory(ctx); err != nil {
return err
}
if err := i.addOHistory(ctx); err != nil {
return err
}
return nil return nil
} }

View file

@ -6,6 +6,7 @@ import (
"fmt" "fmt"
"os" "os"
"path/filepath" "path/filepath"
"time"
"github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
@ -14,13 +15,15 @@ import (
"github.com/stashapp/stash/pkg/txn" "github.com/stashapp/stash/pkg/txn"
) )
func (s *Service) Merge( type MergeOptions struct {
ctx context.Context, ScenePartial models.ScenePartial
sourceIDs []int, IncludePlayHistory bool
destinationID int, IncludeOHistory bool
scenePartial models.ScenePartial, }
fileDeleter *FileDeleter,
) error { func (s *Service) Merge(ctx context.Context, sourceIDs []int, destinationID int, fileDeleter *FileDeleter, options MergeOptions) error {
scenePartial := options.ScenePartial
// ensure source ids are unique // ensure source ids are unique
sourceIDs = sliceutil.AppendUniques(nil, sourceIDs) sourceIDs = sliceutil.AppendUniques(nil, sourceIDs)
@ -74,6 +77,44 @@ func (s *Service) Merge(
return fmt.Errorf("updating scene: %w", err) return fmt.Errorf("updating scene: %w", err)
} }
// merge play history
if options.IncludePlayHistory {
var allDates []time.Time
for _, src := range sources {
thisDates, err := s.Repository.GetViewDates(ctx, src.ID)
if err != nil {
return fmt.Errorf("getting view dates for scene %d: %w", src.ID, err)
}
allDates = append(allDates, thisDates...)
}
if len(allDates) > 0 {
if _, err := s.Repository.AddViews(ctx, destinationID, allDates); err != nil {
return fmt.Errorf("adding view dates to scene %d: %w", destinationID, err)
}
}
}
// merge o history
if options.IncludeOHistory {
var allDates []time.Time
for _, src := range sources {
thisDates, err := s.Repository.GetODates(ctx, src.ID)
if err != nil {
return fmt.Errorf("getting o dates for scene %d: %w", src.ID, err)
}
allDates = append(allDates, thisDates...)
}
if len(allDates) > 0 {
if _, err := s.Repository.AddO(ctx, destinationID, allDates); err != nil {
return fmt.Errorf("adding o dates to scene %d: %w", destinationID, err)
}
}
}
// delete old scenes // delete old scenes
for _, src := range sources { for _, src := range sources {
const deleteGenerated = true const deleteGenerated = true

View file

@ -153,3 +153,20 @@ func Map[T any, V any](vs []T, f func(T) V) []V {
} }
return ret return ret
} }
// PtrsToValues dereferences every pointer in vs and returns the pointed-to
// values in the same order. Panics if any element is nil.
func PtrsToValues[T any](vs []*T) []T {
	ret := make([]T, 0, len(vs))
	for _, p := range vs {
		ret = append(ret, *p)
	}
	return ret
}
// ValuesToPtrs returns a slice of pointers to copies of the values in vs.
// Each returned pointer refers to its own copy, never into vs itself, so
// mutating vs afterwards does not affect the result.
func ValuesToPtrs[T any](vs []T) []*T {
	ret := make([]*T, len(vs))
	for i := range vs {
		// copy before taking the address so the pointer is detached from vs
		vv := vs[i]
		ret[i] = &vv
	}
	return ret
}

View file

@ -33,7 +33,7 @@ const (
dbConnTimeout = 30 dbConnTimeout = 30
) )
var appSchemaVersion uint = 54 var appSchemaVersion uint = 55
//go:embed migrations/*.sql //go:embed migrations/*.sql
var migrationsBox embed.FS var migrationsBox embed.FS

View file

@ -777,28 +777,6 @@ func (m *countCriterionHandlerBuilder) handler(criterion *models.IntCriterionInp
} }
} }
// joinedMultiSumCriterionHandlerBuilder builds a criterion handler that
// filters primary-table rows on the sum of a column aggregated across two
// different join tables (e.g. a counter summed over both a performer's
// scenes and images).
type joinedMultiSumCriterionHandlerBuilder struct {
	primaryTable  string
	foreignTable1 string
	joinTable1    string
	foreignTable2 string
	joinTable2    string
	primaryFK     string
	foreignFK1    string
	foreignFK2    string
	// sum is the column name being summed across both joined tables
	sum string
}

// handler returns a criterionHandlerFunc that, when the criterion is set,
// adds the joined multi-sum WHERE clause built from this builder's tables.
// A nil criterion adds nothing to the filter.
func (m *joinedMultiSumCriterionHandlerBuilder) handler(criterion *models.IntCriterionInput) criterionHandlerFunc {
	return func(ctx context.Context, f *filterBuilder) {
		if criterion != nil {
			clause, args := getJoinedMultiSumCriterionClause(m.primaryTable, m.foreignTable1, m.joinTable1, m.foreignTable2, m.joinTable2, m.primaryFK, m.foreignFK1, m.foreignFK2, m.sum, *criterion)
			f.addWhere(clause, args...)
		}
	}
}
// handler for StringCriterion for string list fields // handler for StringCriterion for string list fields
type stringListCriterionHandlerBuilder struct { type stringListCriterionHandlerBuilder struct {
// table joining primary and foreign objects // table joining primary and foreign objects

95
pkg/sqlite/history.go Normal file
View file

@ -0,0 +1,95 @@
package sqlite
import (
"context"
"time"
)
// viewDateManager implements scene view (play) history operations, backed
// by a viewHistoryTable. Every method is a thin delegation to the
// underlying table manager.
type viewDateManager struct {
	tableMgr *viewHistoryTable
}

// GetViewDates returns the recorded view dates for the given scene id.
func (qb *viewDateManager) GetViewDates(ctx context.Context, id int) ([]time.Time, error) {
	return qb.tableMgr.getDates(ctx, id)
}

// GetManyViewDates returns the view dates for each of the given scene ids.
func (qb *viewDateManager) GetManyViewDates(ctx context.Context, ids []int) ([][]time.Time, error) {
	return qb.tableMgr.getManyDates(ctx, ids)
}

// CountViews returns the number of view records for the given scene id.
func (qb *viewDateManager) CountViews(ctx context.Context, id int) (int, error) {
	return qb.tableMgr.getCount(ctx, id)
}

// GetManyViewCount returns the view count for each of the given scene ids.
func (qb *viewDateManager) GetManyViewCount(ctx context.Context, ids []int) ([]int, error) {
	return qb.tableMgr.getManyCount(ctx, ids)
}

// CountAllViews returns the total view count across all scenes.
func (qb *viewDateManager) CountAllViews(ctx context.Context) (int, error) {
	return qb.tableMgr.getAllCount(ctx)
}

// CountUniqueViews returns the unique view count (delegates to
// getUniqueCount — presumably distinct scenes with at least one view).
func (qb *viewDateManager) CountUniqueViews(ctx context.Context) (int, error) {
	return qb.tableMgr.getUniqueCount(ctx)
}

// LastView returns the most recent view date for the scene (delegates to
// getLastDate; a *time.Time result, so nil presumably means no views).
func (qb *viewDateManager) LastView(ctx context.Context, id int) (*time.Time, error) {
	return qb.tableMgr.getLastDate(ctx, id)
}

// GetManyLastViewed returns the most recent view date for each scene id.
func (qb *viewDateManager) GetManyLastViewed(ctx context.Context, ids []int) ([]*time.Time, error) {
	return qb.tableMgr.getManyLastDate(ctx, ids)
}

// AddViews records the given view dates for the scene.
func (qb *viewDateManager) AddViews(ctx context.Context, id int, dates []time.Time) ([]time.Time, error) {
	return qb.tableMgr.addDates(ctx, id, dates)
}

// DeleteViews removes the given view dates from the scene's history.
func (qb *viewDateManager) DeleteViews(ctx context.Context, id int, dates []time.Time) ([]time.Time, error) {
	return qb.tableMgr.deleteDates(ctx, id, dates)
}

// DeleteAllViews clears the scene's entire view history.
func (qb *viewDateManager) DeleteAllViews(ctx context.Context, id int) (int, error) {
	return qb.tableMgr.deleteAllDates(ctx, id)
}
// oDateManager implements scene o-counter history operations. It reuses the
// same generic viewHistoryTable implementation as viewDateManager, pointed
// at the o-dates table.
type oDateManager struct {
	tableMgr *viewHistoryTable
}

// GetODates returns the recorded o-dates for the given scene id.
func (qb *oDateManager) GetODates(ctx context.Context, id int) ([]time.Time, error) {
	return qb.tableMgr.getDates(ctx, id)
}

// GetManyODates returns the o-dates for each of the given scene ids.
func (qb *oDateManager) GetManyODates(ctx context.Context, ids []int) ([][]time.Time, error) {
	return qb.tableMgr.getManyDates(ctx, ids)
}

// GetOCount returns the o-count (number of o-date records) for the scene.
func (qb *oDateManager) GetOCount(ctx context.Context, id int) (int, error) {
	return qb.tableMgr.getCount(ctx, id)
}

// GetManyOCount returns the o-count for each of the given scene ids.
func (qb *oDateManager) GetManyOCount(ctx context.Context, ids []int) ([]int, error) {
	return qb.tableMgr.getManyCount(ctx, ids)
}

// GetAllOCount returns the total o-count across all scenes.
func (qb *oDateManager) GetAllOCount(ctx context.Context) (int, error) {
	return qb.tableMgr.getAllCount(ctx)
}

// GetUniqueOCount returns the unique o-count (delegates to getUniqueCount).
func (qb *oDateManager) GetUniqueOCount(ctx context.Context) (int, error) {
	return qb.tableMgr.getUniqueCount(ctx)
}

// AddO records the given o-dates for the scene.
func (qb *oDateManager) AddO(ctx context.Context, id int, dates []time.Time) ([]time.Time, error) {
	return qb.tableMgr.addDates(ctx, id, dates)
}

// DeleteO removes the given o-dates from the scene's history.
func (qb *oDateManager) DeleteO(ctx context.Context, id int, dates []time.Time) ([]time.Time, error) {
	return qb.tableMgr.deleteDates(ctx, id, dates)
}

// ResetO clears the scene's entire o-history, resetting the count to 0.
func (qb *oDateManager) ResetO(ctx context.Context, id int) (int, error) {
	return qb.tableMgr.deleteAllDates(ctx, id)
}

View file

@ -17,7 +17,7 @@ import (
"github.com/doug-martin/goqu/v9/exp" "github.com/doug-martin/goqu/v9/exp"
) )
var imageTable = "images" const imageTable = "images"
const ( const (
imageIDColumn = "image_id" imageIDColumn = "image_id"

View file

@ -0,0 +1,111 @@
-- Schema 55: replace the aggregate play_count / last_played_at / o_counter
-- columns on scenes with per-event history tables.
PRAGMA foreign_keys=OFF;

-- one row per recorded view (play) of a scene
CREATE TABLE `scenes_view_dates` (
  `scene_id` integer,
  `view_date` datetime not null,
  foreign key(`scene_id`) references `scenes`(`id`) on delete CASCADE
);

-- one row per recorded o-counter increment of a scene
CREATE TABLE `scenes_o_dates` (
  `scene_id` integer,
  `o_date` datetime not null,
  foreign key(`scene_id`) references `scenes`(`id`) on delete CASCADE
);

-- drop o_counter, play_count and last_played_at
CREATE TABLE "scenes_new" (
  `id` integer not null primary key autoincrement,
  `title` varchar(255),
  `details` text,
  `date` date,
  `rating` tinyint,
  `studio_id` integer,
  `organized` boolean not null default '0',
  `created_at` datetime not null,
  `updated_at` datetime not null,
  `code` text,
  `director` text,
  `resume_time` float not null default 0,
  `play_duration` float not null default 0,
  `cover_blob` varchar(255) REFERENCES `blobs`(`checksum`),
  foreign key(`studio_id`) references `studios`(`id`) on delete SET NULL
);

INSERT INTO `scenes_new`
  (
    `id`,
    `title`,
    `details`,
    `date`,
    `rating`,
    `studio_id`,
    `organized`,
    `created_at`,
    `updated_at`,
    `code`,
    `director`,
    `resume_time`,
    `play_duration`,
    `cover_blob`
  )
  SELECT
    `id`,
    `title`,
    `details`,
    `date`,
    `rating`,
    `studio_id`,
    `organized`,
    `created_at`,
    `updated_at`,
    `code`,
    `director`,
    `resume_time`,
    `play_duration`,
    `cover_blob`
  FROM `scenes`;

-- Backfill view history: one row per play. The most recent play gets
-- last_played_at (if set); all earlier plays fall back to created_at.
-- NOTE(review): the `numbers` CTE references itself without the RECURSIVE
-- keyword — SQLite accepts this, but it is non-portable; confirm no other
-- engine ever runs these migrations.
WITH max_view_count AS (
  SELECT MAX(play_count) AS max_count
  FROM scenes
), numbers AS (
  SELECT 1 AS n
  FROM max_view_count
  UNION ALL
  SELECT n + 1
  FROM numbers
  WHERE n < (SELECT max_count FROM max_view_count)
)
INSERT INTO scenes_view_dates (scene_id, view_date)
SELECT scenes.id,
       CASE
           WHEN numbers.n = scenes.play_count THEN COALESCE(scenes.last_played_at, scenes.created_at)
           ELSE scenes.created_at
       END AS view_date
FROM scenes
JOIN numbers
WHERE numbers.n <= scenes.play_count;

-- Backfill o history: one row per o_counter increment, all dated created_at
-- (no per-event o dates existed before this schema).
-- NOTE(review): the CASE is redundant given the WHERE clause (the WHEN
-- condition is always true for inserted rows) — kept as-is.
WITH numbers AS (
  SELECT 1 AS n
  UNION ALL
  SELECT n + 1
  FROM numbers
  WHERE n < (SELECT MAX(o_counter) FROM scenes)
)
INSERT INTO scenes_o_dates (scene_id, o_date)
SELECT scenes.id,
       CASE
           WHEN numbers.n <= scenes.o_counter THEN scenes.created_at
       END AS o_date
FROM scenes
CROSS JOIN numbers
WHERE numbers.n <= scenes.o_counter;

-- swap the rebuilt table into place and restore its index
DROP INDEX `index_scenes_on_studio_id`;
DROP TABLE `scenes`;
ALTER TABLE `scenes_new` rename to `scenes`;
CREATE INDEX `index_scenes_on_studio_id` on `scenes` (`studio_id`);

PRAGMA foreign_keys=ON;

View file

@ -0,0 +1,71 @@
package migrations
import (
"context"
"fmt"
"github.com/jmoiron/sqlx"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/sqlite"
)
// schema55Migrator performs the data fix-up that must run after the schema
// 55 SQL migration (which introduced the scenes_view_dates and
// scenes_o_dates tables).
type schema55Migrator struct {
	migrator
}

// post55 is the post-migration hook for schema version 55. It wraps the
// database handle in a schema55Migrator and runs the timestamp conversion.
func post55(ctx context.Context, db *sqlx.DB) error {
	logger.Info("Running post-migration for schema version 55")

	m := schema55Migrator{
		migrator: migrator{
			db: db,
		},
	}

	return m.migrate(ctx)
}
// migrate rewrites the view_date values in scenes_view_dates into the
// standard timestamp format. The pre-55 last_played_at column (whose values
// the SQL migration copied into scenes_view_dates) stored dates in a
// different format than the rest of the schema's timestamps.
func (m *schema55Migrator) migrate(ctx context.Context) error {
	// the last_played_at column was storing in a different format than the rest of the timestamps
	// convert the play history date to the correct format
	if err := m.withTxn(ctx, func(tx *sqlx.Tx) error {
		// NOTE(review): the queries below use m.db rather than the tx handle,
		// so this work may run outside the transaction opened by withTxn —
		// confirm whether that is intentional. Executing UPDATEs while
		// iterating rows on the same handle can also be fragile in SQLite.
		query := "SELECT DISTINCT `scene_id`, `view_date` FROM `scenes_view_dates`"

		rows, err := m.db.Query(query)
		if err != nil {
			return err
		}
		defer rows.Close()

		for rows.Next() {
			var (
				id       int
				viewDate sqlite.Timestamp
			)

			// scene_id is scanned but not used in the UPDATE below: the
			// rewrite matches on view_date alone, so a single pass updates
			// every row sharing that date (hence the DISTINCT above).
			err := rows.Scan(&id, &viewDate)
			if err != nil {
				return err
			}

			utcTimestamp := sqlite.UTCTimestamp{
				Timestamp: viewDate,
			}

			// convert the timestamp to the correct format
			if _, err := m.db.Exec("UPDATE scenes_view_dates SET view_date = ? WHERE view_date = ?", utcTimestamp, viewDate.Timestamp); err != nil {
				return fmt.Errorf("error correcting view date %s to %s: %w", viewDate.Timestamp, viewDate, err)
			}
		}

		return rows.Err()
	}); err != nil {
		return err
	}

	return nil
}
// register the post-migration hook so it runs after schema 55 is applied
func init() {
	sqlite.RegisterPostMigration(55, post55)
}

View file

@ -847,20 +847,44 @@ func performerGalleryCountCriterionHandler(qb *PerformerStore, count *models.Int
return h.handler(count) return h.handler(count)
} }
func performerOCounterCriterionHandler(qb *PerformerStore, count *models.IntCriterionInput) criterionHandlerFunc { // used for sorting and filtering on performer o-count
h := joinedMultiSumCriterionHandlerBuilder{ var selectPerformerOCountSQL = utils.StrFormat(
primaryTable: performerTable, "SELECT SUM(o_counter) "+
foreignTable1: sceneTable, "FROM ("+
joinTable1: performersScenesTable, "SELECT SUM(o_counter) as o_counter from {performers_images} s "+
foreignTable2: imageTable, "LEFT JOIN {images} ON {images}.id = s.{images_id} "+
joinTable2: performersImagesTable, "WHERE s.{performer_id} = {performers}.id "+
primaryFK: performerIDColumn, "UNION ALL "+
foreignFK1: sceneIDColumn, "SELECT COUNT({scenes_o_dates}.{o_date}) as o_counter from {performers_scenes} s "+
foreignFK2: imageIDColumn, "LEFT JOIN {scenes} ON {scenes}.id = s.{scene_id} "+
sum: "o_counter", "LEFT JOIN {scenes_o_dates} ON {scenes_o_dates}.{scene_id} = {scenes}.id "+
} "WHERE s.{performer_id} = {performers}.id "+
")",
map[string]interface{}{
"performers_images": performersImagesTable,
"images": imageTable,
"performer_id": performerIDColumn,
"images_id": imageIDColumn,
"performers": performerTable,
"performers_scenes": performersScenesTable,
"scenes": sceneTable,
"scene_id": sceneIDColumn,
"scenes_o_dates": scenesODatesTable,
"o_date": sceneODateColumn,
},
)
return h.handler(count) func performerOCounterCriterionHandler(qb *PerformerStore, count *models.IntCriterionInput) criterionHandlerFunc {
return func(ctx context.Context, f *filterBuilder) {
if count == nil {
return
}
lhs := "(" + selectPerformerOCountSQL + ")"
clause, args := getIntCriterionWhereClause(lhs, *count)
f.addWhere(clause, args...)
}
} }
func performerStudiosCriterionHandler(qb *PerformerStore, studios *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { func performerStudiosCriterionHandler(qb *PerformerStore, studios *models.HierarchicalMultiCriterionInput) criterionHandlerFunc {
@ -998,6 +1022,11 @@ func performerAppearsWithCriterionHandler(qb *PerformerStore, performers *models
} }
} }
func (qb *PerformerStore) sortByOCounter(direction string) string {
// need to sum the o_counter from scenes and images
return " ORDER BY (" + selectPerformerOCountSQL + ") " + direction
}
func (qb *PerformerStore) getPerformerSort(findFilter *models.FindFilterType) string { func (qb *PerformerStore) getPerformerSort(findFilter *models.FindFilterType) string {
var sort string var sort string
var direction string var direction string
@ -1019,12 +1048,11 @@ func (qb *PerformerStore) getPerformerSort(findFilter *models.FindFilterType) st
sortQuery += getCountSort(performerTable, performersImagesTable, performerIDColumn, direction) sortQuery += getCountSort(performerTable, performersImagesTable, performerIDColumn, direction)
case "galleries_count": case "galleries_count":
sortQuery += getCountSort(performerTable, performersGalleriesTable, performerIDColumn, direction) sortQuery += getCountSort(performerTable, performersGalleriesTable, performerIDColumn, direction)
case "o_counter":
sortQuery += qb.sortByOCounter(direction)
default: default:
sortQuery += getSort(sort, direction, "performers") sortQuery += getSort(sort, direction, "performers")
} }
if sort == "o_counter" {
return getMultiSumSort("o_counter", performerTable, sceneTable, performersScenesTable, imageTable, performersImagesTable, performerIDColumn, sceneIDColumn, imageIDColumn, direction)
}
// Whatever the sorting, always use name/id as a final sort // Whatever the sorting, always use name/id as a final sort
sortQuery += ", COALESCE(performers.name, performers.id) COLLATE NATURAL_CI ASC" sortQuery += ", COALESCE(performers.name, performers.id) COLLATE NATURAL_CI ASC"

View file

@ -93,6 +93,7 @@ func (r *updateRecord) setTimestamp(destField string, v models.OptionalTime) {
} }
} }
//nolint:golint,unused
func (r *updateRecord) setNullTimestamp(destField string, v models.OptionalTime) { func (r *updateRecord) setNullTimestamp(destField string, v models.OptionalTime) {
if v.Set { if v.Set {
r.set(destField, NullTimestampFromTimePtr(v.Ptr())) r.set(destField, NullTimestampFromTimePtr(v.Ptr()))

View file

@ -447,6 +447,14 @@ type relatedFileRow struct {
Primary bool `db:"primary"` Primary bool `db:"primary"`
} }
// idToIndexMap returns a map from each id in ids to its index within the
// slice. If an id appears more than once, the last occurrence wins.
// A nil or empty slice yields an empty map.
func idToIndexMap(ids []int) map[int]int {
	// pre-size: the final length is known up front
	ret := make(map[int]int, len(ids))
	for i, id := range ids {
		ret[id] = i
	}
	return ret
}
func (r *filesRepository) getMany(ctx context.Context, ids []int, primaryOnly bool) ([][]models.FileID, error) { func (r *filesRepository) getMany(ctx context.Context, ids []int, primaryOnly bool) ([][]models.FileID, error) {
var primaryClause string var primaryClause string
if primaryOnly { if primaryOnly {
@ -476,10 +484,7 @@ func (r *filesRepository) getMany(ctx context.Context, ids []int, primaryOnly bo
} }
ret := make([][]models.FileID, len(ids)) ret := make([][]models.FileID, len(ids))
idToIndex := make(map[int]int) idToIndex := idToIndexMap(ids)
for i, id := range ids {
idToIndex[id] = i
}
for _, row := range fileRows { for _, row := range fileRows {
id := row.ID id := row.ID

View file

@ -9,7 +9,6 @@ import (
"sort" "sort"
"strconv" "strconv"
"strings" "strings"
"time"
"github.com/doug-martin/goqu/v9" "github.com/doug-martin/goqu/v9"
"github.com/doug-martin/goqu/v9/exp" "github.com/doug-martin/goqu/v9/exp"
@ -32,6 +31,10 @@ const (
moviesScenesTable = "movies_scenes" moviesScenesTable = "movies_scenes"
scenesURLsTable = "scene_urls" scenesURLsTable = "scene_urls"
sceneURLColumn = "url" sceneURLColumn = "url"
scenesViewDatesTable = "scenes_view_dates"
sceneViewDateColumn = "view_date"
scenesODatesTable = "scenes_o_dates"
sceneODateColumn = "o_date"
sceneCoverBlobColumn = "cover_blob" sceneCoverBlobColumn = "cover_blob"
) )
@ -79,16 +82,13 @@ type sceneRow struct {
Director zero.String `db:"director"` Director zero.String `db:"director"`
Date NullDate `db:"date"` Date NullDate `db:"date"`
// expressed as 1-100 // expressed as 1-100
Rating null.Int `db:"rating"` Rating null.Int `db:"rating"`
Organized bool `db:"organized"` Organized bool `db:"organized"`
OCounter int `db:"o_counter"` StudioID null.Int `db:"studio_id,omitempty"`
StudioID null.Int `db:"studio_id,omitempty"` CreatedAt Timestamp `db:"created_at"`
CreatedAt Timestamp `db:"created_at"` UpdatedAt Timestamp `db:"updated_at"`
UpdatedAt Timestamp `db:"updated_at"` ResumeTime float64 `db:"resume_time"`
LastPlayedAt NullTimestamp `db:"last_played_at"` PlayDuration float64 `db:"play_duration"`
ResumeTime float64 `db:"resume_time"`
PlayDuration float64 `db:"play_duration"`
PlayCount int `db:"play_count"`
// not used in resolutions or updates // not used in resolutions or updates
CoverBlob zero.String `db:"cover_blob"` CoverBlob zero.String `db:"cover_blob"`
@ -103,14 +103,11 @@ func (r *sceneRow) fromScene(o models.Scene) {
r.Date = NullDateFromDatePtr(o.Date) r.Date = NullDateFromDatePtr(o.Date)
r.Rating = intFromPtr(o.Rating) r.Rating = intFromPtr(o.Rating)
r.Organized = o.Organized r.Organized = o.Organized
r.OCounter = o.OCounter
r.StudioID = intFromPtr(o.StudioID) r.StudioID = intFromPtr(o.StudioID)
r.CreatedAt = Timestamp{Timestamp: o.CreatedAt} r.CreatedAt = Timestamp{Timestamp: o.CreatedAt}
r.UpdatedAt = Timestamp{Timestamp: o.UpdatedAt} r.UpdatedAt = Timestamp{Timestamp: o.UpdatedAt}
r.LastPlayedAt = NullTimestampFromTimePtr(o.LastPlayedAt)
r.ResumeTime = o.ResumeTime r.ResumeTime = o.ResumeTime
r.PlayDuration = o.PlayDuration r.PlayDuration = o.PlayDuration
r.PlayCount = o.PlayCount
} }
type sceneQueryRow struct { type sceneQueryRow struct {
@ -132,7 +129,6 @@ func (r *sceneQueryRow) resolve() *models.Scene {
Date: r.Date.DatePtr(), Date: r.Date.DatePtr(),
Rating: nullIntPtr(r.Rating), Rating: nullIntPtr(r.Rating),
Organized: r.Organized, Organized: r.Organized,
OCounter: r.OCounter,
StudioID: nullIntPtr(r.StudioID), StudioID: nullIntPtr(r.StudioID),
PrimaryFileID: nullIntFileIDPtr(r.PrimaryFileID), PrimaryFileID: nullIntFileIDPtr(r.PrimaryFileID),
@ -142,10 +138,8 @@ func (r *sceneQueryRow) resolve() *models.Scene {
CreatedAt: r.CreatedAt.Timestamp, CreatedAt: r.CreatedAt.Timestamp,
UpdatedAt: r.UpdatedAt.Timestamp, UpdatedAt: r.UpdatedAt.Timestamp,
LastPlayedAt: r.LastPlayedAt.TimePtr(),
ResumeTime: r.ResumeTime, ResumeTime: r.ResumeTime,
PlayDuration: r.PlayDuration, PlayDuration: r.PlayDuration,
PlayCount: r.PlayCount,
} }
if r.PrimaryFileFolderPath.Valid && r.PrimaryFileBasename.Valid { if r.PrimaryFileFolderPath.Valid && r.PrimaryFileBasename.Valid {
@ -167,14 +161,11 @@ func (r *sceneRowRecord) fromPartial(o models.ScenePartial) {
r.setNullDate("date", o.Date) r.setNullDate("date", o.Date)
r.setNullInt("rating", o.Rating) r.setNullInt("rating", o.Rating)
r.setBool("organized", o.Organized) r.setBool("organized", o.Organized)
r.setInt("o_counter", o.OCounter)
r.setNullInt("studio_id", o.StudioID) r.setNullInt("studio_id", o.StudioID)
r.setTimestamp("created_at", o.CreatedAt) r.setTimestamp("created_at", o.CreatedAt)
r.setTimestamp("updated_at", o.UpdatedAt) r.setTimestamp("updated_at", o.UpdatedAt)
r.setNullTimestamp("last_played_at", o.LastPlayedAt)
r.setFloat64("resume_time", o.ResumeTime) r.setFloat64("resume_time", o.ResumeTime)
r.setFloat64("play_duration", o.PlayDuration) r.setFloat64("play_duration", o.PlayDuration)
r.setInt("play_count", o.PlayCount)
} }
type SceneStore struct { type SceneStore struct {
@ -182,7 +173,8 @@ type SceneStore struct {
blobJoinQueryBuilder blobJoinQueryBuilder
tableMgr *table tableMgr *table
oCounterManager oDateManager
viewDateManager
fileStore *FileStore fileStore *FileStore
} }
@ -199,7 +191,8 @@ func NewSceneStore(fileStore *FileStore, blobStore *BlobStore) *SceneStore {
}, },
tableMgr: sceneTableMgr, tableMgr: sceneTableMgr,
oCounterManager: oCounterManager{sceneTableMgr}, viewDateManager: viewDateManager{scenesViewTableMgr},
oDateManager: oDateManager{scenesOTableMgr},
fileStore: fileStore, fileStore: fileStore,
} }
} }
@ -710,20 +703,18 @@ func (qb *SceneStore) CountByPerformerID(ctx context.Context, performerID int) (
func (qb *SceneStore) OCountByPerformerID(ctx context.Context, performerID int) (int, error) { func (qb *SceneStore) OCountByPerformerID(ctx context.Context, performerID int) (int, error) {
table := qb.table() table := qb.table()
joinTable := scenesPerformersJoinTable joinTable := scenesPerformersJoinTable
oHistoryTable := goqu.T(scenesODatesTable)
q := dialect.Select(goqu.COALESCE(goqu.SUM("o_counter"), 0)).From(table).InnerJoin(joinTable, goqu.On(table.Col(idColumn).Eq(joinTable.Col(sceneIDColumn)))).Where(joinTable.Col(performerIDColumn).Eq(performerID)) q := dialect.Select(goqu.COUNT("*")).From(table).InnerJoin(
var ret int oHistoryTable,
if err := querySimple(ctx, q, &ret); err != nil { goqu.On(table.Col(idColumn).Eq(oHistoryTable.Col(sceneIDColumn))),
return 0, err ).InnerJoin(
} joinTable,
goqu.On(
table.Col(idColumn).Eq(joinTable.Col(sceneIDColumn)),
),
).Where(joinTable.Col(performerIDColumn).Eq(performerID))
return ret, nil
}
func (qb *SceneStore) OCount(ctx context.Context) (int, error) {
table := qb.table()
q := dialect.Select(goqu.COALESCE(goqu.SUM("o_counter"), 0)).From(table)
var ret int var ret int
if err := querySimple(ctx, q, &ret); err != nil { if err := querySimple(ctx, q, &ret); err != nil {
return 0, err return 0, err
@ -757,24 +748,6 @@ func (qb *SceneStore) Count(ctx context.Context) (int, error) {
return count(ctx, q) return count(ctx, q)
} }
func (qb *SceneStore) PlayCount(ctx context.Context) (int, error) {
q := dialect.Select(goqu.COALESCE(goqu.SUM("play_count"), 0)).From(qb.table())
var ret int
if err := querySimple(ctx, q, &ret); err != nil {
return 0, err
}
return ret, nil
}
func (qb *SceneStore) UniqueScenePlayCount(ctx context.Context) (int, error) {
table := qb.table()
q := dialect.Select(goqu.COUNT("*")).From(table).Where(table.Col("play_count").Gt(0))
return count(ctx, q)
}
func (qb *SceneStore) Size(ctx context.Context) (float64, error) { func (qb *SceneStore) Size(ctx context.Context) (float64, error) {
table := qb.table() table := qb.table()
fileTable := fileTableMgr.table fileTable := fileTableMgr.table
@ -977,7 +950,7 @@ func (qb *SceneStore) makeFilter(ctx context.Context, sceneFilter *models.SceneF
query.handleCriterion(ctx, scenePhashDistanceCriterionHandler(qb, sceneFilter.PhashDistance)) query.handleCriterion(ctx, scenePhashDistanceCriterionHandler(qb, sceneFilter.PhashDistance))
query.handleCriterion(ctx, intCriterionHandler(sceneFilter.Rating100, "scenes.rating", nil)) query.handleCriterion(ctx, intCriterionHandler(sceneFilter.Rating100, "scenes.rating", nil))
query.handleCriterion(ctx, intCriterionHandler(sceneFilter.OCounter, "scenes.o_counter", nil)) query.handleCriterion(ctx, sceneOCountCriterionHandler(sceneFilter.OCounter))
query.handleCriterion(ctx, boolCriterionHandler(sceneFilter.Organized, "scenes.organized", nil)) query.handleCriterion(ctx, boolCriterionHandler(sceneFilter.Organized, "scenes.organized", nil))
query.handleCriterion(ctx, floatIntCriterionHandler(sceneFilter.Duration, "video_files.duration", qb.addVideoFilesTable)) query.handleCriterion(ctx, floatIntCriterionHandler(sceneFilter.Duration, "video_files.duration", qb.addVideoFilesTable))
@ -1011,7 +984,7 @@ func (qb *SceneStore) makeFilter(ctx context.Context, sceneFilter *models.SceneF
query.handleCriterion(ctx, floatIntCriterionHandler(sceneFilter.ResumeTime, "scenes.resume_time", nil)) query.handleCriterion(ctx, floatIntCriterionHandler(sceneFilter.ResumeTime, "scenes.resume_time", nil))
query.handleCriterion(ctx, floatIntCriterionHandler(sceneFilter.PlayDuration, "scenes.play_duration", nil)) query.handleCriterion(ctx, floatIntCriterionHandler(sceneFilter.PlayDuration, "scenes.play_duration", nil))
query.handleCriterion(ctx, intCriterionHandler(sceneFilter.PlayCount, "scenes.play_count", nil)) query.handleCriterion(ctx, scenePlayCountCriterionHandler(sceneFilter.PlayCount))
query.handleCriterion(ctx, sceneTagsCriterionHandler(qb, sceneFilter.Tags)) query.handleCriterion(ctx, sceneTagsCriterionHandler(qb, sceneFilter.Tags))
query.handleCriterion(ctx, sceneTagCountCriterionHandler(qb, sceneFilter.TagCount)) query.handleCriterion(ctx, sceneTagCountCriterionHandler(qb, sceneFilter.TagCount))
@ -1194,6 +1167,26 @@ func (qb *SceneStore) QueryCount(ctx context.Context, sceneFilter *models.SceneF
return query.executeCount(ctx) return query.executeCount(ctx)
} }
// scenePlayCountCriterionHandler builds a filter criterion on a scene's
// play count, which is the number of rows in scenes_view_dates for the
// scene.
func scenePlayCountCriterionHandler(count *models.IntCriterionInput) criterionHandlerFunc {
	builder := countCriterionHandlerBuilder{
		primaryFK:    sceneIDColumn,
		primaryTable: sceneTable,
		joinTable:    scenesViewDatesTable,
	}

	return builder.handler(count)
}
// sceneOCountCriterionHandler builds a filter criterion on a scene's
// o-counter, which is the number of rows in scenes_o_dates for the scene.
func sceneOCountCriterionHandler(count *models.IntCriterionInput) criterionHandlerFunc {
	builder := countCriterionHandlerBuilder{
		primaryFK:    sceneIDColumn,
		primaryTable: sceneTable,
		joinTable:    scenesODatesTable,
	}

	return builder.handler(count)
}
func sceneFileCountCriterionHandler(qb *SceneStore, fileCount *models.IntCriterionInput) criterionHandlerFunc { func sceneFileCountCriterionHandler(qb *SceneStore, fileCount *models.IntCriterionInput) criterionHandlerFunc {
h := countCriterionHandlerBuilder{ h := countCriterionHandlerBuilder{
primaryTable: sceneTable, primaryTable: sceneTable,
@ -1600,8 +1593,11 @@ func (qb *SceneStore) setSceneSort(query *queryBuilder, findFilter *models.FindF
addFolderTable() addFolderTable()
query.sortAndPagination += " ORDER BY COALESCE(scenes.title, files.basename) COLLATE NATURAL_CI " + direction + ", folders.path COLLATE NATURAL_CI " + direction query.sortAndPagination += " ORDER BY COALESCE(scenes.title, files.basename) COLLATE NATURAL_CI " + direction + ", folders.path COLLATE NATURAL_CI " + direction
case "play_count": case "play_count":
// handle here since getSort has special handling for _count suffix query.sortAndPagination += getCountSort(sceneTable, scenesViewDatesTable, sceneIDColumn, direction)
query.sortAndPagination += " ORDER BY scenes.play_count " + direction case "last_played_at":
query.sortAndPagination += fmt.Sprintf(" ORDER BY (SELECT MAX(view_date) FROM %s AS sort WHERE sort.%s = %s.id) %s", scenesViewDatesTable, sceneIDColumn, sceneTable, getSortDirection(direction))
case "o_counter":
query.sortAndPagination += getCountSort(sceneTable, scenesODatesTable, sceneIDColumn, direction)
default: default:
query.sortAndPagination += getSort(sort, direction, "scenes") query.sortAndPagination += getSort(sort, direction, "scenes")
} }
@ -1610,23 +1606,6 @@ func (qb *SceneStore) setSceneSort(query *queryBuilder, findFilter *models.FindF
query.sortAndPagination += ", COALESCE(scenes.title, scenes.id) COLLATE NATURAL_CI ASC" query.sortAndPagination += ", COALESCE(scenes.title, scenes.id) COLLATE NATURAL_CI ASC"
} }
func (qb *SceneStore) getPlayCount(ctx context.Context, id int) (int, error) {
q := dialect.From(qb.tableMgr.table).Select("play_count").Where(goqu.Ex{"id": id})
const single = true
var ret int
if err := queryFunc(ctx, q, single, func(rows *sqlx.Rows) error {
if err := rows.Scan(&ret); err != nil {
return err
}
return nil
}); err != nil {
return 0, err
}
return ret, nil
}
func (qb *SceneStore) SaveActivity(ctx context.Context, id int, resumeTime *float64, playDuration *float64) (bool, error) { func (qb *SceneStore) SaveActivity(ctx context.Context, id int, resumeTime *float64, playDuration *float64) (bool, error) {
if err := qb.tableMgr.checkIDExists(ctx, id); err != nil { if err := qb.tableMgr.checkIDExists(ctx, id); err != nil {
return false, err return false, err
@ -1651,21 +1630,6 @@ func (qb *SceneStore) SaveActivity(ctx context.Context, id int, resumeTime *floa
return true, nil return true, nil
} }
func (qb *SceneStore) IncrementWatchCount(ctx context.Context, id int) (int, error) {
if err := qb.tableMgr.checkIDExists(ctx, id); err != nil {
return 0, err
}
if err := qb.tableMgr.updateByID(ctx, id, goqu.Record{
"play_count": goqu.L("play_count + 1"),
"last_played_at": time.Now(),
}); err != nil {
return 0, err
}
return qb.getPlayCount(ctx, id)
}
func (qb *SceneStore) GetURLs(ctx context.Context, sceneID int) ([]string, error) { func (qb *SceneStore) GetURLs(ctx context.Context, sceneID int) ([]string, error) {
return scenesURLsTableMgr.get(ctx, sceneID) return scenesURLsTableMgr.get(ctx, sceneID)
} }

View file

@ -82,10 +82,7 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
director = "director" director = "director"
url = "url" url = "url"
rating = 60 rating = 60
ocounter = 5
lastPlayedAt = time.Date(2002, 1, 1, 0, 0, 0, 0, time.UTC)
resumeTime = 10.0 resumeTime = 10.0
playCount = 3
playDuration = 34.0 playDuration = 34.0
createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
@ -117,7 +114,6 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
Date: &date, Date: &date,
Rating: &rating, Rating: &rating,
Organized: true, Organized: true,
OCounter: ocounter,
StudioID: &studioIDs[studioIdxWithScene], StudioID: &studioIDs[studioIdxWithScene],
CreatedAt: createdAt, CreatedAt: createdAt,
UpdatedAt: updatedAt, UpdatedAt: updatedAt,
@ -144,9 +140,7 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
Endpoint: endpoint2, Endpoint: endpoint2,
}, },
}), }),
LastPlayedAt: &lastPlayedAt,
ResumeTime: float64(resumeTime), ResumeTime: float64(resumeTime),
PlayCount: playCount,
PlayDuration: playDuration, PlayDuration: playDuration,
}, },
false, false,
@ -162,7 +156,6 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
Date: &date, Date: &date,
Rating: &rating, Rating: &rating,
Organized: true, Organized: true,
OCounter: ocounter,
StudioID: &studioIDs[studioIdxWithScene], StudioID: &studioIDs[studioIdxWithScene],
Files: models.NewRelatedVideoFiles([]*models.VideoFile{ Files: models.NewRelatedVideoFiles([]*models.VideoFile{
videoFile.(*models.VideoFile), videoFile.(*models.VideoFile),
@ -192,9 +185,7 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
Endpoint: endpoint2, Endpoint: endpoint2,
}, },
}), }),
LastPlayedAt: &lastPlayedAt,
ResumeTime: resumeTime, ResumeTime: resumeTime,
PlayCount: playCount,
PlayDuration: playDuration, PlayDuration: playDuration,
}, },
false, false,
@ -321,10 +312,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
director = "director" director = "director"
url = "url" url = "url"
rating = 60 rating = 60
ocounter = 5
lastPlayedAt = time.Date(2002, 1, 1, 0, 0, 0, 0, time.UTC)
resumeTime = 10.0 resumeTime = 10.0
playCount = 3
playDuration = 34.0 playDuration = 34.0
createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
@ -355,7 +343,6 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
Date: &date, Date: &date,
Rating: &rating, Rating: &rating,
Organized: true, Organized: true,
OCounter: ocounter,
StudioID: &studioIDs[studioIdxWithScene], StudioID: &studioIDs[studioIdxWithScene],
CreatedAt: createdAt, CreatedAt: createdAt,
UpdatedAt: updatedAt, UpdatedAt: updatedAt,
@ -382,9 +369,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
Endpoint: endpoint2, Endpoint: endpoint2,
}, },
}), }),
LastPlayedAt: &lastPlayedAt,
ResumeTime: resumeTime, ResumeTime: resumeTime,
PlayCount: playCount,
PlayDuration: playDuration, PlayDuration: playDuration,
}, },
false, false,
@ -537,10 +522,7 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
director = "director" director = "director"
url = "url" url = "url"
rating = 60 rating = 60
ocounter = 5
lastPlayedAt = time.Date(2002, 1, 1, 0, 0, 0, 0, time.UTC)
resumeTime = 10.0 resumeTime = 10.0
playCount = 3
playDuration = 34.0 playDuration = 34.0
createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
@ -576,7 +558,6 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
Date: models.NewOptionalDate(date), Date: models.NewOptionalDate(date),
Rating: models.NewOptionalInt(rating), Rating: models.NewOptionalInt(rating),
Organized: models.NewOptionalBool(true), Organized: models.NewOptionalBool(true),
OCounter: models.NewOptionalInt(ocounter),
StudioID: models.NewOptionalInt(studioIDs[studioIdxWithScene]), StudioID: models.NewOptionalInt(studioIDs[studioIdxWithScene]),
CreatedAt: models.NewOptionalTime(createdAt), CreatedAt: models.NewOptionalTime(createdAt),
UpdatedAt: models.NewOptionalTime(updatedAt), UpdatedAt: models.NewOptionalTime(updatedAt),
@ -618,9 +599,7 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
}, },
Mode: models.RelationshipUpdateModeSet, Mode: models.RelationshipUpdateModeSet,
}, },
LastPlayedAt: models.NewOptionalTime(lastPlayedAt),
ResumeTime: models.NewOptionalFloat64(resumeTime), ResumeTime: models.NewOptionalFloat64(resumeTime),
PlayCount: models.NewOptionalInt(playCount),
PlayDuration: models.NewOptionalFloat64(playDuration), PlayDuration: models.NewOptionalFloat64(playDuration),
}, },
models.Scene{ models.Scene{
@ -636,7 +615,6 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
Date: &date, Date: &date,
Rating: &rating, Rating: &rating,
Organized: true, Organized: true,
OCounter: ocounter,
StudioID: &studioIDs[studioIdxWithScene], StudioID: &studioIDs[studioIdxWithScene],
CreatedAt: createdAt, CreatedAt: createdAt,
UpdatedAt: updatedAt, UpdatedAt: updatedAt,
@ -663,9 +641,7 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
Endpoint: endpoint2, Endpoint: endpoint2,
}, },
}), }),
LastPlayedAt: &lastPlayedAt,
ResumeTime: resumeTime, ResumeTime: resumeTime,
PlayCount: playCount,
PlayDuration: playDuration, PlayDuration: playDuration,
}, },
false, false,
@ -675,8 +651,7 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
sceneIDs[sceneIdxWithSpacedName], sceneIDs[sceneIdxWithSpacedName],
clearScenePartial(), clearScenePartial(),
models.Scene{ models.Scene{
ID: sceneIDs[sceneIdxWithSpacedName], ID: sceneIDs[sceneIdxWithSpacedName],
OCounter: getOCounter(sceneIdxWithSpacedName),
Files: models.NewRelatedVideoFiles([]*models.VideoFile{ Files: models.NewRelatedVideoFiles([]*models.VideoFile{
makeSceneFile(sceneIdxWithSpacedName), makeSceneFile(sceneIdxWithSpacedName),
}), }),
@ -685,9 +660,7 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
PerformerIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}),
Movies: models.NewRelatedMovies([]models.MoviesScenes{}), Movies: models.NewRelatedMovies([]models.MoviesScenes{}),
StashIDs: models.NewRelatedStashIDs([]models.StashID{}), StashIDs: models.NewRelatedStashIDs([]models.StashID{}),
PlayCount: getScenePlayCount(sceneIdxWithSpacedName),
PlayDuration: getScenePlayDuration(sceneIdxWithSpacedName), PlayDuration: getScenePlayDuration(sceneIdxWithSpacedName),
LastPlayedAt: getSceneLastPlayed(sceneIdxWithSpacedName),
ResumeTime: getSceneResumeTime(sceneIdxWithSpacedName), ResumeTime: getSceneResumeTime(sceneIdxWithSpacedName),
}, },
false, false,
@ -1296,7 +1269,7 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
} }
} }
func Test_sceneQueryBuilder_IncrementOCounter(t *testing.T) { func Test_sceneQueryBuilder_AddO(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
id int id int
@ -1306,52 +1279,9 @@ func Test_sceneQueryBuilder_IncrementOCounter(t *testing.T) {
{ {
"increment", "increment",
sceneIDs[1], sceneIDs[1],
2,
false,
},
{
"invalid",
invalidID,
0,
true,
},
}
qb := db.Scene
for _, tt := range tests {
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
got, err := qb.IncrementOCounter(ctx, tt.id)
if (err != nil) != tt.wantErr {
t.Errorf("sceneQueryBuilder.IncrementOCounter() error = %v, wantErr %v", err, tt.wantErr)
return
}
if got != tt.want {
t.Errorf("sceneQueryBuilder.IncrementOCounter() = %v, want %v", got, tt.want)
}
})
}
}
func Test_sceneQueryBuilder_DecrementOCounter(t *testing.T) {
tests := []struct {
name string
id int
want int
wantErr bool
}{
{
"decrement",
sceneIDs[2],
1, 1,
false, false,
}, },
{
"zero",
sceneIDs[0],
0,
false,
},
{ {
"invalid", "invalid",
invalidID, invalidID,
@ -1364,19 +1294,19 @@ func Test_sceneQueryBuilder_DecrementOCounter(t *testing.T) {
for _, tt := range tests { for _, tt := range tests {
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
got, err := qb.DecrementOCounter(ctx, tt.id) got, err := qb.AddO(ctx, tt.id, nil)
if (err != nil) != tt.wantErr { if (err != nil) != tt.wantErr {
t.Errorf("sceneQueryBuilder.DecrementOCounter() error = %v, wantErr %v", err, tt.wantErr) t.Errorf("sceneQueryBuilder.AddO() error = %v, wantErr %v", err, tt.wantErr)
return return
} }
if got != tt.want { if len(got) != tt.want {
t.Errorf("sceneQueryBuilder.DecrementOCounter() = %v, want %v", got, tt.want) t.Errorf("sceneQueryBuilder.AddO() = %v, want %v", got, tt.want)
} }
}) })
} }
} }
func Test_sceneQueryBuilder_ResetOCounter(t *testing.T) { func Test_sceneQueryBuilder_DeleteO(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
id int id int
@ -1395,11 +1325,42 @@ func Test_sceneQueryBuilder_ResetOCounter(t *testing.T) {
0, 0,
false, false,
}, },
}
qb := db.Scene
for _, tt := range tests {
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
got, err := qb.DeleteO(ctx, tt.id, nil)
if (err != nil) != tt.wantErr {
t.Errorf("sceneQueryBuilder.DeleteO() error = %v, wantErr %v", err, tt.wantErr)
return
}
if len(got) != tt.want {
t.Errorf("sceneQueryBuilder.DeleteO() = %v, want %v", got, tt.want)
}
})
}
}
func Test_sceneQueryBuilder_ResetO(t *testing.T) {
tests := []struct {
name string
id int
want int
wantErr bool
}{
{ {
"invalid", "decrement",
invalidID, sceneIDs[2],
0, 0,
true, false,
},
{
"zero",
sceneIDs[0],
0,
false,
}, },
} }
@ -1407,9 +1368,9 @@ func Test_sceneQueryBuilder_ResetOCounter(t *testing.T) {
for _, tt := range tests { for _, tt := range tests {
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
got, err := qb.ResetOCounter(ctx, tt.id) got, err := qb.ResetO(ctx, tt.id)
if (err != nil) != tt.wantErr { if (err != nil) != tt.wantErr {
t.Errorf("sceneQueryBuilder.ResetOCounter() error = %v, wantErr %v", err, tt.wantErr) t.Errorf("sceneQueryBuilder.ResetO() error = %v, wantErr %v", err, tt.wantErr)
return return
} }
if got != tt.want { if got != tt.want {
@ -1419,6 +1380,10 @@ func Test_sceneQueryBuilder_ResetOCounter(t *testing.T) {
} }
} }
// Test_sceneQueryBuilder_ResetWatchCount is not yet implemented.
// Skipping explicitly (rather than silently returning) makes the missing
// coverage visible in test output.
func Test_sceneQueryBuilder_ResetWatchCount(t *testing.T) {
	t.Skip("TODO: ResetWatchCount test not implemented")
}
func Test_sceneQueryBuilder_Destroy(t *testing.T) { func Test_sceneQueryBuilder_Destroy(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
@ -2158,19 +2123,19 @@ func TestSceneQuery(t *testing.T) {
[]int{sceneIdxWithMovie}, []int{sceneIdxWithMovie},
false, false,
}, },
{ // {
"specific play count", // "specific play count",
nil, // nil,
&models.SceneFilterType{ // &models.SceneFilterType{
PlayCount: &models.IntCriterionInput{ // PlayCount: &models.IntCriterionInput{
Modifier: models.CriterionModifierEquals, // Modifier: models.CriterionModifierEquals,
Value: getScenePlayCount(sceneIdxWithGallery), // Value: getScenePlayCount(sceneIdxWithGallery),
}, // },
}, // },
[]int{sceneIdxWithGallery}, // []int{sceneIdxWithGallery},
[]int{sceneIdxWithMovie}, // []int{sceneIdxWithMovie},
false, // false,
}, // },
{ {
"stash id with endpoint", "stash id with endpoint",
nil, nil,
@ -2767,7 +2732,11 @@ func verifyScenesOCounter(t *testing.T, oCounterCriterion models.IntCriterionInp
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil) scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
for _, scene := range scenes { for _, scene := range scenes {
verifyInt(t, scene.OCounter, oCounterCriterion) count, err := sqb.GetOCount(ctx, scene.ID)
if err != nil {
t.Errorf("Error getting ocounter: %v", err)
}
verifyInt(t, count, oCounterCriterion)
} }
return nil return nil
@ -4023,14 +3992,14 @@ func TestSceneQuerySorting(t *testing.T) {
"play_count", "play_count",
"play_count", "play_count",
models.SortDirectionEnumDesc, models.SortDirectionEnumDesc,
sceneIDs[sceneIdx1WithPerformer], -1,
-1, -1,
}, },
{ {
"last_played_at", "last_played_at",
"last_played_at", "last_played_at",
models.SortDirectionEnumDesc, models.SortDirectionEnumDesc,
sceneIDs[sceneIdx1WithPerformer], -1,
-1, -1,
}, },
{ {
@ -4551,7 +4520,7 @@ func TestSceneStore_AssignFiles(t *testing.T) {
} }
} }
func TestSceneStore_IncrementWatchCount(t *testing.T) { func TestSceneStore_AddView(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
sceneID int sceneID int
@ -4561,7 +4530,7 @@ func TestSceneStore_IncrementWatchCount(t *testing.T) {
{ {
"valid", "valid",
sceneIDs[sceneIdx1WithPerformer], sceneIDs[sceneIdx1WithPerformer],
getScenePlayCount(sceneIdx1WithPerformer) + 1, 1, //getScenePlayCount(sceneIdx1WithPerformer) + 1,
false, false,
}, },
{ {
@ -4577,9 +4546,9 @@ func TestSceneStore_IncrementWatchCount(t *testing.T) {
for _, tt := range tests { for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) { t.Run(tt.name, func(t *testing.T) {
withRollbackTxn(func(ctx context.Context) error { withRollbackTxn(func(ctx context.Context) error {
newVal, err := qb.IncrementWatchCount(ctx, tt.sceneID) views, err := qb.AddViews(ctx, tt.sceneID, nil)
if (err != nil) != tt.wantErr { if (err != nil) != tt.wantErr {
t.Errorf("SceneStore.IncrementWatchCount() error = %v, wantErr %v", err, tt.wantErr) t.Errorf("SceneStore.AddView() error = %v, wantErr %v", err, tt.wantErr)
} }
if err != nil { if err != nil {
@ -4587,16 +4556,21 @@ func TestSceneStore_IncrementWatchCount(t *testing.T) {
} }
assert := assert.New(t) assert := assert.New(t)
assert.Equal(tt.expectedCount, newVal) assert.Equal(tt.expectedCount, len(views))
// find the scene and check the count // find the scene and check the count
scene, err := qb.Find(ctx, tt.sceneID) count, err := qb.CountViews(ctx, tt.sceneID)
if err != nil { if err != nil {
t.Errorf("SceneStore.Find() error = %v", err) t.Errorf("SceneStore.CountViews() error = %v", err)
} }
assert.Equal(tt.expectedCount, scene.PlayCount) lastView, err := qb.LastView(ctx, tt.sceneID)
assert.True(scene.LastPlayedAt.After(time.Now().Add(-1 * time.Minute))) if err != nil {
t.Errorf("SceneStore.LastView() error = %v", err)
}
assert.Equal(tt.expectedCount, count)
assert.True(lastView.After(time.Now().Add(-1 * time.Minute)))
return nil return nil
}) })
@ -4604,6 +4578,10 @@ func TestSceneStore_IncrementWatchCount(t *testing.T) {
} }
} }
func TestSceneStore_DecrementWatchCount(t *testing.T) {
return
}
func TestSceneStore_SaveActivity(t *testing.T) { func TestSceneStore_SaveActivity(t *testing.T) {
var ( var (
resumeTime = 111.2 resumeTime = 111.2
@ -4702,3 +4680,77 @@ func TestSceneStore_SaveActivity(t *testing.T) {
// TODO Count // TODO Count
// TODO SizeCount // TODO SizeCount
// TODO - this should be in history_test and generalised
// TestSceneStore_CountAllViews verifies that recording a single view
// increases the total (non-unique) view count across all scenes by one.
func TestSceneStore_CountAllViews(t *testing.T) {
	withRollbackTxn(func(ctx context.Context) error {
		store := db.Scene
		id := sceneIDs[sceneIdx1WithPerformer]

		// baseline count before recording the view
		before, err := store.CountAllViews(ctx)
		if err != nil {
			t.Errorf("SceneStore.CountAllViews() error = %v", err)
			return nil
		}

		// record a single view (nil times = use the current time)
		if _, err = store.AddViews(ctx, id, nil); err != nil {
			t.Errorf("SceneStore.AddViews() error = %v", err)
			return nil
		}

		after, err := store.CountAllViews(ctx)
		if err != nil {
			t.Errorf("SceneStore.CountAllViews() error = %v", err)
			return nil
		}

		assert.Equal(t, before+1, after)

		return nil
	})
}
// TestSceneStore_CountUniqueViews verifies that multiple views of the same
// scene only increase the unique (per-scene) view count by one.
func TestSceneStore_CountUniqueViews(t *testing.T) {
	withRollbackTxn(func(ctx context.Context) error {
		store := db.Scene
		id := sceneIDs[sceneIdx1WithPerformer]

		// baseline unique count before recording any views
		before, err := store.CountUniqueViews(ctx)
		if err != nil {
			t.Errorf("SceneStore.CountUniqueViews() error = %v", err)
			return nil
		}

		// two views of the same scene should bump the unique count only once
		for i := 0; i < 2; i++ {
			if _, err = store.AddViews(ctx, id, nil); err != nil {
				t.Errorf("SceneStore.AddViews() error = %v", err)
				return nil
			}
		}

		after, err := store.CountUniqueViews(ctx)
		if err != nil {
			t.Errorf("SceneStore.CountUniqueViews() error = %v", err)
			return nil
		}

		assert.Equal(t, before+1, after)

		return nil
	})
}

View file

@ -1012,10 +1012,6 @@ func makeSceneFile(i int) *models.VideoFile {
} }
} }
func getScenePlayCount(index int) int {
return index % 5
}
func getScenePlayDuration(index int) float64 { func getScenePlayDuration(index int) float64 {
if index%5 == 0 { if index%5 == 0 {
return 0 return 0
@ -1032,15 +1028,6 @@ func getSceneResumeTime(index int) float64 {
return float64(index%5) * 1.2 return float64(index%5) * 1.2
} }
func getSceneLastPlayed(index int) *time.Time {
if index%5 == 0 {
return nil
}
t := time.Date(2020, 1, index%5, 1, 2, 3, 0, time.UTC)
return &t
}
func makeScene(i int) *models.Scene { func makeScene(i int) *models.Scene {
title := getSceneTitle(i) title := getSceneTitle(i)
details := getSceneStringValue(i, "Details") details := getSceneStringValue(i, "Details")
@ -1073,7 +1060,6 @@ func makeScene(i int) *models.Scene {
getSceneEmptyString(i, urlField), getSceneEmptyString(i, urlField),
}), }),
Rating: getIntPtr(rating), Rating: getIntPtr(rating),
OCounter: getOCounter(i),
Date: getObjectDate(i), Date: getObjectDate(i),
StudioID: studioID, StudioID: studioID,
GalleryIDs: models.NewRelatedIDs(gids), GalleryIDs: models.NewRelatedIDs(gids),
@ -1083,9 +1069,7 @@ func makeScene(i int) *models.Scene {
StashIDs: models.NewRelatedStashIDs([]models.StashID{ StashIDs: models.NewRelatedStashIDs([]models.StashID{
sceneStashID(i), sceneStashID(i),
}), }),
PlayCount: getScenePlayCount(i),
PlayDuration: getScenePlayDuration(i), PlayDuration: getScenePlayDuration(i),
LastPlayedAt: getSceneLastPlayed(i),
ResumeTime: getSceneResumeTime(i), ResumeTime: getSceneResumeTime(i),
} }
} }

View file

@ -110,27 +110,6 @@ func getCountSort(primaryTable, joinTable, primaryFK, direction string) string {
return fmt.Sprintf(" ORDER BY (SELECT COUNT(*) FROM %s AS sort WHERE sort.%s = %s.id) %s", joinTable, primaryFK, primaryTable, getSortDirection(direction)) return fmt.Sprintf(" ORDER BY (SELECT COUNT(*) FROM %s AS sort WHERE sort.%s = %s.id) %s", joinTable, primaryFK, primaryTable, getSortDirection(direction))
} }
func getMultiSumSort(sum string, primaryTable, foreignTable1, joinTable1, foreignTable2, joinTable2, primaryFK, foreignFK1, foreignFK2, direction string) string {
return fmt.Sprintf(" ORDER BY (SELECT SUM(%s) "+
"FROM ("+
"SELECT SUM(%s) as %s from %s s "+
"LEFT JOIN %s ON %s.id = s.%s "+
"WHERE s.%s = %s.id "+
"UNION ALL "+
"SELECT SUM(%s) as %s from %s s "+
"LEFT JOIN %s ON %s.id = s.%s "+
"WHERE s.%s = %s.id "+
")) %s",
sum,
sum, sum, joinTable1,
foreignTable1, foreignTable1, foreignFK1,
primaryFK, primaryTable,
sum, sum, joinTable2,
foreignTable2, foreignTable2, foreignFK2,
primaryFK, primaryTable,
getSortDirection(direction))
}
func getStringSearchClause(columns []string, q string, not bool) sqlClause { func getStringSearchClause(columns []string, q string, not bool) sqlClause {
var likeClauses []string var likeClauses []string
var args []interface{} var args []interface{}
@ -349,28 +328,6 @@ func getCountCriterionClause(primaryTable, joinTable, primaryFK string, criterio
return getIntCriterionWhereClause(lhs, criterion) return getIntCriterionWhereClause(lhs, criterion)
} }
func getJoinedMultiSumCriterionClause(primaryTable, foreignTable1, joinTable1, foreignTable2, joinTable2, primaryFK string, foreignFK1 string, foreignFK2 string, sum string, criterion models.IntCriterionInput) (string, []interface{}) {
lhs := fmt.Sprintf("(SELECT SUM(%s) "+
"FROM ("+
"SELECT SUM(%s) as %s from %s s "+
"LEFT JOIN %s ON %s.id = s.%s "+
"WHERE s.%s = %s.id "+
"UNION ALL "+
"SELECT SUM(%s) as %s from %s s "+
"LEFT JOIN %s ON %s.id = s.%s "+
"WHERE s.%s = %s.id "+
"))",
sum,
sum, sum, joinTable1,
foreignTable1, foreignTable1, foreignFK1,
primaryFK, primaryTable,
sum, sum, joinTable2,
foreignTable2, foreignTable2, foreignFK2,
primaryFK, primaryTable,
)
return getIntCriterionWhereClause(lhs, criterion)
}
func coalesce(column string) string { func coalesce(column string) string {
return fmt.Sprintf("COALESCE(%s, '')", column) return fmt.Sprintf("COALESCE(%s, '')", column)
} }

View file

@ -5,6 +5,7 @@ import (
"database/sql" "database/sql"
"errors" "errors"
"fmt" "fmt"
"time"
"github.com/doug-martin/goqu/v9" "github.com/doug-martin/goqu/v9"
"github.com/doug-martin/goqu/v9/exp" "github.com/doug-martin/goqu/v9/exp"
@ -773,6 +774,270 @@ func (t *relatedFilesTable) setPrimary(ctx context.Context, id int, fileID model
return nil return nil
} }
// viewHistoryTable provides access to a two-column join table recording
// dated events against an object id — used for both scene view dates and
// scene o-dates.
type viewHistoryTable struct {
	table
	// dateColumn is the timestamp column holding each recorded event.
	dateColumn exp.IdentifierExpression
}
// getDates returns every recorded date for id, most recent first.
func (t *viewHistoryTable) getDates(ctx context.Context, id int) ([]time.Time, error) {
	q := dialect.Select(
		t.dateColumn,
	).From(t.table.table).Where(
		t.idColumn.Eq(id),
	).Order(t.dateColumn.Desc())

	var dates []time.Time
	err := queryFunc(ctx, q, false, func(rows *sqlx.Rows) error {
		var ts Timestamp
		if err := rows.Scan(&ts); err != nil {
			return err
		}
		dates = append(dates, ts.Timestamp)
		return nil
	})
	if err != nil {
		return nil, err
	}

	return dates, nil
}
// getManyDates returns the recorded dates for each id in ids, most recent
// first. The outer result slice is parallel to ids: ret[i] holds the dates
// for ids[i], and is nil when that id has no recorded dates.
func (t *viewHistoryTable) getManyDates(ctx context.Context, ids []int) ([][]time.Time, error) {
	table := t.table.table
	q := dialect.Select(
		t.idColumn,
		t.dateColumn,
	).From(table).Where(
		t.idColumn.In(ids),
	).Order(t.dateColumn.Desc())

	ret := make([][]time.Time, len(ids))

	// map each id to its position in ids so rows can be slotted directly
	// into the parallel result slice
	idToIndex := idToIndexMap(ids)

	const single = false
	if err := queryFunc(ctx, q, single, func(rows *sqlx.Rows) error {
		var id int
		var date Timestamp
		if err := rows.Scan(&id, &date); err != nil {
			return err
		}

		idx := idToIndex[id]
		ret[idx] = append(ret[idx], date.Timestamp)

		return nil
	}); err != nil {
		return nil, err
	}

	return ret, nil
}
// getLastDate returns the most recent recorded date for id, or nil when no
// dates have been recorded.
func (t *viewHistoryTable) getLastDate(ctx context.Context, id int) (*time.Time, error) {
	q := dialect.Select(t.dateColumn).From(t.table.table).Where(
		t.idColumn.Eq(id),
	).Order(t.dateColumn.Desc()).Limit(1)

	var ts NullTimestamp
	if err := querySimple(ctx, q, &ts); err != nil {
		return nil, err
	}

	return ts.TimePtr(), nil
}
// getManyLastDate returns the most recent recorded date for each id in ids.
// The result slice is parallel to ids; entries for ids with no recorded
// dates are left nil.
func (t *viewHistoryTable) getManyLastDate(ctx context.Context, ids []int) ([]*time.Time, error) {
	table := t.table.table
	q := dialect.Select(
		t.idColumn,
		goqu.MAX(t.dateColumn),
	).From(table).Where(
		t.idColumn.In(ids),
	).GroupBy(t.idColumn)

	ret := make([]*time.Time, len(ids))

	// map each id to its position in ids so rows can be slotted directly
	idToIndex := idToIndexMap(ids)

	const single = false
	if err := queryFunc(ctx, q, single, func(rows *sqlx.Rows) error {
		var id int
		// MAX appears to return a string, so handle it manually
		var dateString string
		if err := rows.Scan(&id, &dateString); err != nil {
			return err
		}

		// stored timestamps use TimestampFormat (RFC3339)
		t, err := time.Parse(TimestampFormat, dateString)
		if err != nil {
			return fmt.Errorf("parsing date %v: %w", dateString, err)
		}

		idx := idToIndex[id]
		ret[idx] = &t

		return nil
	}); err != nil {
		return nil, err
	}

	return ret, nil
}
// getCount returns the number of dates recorded for id.
func (t *viewHistoryTable) getCount(ctx context.Context, id int) (int, error) {
	q := dialect.Select(goqu.COUNT("*")).From(t.table.table).Where(t.idColumn.Eq(id))

	var count int
	err := queryFunc(ctx, q, true, func(rows *sqlx.Rows) error {
		return rows.Scan(&count)
	})
	if err != nil {
		return 0, err
	}

	return count, nil
}
// getManyCount returns the number of dates recorded for each id in ids.
// The result slice is parallel to ids; ids with no rows are left at zero.
func (t *viewHistoryTable) getManyCount(ctx context.Context, ids []int) ([]int, error) {
	query := dialect.Select(
		t.idColumn,
		goqu.COUNT(t.dateColumn),
	).From(t.table.table).Where(
		t.idColumn.In(ids),
	).GroupBy(t.idColumn)

	counts := make([]int, len(ids))

	// map each id to its position in ids so rows slot into the result
	indexOf := idToIndexMap(ids)

	err := queryFunc(ctx, query, false, func(rows *sqlx.Rows) error {
		var (
			id int
			n  int
		)
		if err := rows.Scan(&id, &n); err != nil {
			return err
		}
		counts[indexOf[id]] = n
		return nil
	})
	if err != nil {
		return nil, err
	}

	return counts, nil
}
// getAllCount returns the total number of recorded dates across all ids.
func (t *viewHistoryTable) getAllCount(ctx context.Context) (int, error) {
	query := dialect.Select(goqu.COUNT("*")).From(t.table.table)

	var total int
	err := queryFunc(ctx, query, true, func(rows *sqlx.Rows) error {
		return rows.Scan(&total)
	})
	if err != nil {
		return 0, err
	}

	return total, nil
}
// getUniqueCount returns the number of distinct ids that have at least one
// recorded date.
func (t *viewHistoryTable) getUniqueCount(ctx context.Context) (int, error) {
	query := dialect.Select(goqu.COUNT(goqu.DISTINCT(t.idColumn))).From(t.table.table)

	var unique int
	err := queryFunc(ctx, query, true, func(rows *sqlx.Rows) error {
		return rows.Scan(&unique)
	})
	if err != nil {
		return 0, err
	}

	return unique, nil
}
// addDates inserts a row for each date against id, then returns the full
// updated date list (most recent first). When dates is empty, a single row
// with the current time is inserted instead.
func (t *viewHistoryTable) addDates(ctx context.Context, id int, dates []time.Time) ([]time.Time, error) {
	table := t.table.table

	// default to "now" when no explicit times are supplied
	if len(dates) == 0 {
		dates = []time.Time{time.Now()}
	}

	for _, d := range dates {
		q := dialect.Insert(table).Cols(t.idColumn.GetCol(), t.dateColumn.GetCol()).Vals(
			// convert all dates to UTC
			goqu.Vals{id, UTCTimestamp{Timestamp{d}}},
		)

		if _, err := exec(ctx, q); err != nil {
			return nil, fmt.Errorf("inserting into %s: %w", table.GetTable(), err)
		}
	}

	return t.getDates(ctx, id)
}
// deleteDates removes one row per entry in dates for id, then returns the
// remaining date list (most recent first). When dates is empty, the single
// most recent row is removed instead. At most one row is deleted per date,
// even when duplicates exist.
func (t *viewHistoryTable) deleteDates(ctx context.Context, id int, dates []time.Time) ([]time.Time, error) {
	table := t.table.table

	mostRecent := false
	if len(dates) == 0 {
		mostRecent = true
		// placeholder value: only used to drive a single loop iteration;
		// the most-recent subquery below ignores the date itself
		dates = []time.Time{time.Now()}
	}

	for _, date := range dates {
		var subquery *goqu.SelectDataset

		if mostRecent {
			// delete the most recent
			subquery = dialect.Select("rowid").From(table).Where(
				t.idColumn.Eq(id),
			).Order(t.dateColumn.Desc()).Limit(1)
		} else {
			// delete a single row matching this exact (UTC) timestamp
			subquery = dialect.Select("rowid").From(table).Where(
				t.idColumn.Eq(id),
				t.dateColumn.Eq(UTCTimestamp{Timestamp{date}}),
			).Limit(1)
		}

		q := dialect.Delete(table).Where(goqu.I("rowid").Eq(subquery))

		if _, err := exec(ctx, q); err != nil {
			return nil, fmt.Errorf("deleting from %s: %w", table.GetTable(), err)
		}
	}

	return t.getDates(ctx, id)
}
// deleteAllDates removes every recorded date for id and returns the
// resulting count.
func (t *viewHistoryTable) deleteAllDates(ctx context.Context, id int) (int, error) {
	q := dialect.Delete(t.table.table).Where(t.idColumn.Eq(id))

	if _, err := exec(ctx, q); err != nil {
		return 0, fmt.Errorf("resetting dates for id %v: %w", id, err)
	}

	return t.getCount(ctx, id)
}
type sqler interface { type sqler interface {
ToSQL() (sql string, params []interface{}, err error) ToSQL() (sql string, params []interface{}, err error)
} }

View file

@ -190,6 +190,22 @@ var (
}, },
valueColumn: scenesURLsJoinTable.Col(sceneURLColumn), valueColumn: scenesURLsJoinTable.Col(sceneURLColumn),
} }
scenesViewTableMgr = &viewHistoryTable{
table: table{
table: goqu.T(scenesViewDatesTable),
idColumn: goqu.T(scenesViewDatesTable).Col(sceneIDColumn),
},
dateColumn: goqu.T(scenesViewDatesTable).Col(sceneViewDateColumn),
}
scenesOTableMgr = &viewHistoryTable{
table: table{
table: goqu.T(scenesODatesTable),
idColumn: goqu.T(scenesODatesTable).Col(sceneIDColumn),
},
dateColumn: goqu.T(scenesODatesTable).Col(sceneODateColumn),
}
) )
var ( var (

View file

@ -5,6 +5,8 @@ import (
"time" "time"
) )
const TimestampFormat = time.RFC3339
// Timestamp represents a time stored in RFC3339 format. // Timestamp represents a time stored in RFC3339 format.
type Timestamp struct { type Timestamp struct {
Timestamp time.Time Timestamp time.Time
@ -18,7 +20,18 @@ func (t *Timestamp) Scan(value interface{}) error {
// Value implements the driver Valuer interface. // Value implements the driver Valuer interface.
func (t Timestamp) Value() (driver.Value, error) { func (t Timestamp) Value() (driver.Value, error) {
return t.Timestamp.Format(time.RFC3339), nil return t.Timestamp.Format(TimestampFormat), nil
}
// UTCTimestamp stores a time in UTC.
// TODO - Timestamp should use UTC by default
type UTCTimestamp struct {
Timestamp
}
// Value implements the driver Valuer interface.
func (t UTCTimestamp) Value() (driver.Value, error) {
return t.Timestamp.Timestamp.UTC().Format(TimestampFormat), nil
} }
// NullTimestamp represents a nullable time stored in RFC3339 format. // NullTimestamp represents a nullable time stored in RFC3339 format.
@ -47,7 +60,7 @@ func (t NullTimestamp) Value() (driver.Value, error) {
return nil, nil return nil, nil
} }
return t.Timestamp.Format(time.RFC3339), nil return t.Timestamp.Format(TimestampFormat), nil
} }
func (t NullTimestamp) TimePtr() *time.Time { func (t NullTimestamp) TimePtr() *time.Time {

View file

@ -22,6 +22,9 @@ fragment SceneData on Scene {
play_duration play_duration
play_count play_count
play_history
o_history
files { files {
...VideoFileData ...VideoFileData
} }

View file

@ -34,16 +34,36 @@ mutation SceneSaveActivity(
) )
} }
mutation SceneIncrementPlayCount($id: ID!) { mutation SceneAddPlay($id: ID!, $times: [Timestamp!]) {
sceneIncrementPlayCount(id: $id) sceneAddPlay(id: $id, times: $times) {
count
history
}
} }
mutation SceneIncrementO($id: ID!) { mutation SceneDeletePlay($id: ID!, $times: [Timestamp!]) {
sceneIncrementO(id: $id) sceneDeletePlay(id: $id, times: $times) {
count
history
}
} }
mutation SceneDecrementO($id: ID!) { mutation SceneResetPlayCount($id: ID!) {
sceneDecrementO(id: $id) sceneResetPlayCount(id: $id)
}
mutation SceneAddO($id: ID!, $times: [Timestamp!]) {
sceneAddO(id: $id, times: $times) {
count
history
}
}
mutation SceneDeleteO($id: ID!, $times: [Timestamp!]) {
sceneDeleteO(id: $id, times: $times) {
count
history
}
} }
mutation SceneResetO($id: ID!) { mutation SceneResetO($id: ID!) {

View file

@ -61,6 +61,7 @@ const QueueViewer = lazyComponent(() => import("./QueueViewer"));
const SceneMarkersPanel = lazyComponent(() => import("./SceneMarkersPanel")); const SceneMarkersPanel = lazyComponent(() => import("./SceneMarkersPanel"));
const SceneFileInfoPanel = lazyComponent(() => import("./SceneFileInfoPanel")); const SceneFileInfoPanel = lazyComponent(() => import("./SceneFileInfoPanel"));
const SceneDetailPanel = lazyComponent(() => import("./SceneDetailPanel")); const SceneDetailPanel = lazyComponent(() => import("./SceneDetailPanel"));
const SceneHistoryPanel = lazyComponent(() => import("./SceneHistoryPanel"));
const SceneMoviePanel = lazyComponent(() => import("./SceneMoviePanel")); const SceneMoviePanel = lazyComponent(() => import("./SceneMoviePanel"));
const SceneGalleriesPanel = lazyComponent( const SceneGalleriesPanel = lazyComponent(
() => import("./SceneGalleriesPanel") () => import("./SceneGalleriesPanel")
@ -158,6 +159,7 @@ const ScenePage: React.FC<IProps> = ({
Mousetrap.bind("e", () => setActiveTabKey("scene-edit-panel")); Mousetrap.bind("e", () => setActiveTabKey("scene-edit-panel"));
Mousetrap.bind("k", () => setActiveTabKey("scene-markers-panel")); Mousetrap.bind("k", () => setActiveTabKey("scene-markers-panel"));
Mousetrap.bind("i", () => setActiveTabKey("scene-file-info-panel")); Mousetrap.bind("i", () => setActiveTabKey("scene-file-info-panel"));
Mousetrap.bind("h", () => setActiveTabKey("scene-history-panel"));
Mousetrap.bind("o", () => { Mousetrap.bind("o", () => {
onIncrementClick(); onIncrementClick();
}); });
@ -172,6 +174,7 @@ const ScenePage: React.FC<IProps> = ({
Mousetrap.unbind("e"); Mousetrap.unbind("e");
Mousetrap.unbind("k"); Mousetrap.unbind("k");
Mousetrap.unbind("i"); Mousetrap.unbind("i");
Mousetrap.unbind("h");
Mousetrap.unbind("o"); Mousetrap.unbind("o");
Mousetrap.unbind("p n"); Mousetrap.unbind("p n");
Mousetrap.unbind("p p"); Mousetrap.unbind("p p");
@ -407,6 +410,11 @@ const ScenePage: React.FC<IProps> = ({
<Counter count={scene.files.length} hideZero hideOne /> <Counter count={scene.files.length} hideZero hideOne />
</Nav.Link> </Nav.Link>
</Nav.Item> </Nav.Item>
<Nav.Item>
<Nav.Link eventKey="scene-history-panel">
<FormattedMessage id="history" />
</Nav.Link>
</Nav.Item>
<Nav.Item> <Nav.Item>
<Nav.Link eventKey="scene-edit-panel"> <Nav.Link eventKey="scene-edit-panel">
<FormattedMessage id="actions.edit" /> <FormattedMessage id="actions.edit" />
@ -488,6 +496,9 @@ const ScenePage: React.FC<IProps> = ({
onDelete={() => setIsDeleteAlertOpen(true)} onDelete={() => setIsDeleteAlertOpen(true)}
/> />
</Tab.Pane> </Tab.Pane>
<Tab.Pane eventKey="scene-history-panel">
<SceneHistoryPanel scene={scene} />
</Tab.Pane>
</Tab.Content> </Tab.Content>
</Tab.Container> </Tab.Container>
); );

View file

@ -308,16 +308,6 @@ export const SceneFileInfoPanel: React.FC<ISceneFileInfoPanelProps> = (
{renderInteractiveSpeed()} {renderInteractiveSpeed()}
<URLsField id="urls" urls={props.scene.urls} truncate /> <URLsField id="urls" urls={props.scene.urls} truncate />
{renderStashIDs()} {renderStashIDs()}
<TextField
id="media_info.play_count"
value={(props.scene.play_count ?? 0).toString()}
truncate
/>
<TextField
id="media_info.play_duration"
value={TextUtils.secondsToTimestamp(props.scene.play_duration ?? 0)}
truncate
/>
</dl> </dl>
{filesPanel} {filesPanel}

View file

@ -0,0 +1,356 @@
import {
faEllipsisV,
faPlus,
faTrash,
} from "@fortawesome/free-solid-svg-icons";
import React from "react";
import { Button, Dropdown } from "react-bootstrap";
import { FormattedMessage, useIntl } from "react-intl";
import { AlertModal } from "src/components/Shared/Alert";
import { Counter } from "src/components/Shared/Counter";
import { DateInput } from "src/components/Shared/DateInput";
import { Icon } from "src/components/Shared/Icon";
import { ModalComponent } from "src/components/Shared/Modal";
import {
useSceneDecrementO,
useSceneDecrementPlayCount,
useSceneIncrementO,
useSceneIncrementPlayCount,
useSceneResetO,
useSceneResetPlayCount,
} from "src/core/StashService";
import * as GQL from "src/core/generated-graphql";
import { TextField } from "src/utils/field";
import TextUtils from "src/utils/text";
// Renders a list of recorded dates, each with a remove button, or a
// localised "none recorded" message when the history is empty. Dates equal
// to unknownDate are shown as "unknown date".
const History: React.FC<{
  className?: string;
  history: string[];
  unknownDate?: string;
  onRemove: (date: string) => void;
  noneID: string;
}> = ({ className, history, unknownDate, noneID, onRemove }) => {
  const intl = useIntl();

  if (!history.length) {
    return (
      <div>
        <FormattedMessage id={noneID} />
      </div>
    );
  }

  const formatEntry = (date: string) =>
    date === unknownDate
      ? intl.formatMessage({ id: "unknown_date" })
      : TextUtils.formatDateTime(intl, date);

  return (
    <div className="scene-history">
      <ul className={className}>
        {history.map((entry, i) => (
          <li key={i}>
            <span>{formatEntry(entry)}</span>
            <Button
              className="remove-date-button"
              size="sm"
              variant="minimal"
              onClick={() => onRemove(entry)}
              title={intl.formatMessage({ id: "actions.remove_date" })}
            >
              <Icon icon={faTrash} />
            </Button>
          </li>
        ))}
      </ul>
    </div>
  );
};
const HistoryMenu: React.FC<{
hasHistory: boolean;
onAddDate: () => void;
onClearDates: () => void;
}> = ({ hasHistory, onAddDate, onClearDates }) => {
const intl = useIntl();
return (
<Dropdown className="history-operations-dropdown">
<Dropdown.Toggle
variant="secondary"
className="minimal"
title={intl.formatMessage({ id: "operations" })}
>
<Icon icon={faEllipsisV} />
</Dropdown.Toggle>
<Dropdown.Menu className="bg-secondary text-white">
<Dropdown.Item
className="bg-secondary text-white"
onClick={() => onAddDate()}
>
<FormattedMessage id="actions.add_manual_date" />
</Dropdown.Item>
{hasHistory && (
<Dropdown.Item
className="bg-secondary text-white"
onClick={() => onClearDates()}
>
<FormattedMessage id="actions.clear_date_data" />
</Dropdown.Item>
)}
</Dropdown.Menu>
</Dropdown>
);
};
// Modal wrapping a date/time picker, defaulting to the current time.
// Calls onClose with the chosen value on confirm, or with no argument on
// cancel.
const DatePickerModal: React.FC<{
  show: boolean;
  onClose: (t?: string) => void;
}> = ({ show, onClose }) => {
  const intl = useIntl();

  const [date, setDate] = React.useState<string>(
    TextUtils.dateTimeToString(new Date())
  );

  const accept = {
    onClick: () => onClose(date),
    text: intl.formatMessage({ id: "actions.confirm" }),
  };

  const cancel = {
    variant: "secondary" as const,
    onClick: () => onClose(),
    text: intl.formatMessage({ id: "actions.cancel" }),
  };

  return (
    <ModalComponent
      show={show}
      header={<FormattedMessage id="actions.choose_date" />}
      accept={accept}
      cancel={cancel}
    >
      <div>
        <DateInput value={date} onValueChange={setDate} isTime />
      </div>
    </ModalComponent>
  );
};
// Props for SceneHistoryPanel.
interface ISceneHistoryProps {
  // the scene whose play/o history is displayed and edited
  scene: GQL.SceneDataFragment;
}
// SceneHistoryPanel displays and edits a scene's play history and o-counter
// history: listing recorded dates, adding entries (now or at a chosen time),
// removing individual entries, and clearing each history entirely.
export const SceneHistoryPanel: React.FC<ISceneHistoryProps> = ({ scene }) => {
  const intl = useIntl();

  // visibility flags for the two clear-confirmation dialogs and the two
  // manual-date picker dialogs
  const [dialogs, setDialogs] = React.useState({
    playHistory: false,
    oHistory: false,
    addPlay: false,
    addO: false,
  });

  // merge a partial update into the dialog visibility state
  function setDialogPartial(partial: Partial<typeof dialogs>) {
    setDialogs({ ...dialogs, ...partial });
  }

  const [incrementPlayCount] = useSceneIncrementPlayCount();
  const [decrementPlayCount] = useSceneDecrementPlayCount();
  const [clearPlayCount] = useSceneResetPlayCount();

  const [incrementOCount] = useSceneIncrementO(scene.id);
  const [decrementOCount] = useSceneDecrementO(scene.id);
  const [resetO] = useSceneResetO(scene.id);

  // convert a picker string to ISO-8601, or null if it cannot be parsed
  function dateStringToISOString(time: string) {
    const date = TextUtils.stringToFuzzyDateTime(time);
    if (!date) return null;
    return date.toISOString();
  }

  // record a play; with no argument the server uses the current time
  function handleAddPlayDate(time?: string) {
    incrementPlayCount({
      variables: {
        id: scene.id,
        times: time ? [time] : undefined,
      },
    });
  }

  // remove a single play entry matching the given time
  function handleDeletePlayDate(time: string) {
    decrementPlayCount({
      variables: {
        id: scene.id,
        times: time ? [time] : undefined,
      },
    });
  }

  // clear the entire play history after dialog confirmation
  function handleClearPlayDates() {
    setDialogPartial({ playHistory: false });
    clearPlayCount({
      variables: {
        id: scene.id,
      },
    });
  }

  // record an o-counter entry; with no argument the server uses now
  function handleAddODate(time?: string) {
    incrementOCount({
      variables: {
        id: scene.id,
        times: time ? [time] : undefined,
      },
    });
  }

  // remove a single o-counter entry matching the given time
  function handleDeleteODate(time: string) {
    decrementOCount({
      variables: {
        id: scene.id,
        times: time ? [time] : undefined,
      },
    });
  }

  // clear the entire o-counter history after dialog confirmation
  function handleClearODates() {
    setDialogPartial({ oHistory: false });
    resetO({
      variables: {
        id: scene.id,
      },
    });
  }

  // render the confirmation and date-picker dialogs
  function maybeRenderDialogs() {
    return (
      <>
        <AlertModal
          show={dialogs.playHistory}
          text={intl.formatMessage({
            id: "dialogs.clear_play_history_confirm",
          })}
          confirmButtonText={intl.formatMessage({ id: "actions.clear" })}
          onConfirm={() => handleClearPlayDates()}
          onCancel={() => setDialogPartial({ playHistory: false })}
        />
        <AlertModal
          show={dialogs.oHistory}
          text={intl.formatMessage({ id: "dialogs.clear_o_history_confirm" })}
          confirmButtonText={intl.formatMessage({ id: "actions.clear" })}
          onConfirm={() => handleClearODates()}
          onCancel={() => setDialogPartial({ oHistory: false })}
        />
        {/* add conditions here so that date is generated correctly */}
        {dialogs.addPlay && (
          <DatePickerModal
            show
            onClose={(t) => {
              const tt = t ? dateStringToISOString(t) : null;
              if (tt) {
                handleAddPlayDate(tt);
              }
              setDialogPartial({ addPlay: false });
            }}
          />
        )}
        {dialogs.addO && (
          <DatePickerModal
            show
            onClose={(t) => {
              const tt = t ? dateStringToISOString(t) : null;
              if (tt) {
                handleAddODate(tt);
              }
              setDialogPartial({ addO: false });
            }}
          />
        )}
      </>
    );
  }

  // drop null entries from the GraphQL history arrays
  const playHistory = (scene.play_history ?? []).filter(
    (h) => h != null
  ) as string[];
  const oHistory = (scene.o_history ?? []).filter((h) => h != null) as string[];

  return (
    <div>
      {maybeRenderDialogs()}
      <div className="play-history">
        <div className="history-header">
          <h5>
            <span>
              <FormattedMessage id="play_history" />
              <Counter count={playHistory.length} hideZero />
            </span>
            <span>
              <Button
                size="sm"
                variant="minimal"
                className="add-date-button"
                title={intl.formatMessage({ id: "actions.add_play" })}
                onClick={() => handleAddPlayDate()}
              >
                <Icon icon={faPlus} />
              </Button>
              <HistoryMenu
                hasHistory={playHistory.length > 0}
                onAddDate={() => setDialogPartial({ addPlay: true })}
                onClearDates={() => setDialogPartial({ playHistory: true })}
              />
            </span>
          </h5>
        </div>
        {/* entries matching created_at render as "unknown date" —
            presumably the marker for migrated counts with no real time;
            confirm against server behaviour */}
        <History
          history={playHistory ?? []}
          noneID="playdate_recorded_no"
          unknownDate={scene.created_at}
          onRemove={(t) => handleDeletePlayDate(t)}
        />
        <dl className="details-list">
          <TextField
            id="media_info.play_duration"
            value={TextUtils.secondsToTimestamp(scene.play_duration ?? 0)}
          />
        </dl>
      </div>
      <div className="o-history">
        <div className="history-header">
          <h5>
            <span>
              <FormattedMessage id="o_history" />
              <Counter count={oHistory.length} hideZero />
            </span>
            <span>
              <Button
                size="sm"
                variant="minimal"
                className="add-date-button"
                title={intl.formatMessage({ id: "actions.add_o" })}
                onClick={() => handleAddODate()}
              >
                <Icon icon={faPlus} />
              </Button>
              <HistoryMenu
                hasHistory={oHistory.length > 0}
                onAddDate={() => setDialogPartial({ addO: true })}
                onClearDates={() => setDialogPartial({ oHistory: true })}
              />
            </span>
          </h5>
        </div>
        <History
          history={oHistory}
          noneID="odate_recorded_no"
          unknownDate={scene.created_at}
          onRemove={(t) => handleDeleteODate(t)}
        />
      </div>
    </div>
  );
};
export default SceneHistoryPanel;

View file

@ -44,10 +44,16 @@ const StashIDsField: React.FC<IStashIDsField> = ({ values }) => {
return <StringListSelect value={values.map((v) => v.stash_id)} />; return <StringListSelect value={values.map((v) => v.stash_id)} />;
}; };
type MergeOptions = {
values: GQL.SceneUpdateInput;
includeViewHistory: boolean;
includeOHistory: boolean;
};
interface ISceneMergeDetailsProps { interface ISceneMergeDetailsProps {
sources: GQL.SlimSceneDataFragment[]; sources: GQL.SlimSceneDataFragment[];
dest: GQL.SlimSceneDataFragment; dest: GQL.SlimSceneDataFragment;
onClose: (values?: GQL.SceneUpdateInput) => void; onClose: (options?: MergeOptions) => void;
} }
const SceneMergeDetails: React.FC<ISceneMergeDetailsProps> = ({ const SceneMergeDetails: React.FC<ISceneMergeDetailsProps> = ({
@ -558,41 +564,45 @@ const SceneMergeDetails: React.FC<ISceneMergeDetailsProps> = ({
); );
} }
function createValues(): GQL.SceneUpdateInput { function createValues(): MergeOptions {
const all = [dest, ...sources]; const all = [dest, ...sources];
// only set the cover image if it's different from the existing cover image // only set the cover image if it's different from the existing cover image
const coverImage = image.useNewValue ? image.getNewValue() : undefined; const coverImage = image.useNewValue ? image.getNewValue() : undefined;
return { return {
id: dest.id, values: {
title: title.getNewValue(), id: dest.id,
code: code.getNewValue(), title: title.getNewValue(),
urls: url.getNewValue(), code: code.getNewValue(),
date: date.getNewValue(), urls: url.getNewValue(),
rating100: rating.getNewValue(), date: date.getNewValue(),
o_counter: oCounter.getNewValue(), rating100: rating.getNewValue(),
play_count: playCount.getNewValue(), o_counter: oCounter.getNewValue(),
play_duration: playDuration.getNewValue(), play_count: playCount.getNewValue(),
gallery_ids: galleries.getNewValue(), play_duration: playDuration.getNewValue(),
studio_id: studio.getNewValue()?.stored_id, gallery_ids: galleries.getNewValue(),
performer_ids: performers.getNewValue()?.map((p) => p.stored_id!), studio_id: studio.getNewValue()?.stored_id,
movies: movies.getNewValue()?.map((m) => { performer_ids: performers.getNewValue()?.map((p) => p.stored_id!),
// find the equivalent movie in the original scenes movies: movies.getNewValue()?.map((m) => {
const found = all // find the equivalent movie in the original scenes
.map((s) => s.movies) const found = all
.flat() .map((s) => s.movies)
.find((mm) => mm.movie.id === m.stored_id); .flat()
return { .find((mm) => mm.movie.id === m.stored_id);
movie_id: m.stored_id!, return {
scene_index: found!.scene_index, movie_id: m.stored_id!,
}; scene_index: found!.scene_index,
}), };
tag_ids: tags.getNewValue()?.map((t) => t.stored_id!), }),
details: details.getNewValue(), tag_ids: tags.getNewValue()?.map((t) => t.stored_id!),
organized: organized.getNewValue(), details: details.getNewValue(),
stash_ids: stashIDs.getNewValue(), organized: organized.getNewValue(),
cover_image: coverImage, stash_ids: stashIDs.getNewValue(),
cover_image: coverImage,
},
includeViewHistory: playCount.getNewValue() !== undefined,
includeOHistory: oCounter.getNewValue() !== undefined,
}; };
} }
@ -679,13 +689,16 @@ export const SceneMergeModal: React.FC<ISceneMergeModalProps> = ({
setSecondStep(true); setSecondStep(true);
} }
async function onMerge(values: GQL.SceneUpdateInput) { async function onMerge(options: MergeOptions) {
const { values, includeViewHistory, includeOHistory } = options;
try { try {
setRunning(true); setRunning(true);
const result = await mutateSceneMerge( const result = await mutateSceneMerge(
destScene[0].id, destScene[0].id,
sourceScenes.map((s) => s.id), sourceScenes.map((s) => s.id),
values values,
includeViewHistory,
includeOHistory
); );
if (result.data?.sceneMerge) { if (result.data?.sceneMerge) {
Toast.success(intl.formatMessage({ id: "toast.merged_scenes" })); Toast.success(intl.formatMessage({ id: "toast.merged_scenes" }));

View file

@ -750,3 +750,37 @@ input[type="range"].blue-slider {
bottom: 0; bottom: 0;
} }
} }
// spacing between the play history list and the play-duration details below
.play-history dl {
  margin-top: 0.5rem;
}

// shared layout for the play-history and o-history sections
.play-history,
.o-history {
  // header row: title + counter on the left, action buttons on the right
  .history-header h5 {
    align-items: center;
    display: flex;
    justify-content: space-between;
  }

  .history-operations-dropdown {
    display: inline-block;
  }

  .add-date-button {
    color: $success;
  }

  .remove-date-button {
    color: $danger;
  }

  ul {
    padding-inline-start: 1rem;

    // each entry: date on the left, remove button on the right
    li {
      display: flex;
      justify-content: space-between;
    }
  }
}

View file

@ -56,7 +56,7 @@ const _DateInput: React.FC<IProps> = (props: IProps) => {
onChange={(v) => { onChange={(v) => {
props.onValueChange(v ? dateToString(v) : ""); props.onValueChange(v ? dateToString(v) : "");
}} }}
customInput={React.createElement(ShowPickerButton)} customInput={<ShowPickerButton onClick={() => {}} />}
showMonthDropdown showMonthDropdown
showYearDropdown showYearDropdown
scrollableMonthYearDropdown scrollableMonthYearDropdown

View file

@ -570,11 +570,19 @@ export const useScenesDestroy = (input: GQL.ScenesDestroyInput) =>
}); });
export const useSceneIncrementO = (id: string) => export const useSceneIncrementO = (id: string) =>
GQL.useSceneIncrementOMutation({ GQL.useSceneAddOMutation({
variables: { id }, variables: { id },
update(cache, result) { update(cache, result, { variables }) {
const updatedOCount = result.data?.sceneIncrementO; // this is not perfectly accurate, the time is set server-side
if (updatedOCount === undefined) return; // it isn't even displayed anywhere in the UI anyway
const at = new Date().toISOString();
const mutationResult = result.data?.sceneAddO;
if (!mutationResult || !variables) return;
const { history } = mutationResult;
const { times } = variables;
const timeArray = !times ? [at] : Array.isArray(times) ? times : [times];
const scene = cache.readFragment<GQL.SlimSceneDataFragment>({ const scene = cache.readFragment<GQL.SlimSceneDataFragment>({
id: cache.identify({ __typename: "Scene", id }), id: cache.identify({ __typename: "Scene", id }),
@ -589,7 +597,7 @@ export const useSceneIncrementO = (id: string) =>
id: cache.identify(performer), id: cache.identify(performer),
fields: { fields: {
o_counter(value) { o_counter(value) {
return value + 1; return value + timeArray.length;
}, },
}, },
}); });
@ -601,8 +609,18 @@ export const useSceneIncrementO = (id: string) =>
}); });
} }
updateStats(cache, "total_o_count", 1); updateStats(cache, "total_o_count", timeArray.length);
updateO(cache, "Scene", id, updatedOCount);
cache.modify({
id: cache.identify({ __typename: "Scene", id }),
fields: {
o_history() {
return history;
},
},
});
updateO(cache, "Scene", id, history.length);
evictQueries(cache, [ evictQueries(cache, [
GQL.FindScenesDocument, // filter by o_counter GQL.FindScenesDocument, // filter by o_counter
GQL.FindPerformersDocument, // filter by o_counter GQL.FindPerformersDocument, // filter by o_counter
@ -611,11 +629,15 @@ export const useSceneIncrementO = (id: string) =>
}); });
export const useSceneDecrementO = (id: string) => export const useSceneDecrementO = (id: string) =>
GQL.useSceneDecrementOMutation({ GQL.useSceneDeleteOMutation({
variables: { id }, variables: { id },
update(cache, result) { update(cache, result, { variables }) {
const updatedOCount = result.data?.sceneDecrementO; const mutationResult = result.data?.sceneDeleteO;
if (updatedOCount === undefined) return; if (!mutationResult || !variables) return;
const { history } = mutationResult;
const { times } = variables;
const timeArray = !times ? null : Array.isArray(times) ? times : [times];
const scene = cache.readFragment<GQL.SlimSceneDataFragment>({ const scene = cache.readFragment<GQL.SlimSceneDataFragment>({
id: cache.identify({ __typename: "Scene", id }), id: cache.identify({ __typename: "Scene", id }),
@ -630,7 +652,7 @@ export const useSceneDecrementO = (id: string) =>
id: cache.identify(performer), id: cache.identify(performer),
fields: { fields: {
o_counter(value) { o_counter(value) {
return value - 1; return value - (timeArray?.length ?? 1);
}, },
}, },
}); });
@ -642,8 +664,18 @@ export const useSceneDecrementO = (id: string) =>
}); });
} }
updateStats(cache, "total_o_count", -1); updateStats(cache, "total_o_count", -(timeArray?.length ?? 1));
updateO(cache, "Scene", id, updatedOCount);
cache.modify({
id: cache.identify({ __typename: "Scene", id }),
fields: {
o_history() {
return history;
},
},
});
updateO(cache, "Scene", id, history.length);
evictQueries(cache, [ evictQueries(cache, [
GQL.FindScenesDocument, // filter by o_counter GQL.FindScenesDocument, // filter by o_counter
GQL.FindPerformersDocument, // filter by o_counter GQL.FindPerformersDocument, // filter by o_counter
@ -694,6 +726,16 @@ export const useSceneResetO = (id: string) =>
}); });
} }
cache.modify({
id: cache.identify({ __typename: "Scene", id }),
fields: {
o_history() {
const ret: string[] = [];
return ret;
},
},
});
updateO(cache, "Scene", id, updatedOCount); updateO(cache, "Scene", id, updatedOCount);
evictQueries(cache, [ evictQueries(cache, [
GQL.FindScenesDocument, // filter by o_counter GQL.FindScenesDocument, // filter by o_counter
@ -752,7 +794,9 @@ export const mutateSceneAssignFile = (sceneID: string, fileID: string) =>
export const mutateSceneMerge = ( export const mutateSceneMerge = (
destination: string, destination: string,
source: string[], source: string[],
values: GQL.SceneUpdateInput values: GQL.SceneUpdateInput,
includeViewHistory: boolean,
includeOHistory: boolean
) => ) =>
client.mutate<GQL.SceneMergeMutation>({ client.mutate<GQL.SceneMergeMutation>({
mutation: GQL.SceneMergeDocument, mutation: GQL.SceneMergeDocument,
@ -761,6 +805,8 @@ export const mutateSceneMerge = (
source, source,
destination, destination,
values, values,
play_history: includeViewHistory,
o_history: includeOHistory,
}, },
}, },
update(cache, result) { update(cache, result) {
@ -790,7 +836,7 @@ export const useSceneSaveActivity = () =>
id: cache.identify({ __typename: "Scene", id }), id: cache.identify({ __typename: "Scene", id }),
fields: { fields: {
resume_time() { resume_time() {
return resumeTime; return resumeTime ?? null;
}, },
play_duration(value) { play_duration(value) {
return value + playDuration; return value + playDuration;
@ -809,9 +855,108 @@ export const useSceneSaveActivity = () =>
}); });
export const useSceneIncrementPlayCount = () => export const useSceneIncrementPlayCount = () =>
GQL.useSceneIncrementPlayCountMutation({ GQL.useSceneAddPlayMutation({
update(cache, result, { variables }) { update(cache, result, { variables }) {
if (!result.data?.sceneIncrementPlayCount || !variables) return; const mutationResult = result.data?.sceneAddPlay;
if (!mutationResult || !variables) return;
const { history } = mutationResult;
const { id } = variables;
let lastPlayCount = 0;
const playCount = history.length;
cache.modify({
id: cache.identify({ __typename: "Scene", id }),
fields: {
play_count(value) {
lastPlayCount = value;
return history.length;
},
last_played_at() {
// assume only one entry - or the first is the most recent
return history[0];
},
play_history() {
return history;
},
},
});
updateStats(cache, "total_play_count", playCount - lastPlayCount);
if (lastPlayCount === 0) {
updateStats(cache, "scenes_played", 1);
}
evictQueries(cache, [
GQL.FindScenesDocument, // filter by play count
]);
},
});
export const useSceneDecrementPlayCount = () =>
GQL.useSceneDeletePlayMutation({
update(cache, result, { variables }) {
const mutationResult = result.data?.sceneDeletePlay;
if (!mutationResult || !variables) return;
const { history } = mutationResult;
const { id, times } = variables;
const timeArray = !times ? null : Array.isArray(times) ? times : [times];
const nRemoved = timeArray?.length ?? 1;
let lastPlayCount = 0;
let lastPlayedAt: string | null = null;
const playCount = history.length;
cache.modify({
id: cache.identify({ __typename: "Scene", id }),
fields: {
play_count(value) {
lastPlayCount = value;
return playCount;
},
play_history() {
if (history.length > 0) {
lastPlayedAt = history[0];
}
return history;
},
},
});
cache.modify({
id: cache.identify({ __typename: "Scene", id }),
fields: {
last_played_at() {
return lastPlayedAt;
},
},
});
if (lastPlayCount > 0) {
updateStats(
cache,
"total_play_count",
nRemoved > lastPlayCount ? -lastPlayCount : -nRemoved
);
}
if (lastPlayCount - nRemoved <= 0) {
updateStats(cache, "scenes_played", -1);
}
evictQueries(cache, [
GQL.FindScenesDocument, // filter by play count
]);
},
});
export const useSceneResetPlayCount = () =>
GQL.useSceneResetPlayCountMutation({
update(cache, result, { variables }) {
if (!variables) return;
let lastPlayCount = 0; let lastPlayCount = 0;
cache.modify({ cache.modify({
@ -819,19 +964,23 @@ export const useSceneIncrementPlayCount = () =>
fields: { fields: {
play_count(value) { play_count(value) {
lastPlayCount = value; lastPlayCount = value;
return value + 1; return 0;
},
play_history() {
const ret: string[] = [];
return ret;
}, },
last_played_at() { last_played_at() {
// this is not perfectly accurate, the time is set server-side return null;
// it isn't even displayed anywhere in the UI anyway
return new Date().toISOString();
}, },
}, },
}); });
updateStats(cache, "total_play_count", 1); if (lastPlayCount > 0) {
if (lastPlayCount === 0) { updateStats(cache, "total_play_count", -lastPlayCount);
updateStats(cache, "scenes_played", 1); }
if (lastPlayCount > 0) {
updateStats(cache, "scenes_played", -1);
} }
evictQueries(cache, [ evictQueries(cache, [

View file

@ -58,6 +58,7 @@
| `k` | Markers tab | | `k` | Markers tab |
| `i` | File info tab | | `i` | File info tab |
| `e` | Edit tab | | `e` | Edit tab |
| `h` | History tab |
| `,` | Hide/Show sidebar | | `,` | Hide/Show sidebar |
| `.` | Hide/Show scene scrubber | | `.` | Hide/Show scene scrubber |
| `o` | Increment O-Counter | | `o` | Increment O-Counter |

View file

@ -3,6 +3,9 @@
"add": "Add", "add": "Add",
"add_directory": "Add Directory", "add_directory": "Add Directory",
"add_entity": "Add {entityType}", "add_entity": "Add {entityType}",
"add_manual_date": "Add manual date",
"add_o": "Add O",
"add_play": "Add play",
"add_to_entity": "Add to {entityType}", "add_to_entity": "Add to {entityType}",
"allow": "Allow", "allow": "Allow",
"allow_temporarily": "Allow temporarily", "allow_temporarily": "Allow temporarily",
@ -13,9 +16,11 @@
"backup": "Backup", "backup": "Backup",
"browse_for_image": "Browse for image…", "browse_for_image": "Browse for image…",
"cancel": "Cancel", "cancel": "Cancel",
"choose_date": "Choose a date",
"clean": "Clean", "clean": "Clean",
"clear": "Clear", "clear": "Clear",
"clear_back_image": "Clear back image", "clear_back_image": "Clear back image",
"clear_date_data": "Clear date data",
"clear_front_image": "Clear front image", "clear_front_image": "Clear front image",
"clear_image": "Clear Image", "clear_image": "Clear Image",
"close": "Close", "close": "Close",
@ -84,6 +89,7 @@
"reload_plugins": "Reload plugins", "reload_plugins": "Reload plugins",
"reload_scrapers": "Reload scrapers", "reload_scrapers": "Reload scrapers",
"remove": "Remove", "remove": "Remove",
"remove_date": "Remove date",
"remove_from_gallery": "Remove from Gallery", "remove_from_gallery": "Remove from Gallery",
"rename_gen_files": "Rename generated files", "rename_gen_files": "Rename generated files",
"rescan": "Rescan", "rescan": "Rescan",
@ -808,6 +814,8 @@
"details": "Details", "details": "Details",
"developmentVersion": "Development Version", "developmentVersion": "Development Version",
"dialogs": { "dialogs": {
"clear_o_history_confirm": "Are you sure you want to clear the O history?",
"clear_play_history_confirm": "Are you sure you want to clear the play history?",
"create_new_entity": "Create new {entity}", "create_new_entity": "Create new {entity}",
"delete_alert": "The following {count, plural, one {{singularEntity}} other {{pluralEntity}}} will be deleted permanently:", "delete_alert": "The following {count, plural, one {{singularEntity}} other {{pluralEntity}}} will be deleted permanently:",
"delete_confirm": "Are you sure you want to delete {entityName}?", "delete_confirm": "Are you sure you want to delete {entityName}?",
@ -1034,6 +1042,7 @@
"height": "Height", "height": "Height",
"height_cm": "Height (cm)", "height_cm": "Height (cm)",
"help": "Help", "help": "Help",
"history": "History",
"ignore_auto_tag": "Ignore Auto Tag", "ignore_auto_tag": "Ignore Auto Tag",
"image": "Image", "image": "Image",
"image_count": "Image Count", "image_count": "Image Count",
@ -1061,6 +1070,7 @@
"downloaded_from": "Downloaded From", "downloaded_from": "Downloaded From",
"hash": "Hash", "hash": "Hash",
"interactive_speed": "Interactive speed", "interactive_speed": "Interactive speed",
"o_count": "O Count",
"performer_card": { "performer_card": {
"age": "{age} {years_old}", "age": "{age} {years_old}",
"age_context": "{age} {years_old} in this scene" "age_context": "{age} {years_old} in this scene"
@ -1080,6 +1090,8 @@
"new": "New", "new": "New",
"none": "None", "none": "None",
"o_counter": "O-Counter", "o_counter": "O-Counter",
"o_history": "O History",
"odate_recorded_no": "No O Date Recorded",
"operations": "Operations", "operations": "Operations",
"organized": "Organised", "organized": "Organised",
"orientation": "Orientation", "orientation": "Orientation",
@ -1177,6 +1189,8 @@
"piercings": "Piercings", "piercings": "Piercings",
"play_count": "Play Count", "play_count": "Play Count",
"play_duration": "Play Duration", "play_duration": "Play Duration",
"play_history": "Play History",
"playdate_recorded_no": "No Play Date Recorded",
"plays": "{value} plays", "plays": "{value} plays",
"primary_file": "Primary file", "primary_file": "Primary file",
"primary_tag": "Primary Tag", "primary_tag": "Primary Tag",
@ -1401,6 +1415,7 @@
"true": "True", "true": "True",
"twitter": "Twitter", "twitter": "Twitter",
"type": "Type", "type": "Type",
"unknown_date": "Unknown date",
"updated_at": "Updated At", "updated_at": "Updated At",
"url": "URL", "url": "URL",
"urls": "URLs", "urls": "URLs",