Scene play and o-counter history view and editing (#4532)

Co-authored-by: randemgame <61895715+randemgame@users.noreply.github.com>
This commit is contained in:
WithoutPants 2024-02-22 11:28:18 +11:00 committed by GitHub
parent 0c2a2190e5
commit a303446bb7
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
51 changed files with 3581 additions and 564 deletions

View file

@ -237,9 +237,15 @@ type Mutation {
scenesUpdate(input: [SceneUpdateInput!]!): [Scene]
"Increments the o-counter for a scene. Returns the new value"
sceneIncrementO(id: ID!): Int!
sceneIncrementO(id: ID!): Int! @deprecated(reason: "Use sceneAddO instead")
"Decrements the o-counter for a scene. Returns the new value"
sceneDecrementO(id: ID!): Int!
sceneDecrementO(id: ID!): Int! @deprecated(reason: "Use sceneDeleteO instead")
"Increments the o-counter for a scene. Uses the current time if none provided."
sceneAddO(id: ID!, times: [Timestamp!]): HistoryMutationResult!
"Decrements the o-counter for a scene, removing the specified times or the last recorded time if none provided"
sceneDeleteO(id: ID!, times: [Timestamp!]): HistoryMutationResult!
"Resets the o-counter for a scene to 0. Returns the new value"
sceneResetO(id: ID!): Int!
@ -248,6 +254,14 @@ type Mutation {
"Increments the play count for the scene. Returns the new play count value."
sceneIncrementPlayCount(id: ID!): Int!
@deprecated(reason: "Use sceneAddPlay instead")
"Increments the play count for the scene. Uses the current time if none provided."
sceneAddPlay(id: ID!, times: [Timestamp!]): HistoryMutationResult!
"Decrements the play count for the scene, removing the specific times or the last recorded time if not provided."
sceneDeletePlay(id: ID!, times: [Timestamp!]): HistoryMutationResult!
"Resets the play count for a scene to 0. Returns the new play count value."
sceneResetPlayCount(id: ID!): Int!
"Generates screenshot at specified time in seconds. Leave empty to generate default screenshot"
sceneGenerateScreenshot(id: ID!, at: Float): String!

View file

@ -58,6 +58,11 @@ type Scene {
"The number of times a scene has been played"
play_count: Int
"Times a scene was played"
play_history: [Time!]!
"Times the o counter was incremented"
o_history: [Time!]!
files: [VideoFile!]!
paths: ScenePathsType! # Resolver
scene_markers: [SceneMarker!]!
@ -118,6 +123,7 @@ input SceneUpdateInput {
# rating expressed as 1-100
rating100: Int
o_counter: Int
@deprecated(reason: "Unsupported - Use sceneIncrementO/sceneDecrementO")
organized: Boolean
studio_id: ID
gallery_ids: [ID!]
@ -134,6 +140,9 @@ input SceneUpdateInput {
play_duration: Float
"The number of times a scene has been played"
play_count: Int
@deprecated(
reason: "Unsupported - Use sceneAddPlay/sceneDeletePlay"
)
primary_file_id: ID
}
@ -251,4 +260,13 @@ input SceneMergeInput {
destination: ID!
# values defined here will override values in the destination
values: SceneUpdateInput
# if true, the source history will be combined with the destination
play_history: Boolean
o_history: Boolean
}
type HistoryMutationResult {
count: Int!
history: [Time!]!
}

View file

@ -9,7 +9,11 @@
//go:generate go run github.com/vektah/dataloaden SceneFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
//go:generate go run github.com/vektah/dataloaden ImageFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
//go:generate go run github.com/vektah/dataloaden GalleryFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID
//go:generate go run github.com/vektah/dataloaden SceneOCountLoader int int
//go:generate go run github.com/vektah/dataloaden ScenePlayCountLoader int int
//go:generate go run github.com/vektah/dataloaden SceneOHistoryLoader int []time.Time
//go:generate go run github.com/vektah/dataloaden ScenePlayHistoryLoader int []time.Time
//go:generate go run github.com/vektah/dataloaden SceneLastPlayedLoader int *time.Time
package loaders
import (
@ -34,6 +38,12 @@ const (
type Loaders struct {
SceneByID *SceneLoader
SceneFiles *SceneFileIDsLoader
ScenePlayCount *ScenePlayCountLoader
SceneOCount *SceneOCountLoader
ScenePlayHistory *ScenePlayHistoryLoader
SceneOHistory *SceneOHistoryLoader
SceneLastPlayed *SceneLastPlayedLoader
ImageFiles *ImageFileIDsLoader
GalleryFiles *GalleryFileIDsLoader
@ -109,6 +119,31 @@ func (m Middleware) Middleware(next http.Handler) http.Handler {
maxBatch: maxBatch,
fetch: m.fetchGalleriesFileIDs(ctx),
},
ScenePlayCount: &ScenePlayCountLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchScenesPlayCount(ctx),
},
SceneOCount: &SceneOCountLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchScenesOCount(ctx),
},
ScenePlayHistory: &ScenePlayHistoryLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchScenesPlayHistory(ctx),
},
SceneLastPlayed: &SceneLastPlayedLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchScenesLastPlayed(ctx),
},
SceneOHistory: &SceneOHistoryLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchScenesOHistory(ctx),
},
}
newCtx := context.WithValue(r.Context(), loadersCtxKey, ldrs)
@ -251,3 +286,58 @@ func (m Middleware) fetchGalleriesFileIDs(ctx context.Context) func(keys []int)
return ret, toErrorSlice(err)
}
}
// fetchScenesOCount returns the dataloader fetch callback that resolves the
// o-count for a batch of scene IDs with a single repository query.
func (m Middleware) fetchScenesOCount(ctx context.Context) func(keys []int) ([]int, []error) {
	return func(keys []int) ([]int, []error) {
		var counts []int
		dbErr := m.Repository.WithDB(ctx, func(ctx context.Context) error {
			var err error
			counts, err = m.Repository.Scene.GetManyOCount(ctx, keys)
			return err
		})
		return counts, toErrorSlice(dbErr)
	}
}
// fetchScenesPlayCount returns the dataloader fetch callback that resolves the
// view (play) count for a batch of scene IDs with a single repository query.
func (m Middleware) fetchScenesPlayCount(ctx context.Context) func(keys []int) ([]int, []error) {
	return func(keys []int) ([]int, []error) {
		var counts []int
		dbErr := m.Repository.WithDB(ctx, func(ctx context.Context) error {
			var err error
			counts, err = m.Repository.Scene.GetManyViewCount(ctx, keys)
			return err
		})
		return counts, toErrorSlice(dbErr)
	}
}
// fetchScenesOHistory returns the dataloader fetch callback that resolves the
// o-date history for a batch of scene IDs with a single repository query.
func (m Middleware) fetchScenesOHistory(ctx context.Context) func(keys []int) ([][]time.Time, []error) {
	return func(keys []int) ([][]time.Time, []error) {
		var dates [][]time.Time
		dbErr := m.Repository.WithDB(ctx, func(ctx context.Context) error {
			var err error
			dates, err = m.Repository.Scene.GetManyODates(ctx, keys)
			return err
		})
		return dates, toErrorSlice(dbErr)
	}
}
// fetchScenesPlayHistory returns the dataloader fetch callback that resolves
// the view-date history for a batch of scene IDs with a single repository query.
func (m Middleware) fetchScenesPlayHistory(ctx context.Context) func(keys []int) ([][]time.Time, []error) {
	return func(keys []int) ([][]time.Time, []error) {
		var dates [][]time.Time
		dbErr := m.Repository.WithDB(ctx, func(ctx context.Context) error {
			var err error
			dates, err = m.Repository.Scene.GetManyViewDates(ctx, keys)
			return err
		})
		return dates, toErrorSlice(dbErr)
	}
}
// fetchScenesLastPlayed returns the dataloader fetch callback that resolves
// the last-viewed time (nil if never viewed) for a batch of scene IDs with a
// single repository query.
func (m Middleware) fetchScenesLastPlayed(ctx context.Context) func(keys []int) ([]*time.Time, []error) {
	return func(keys []int) ([]*time.Time, []error) {
		var last []*time.Time
		dbErr := m.Repository.WithDB(ctx, func(ctx context.Context) error {
			var err error
			last, err = m.Repository.Scene.GetManyLastViewed(ctx, keys)
			return err
		})
		return last, toErrorSlice(dbErr)
	}
}

View file

@ -0,0 +1,222 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
// NOTE(review): comment typos below are fixed against the generator template;
// regenerating will reintroduce them.

package loaders

import (
	"sync"
	"time"
)

// SceneLastPlayedLoaderConfig captures the config to create a new SceneLastPlayedLoader.
type SceneLastPlayedLoaderConfig struct {
	// Fetch is a method that provides the data for the loader
	Fetch func(keys []int) ([]*time.Time, []error)

	// Wait is how long to wait before sending a batch
	Wait time.Duration

	// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
	MaxBatch int
}

// NewSceneLastPlayedLoader creates a new SceneLastPlayedLoader given a fetch, wait, and maxBatch
func NewSceneLastPlayedLoader(config SceneLastPlayedLoaderConfig) *SceneLastPlayedLoader {
	return &SceneLastPlayedLoader{
		fetch:    config.Fetch,
		wait:     config.Wait,
		maxBatch: config.MaxBatch,
	}
}

// SceneLastPlayedLoader batches and caches requests
type SceneLastPlayedLoader struct {
	// this method provides the data for the loader
	fetch func(keys []int) ([]*time.Time, []error)

	// how long to wait before sending a batch
	wait time.Duration

	// this will limit the maximum number of keys to send in one batch, 0 = no limit
	maxBatch int

	// INTERNAL

	// lazily created cache
	cache map[int]*time.Time

	// the current batch. keys will continue to be collected until timeout is hit,
	// then everything will be sent to the fetch method and out to the listeners
	batch *sceneLastPlayedLoaderBatch

	// mutex to prevent races
	mu sync.Mutex
}

type sceneLastPlayedLoaderBatch struct {
	keys    []int
	data    []*time.Time
	error   []error
	closing bool
	done    chan struct{}
}

// Load a Time by key, batching and caching will be applied automatically
func (l *SceneLastPlayedLoader) Load(key int) (*time.Time, error) {
	return l.LoadThunk(key)()
}

// LoadThunk returns a function that when called will block waiting for a Time.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *SceneLastPlayedLoader) LoadThunk(key int) func() (*time.Time, error) {
	l.mu.Lock()
	if it, ok := l.cache[key]; ok {
		l.mu.Unlock()
		return func() (*time.Time, error) {
			return it, nil
		}
	}
	if l.batch == nil {
		l.batch = &sceneLastPlayedLoaderBatch{done: make(chan struct{})}
	}
	batch := l.batch
	pos := batch.keyIndex(l, key)
	l.mu.Unlock()

	return func() (*time.Time, error) {
		// blocks until the batch has been fetched
		<-batch.done

		var data *time.Time
		if pos < len(batch.data) {
			data = batch.data[pos]
		}

		var err error
		// it's convenient to be able to return a single error for everything
		if len(batch.error) == 1 {
			err = batch.error[0]
		} else if batch.error != nil {
			err = batch.error[pos]
		}

		if err == nil {
			l.mu.Lock()
			l.unsafeSet(key, data)
			l.mu.Unlock()
		}

		return data, err
	}
}

// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *SceneLastPlayedLoader) LoadAll(keys []int) ([]*time.Time, []error) {
	results := make([]func() (*time.Time, error), len(keys))

	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}

	times := make([]*time.Time, len(keys))
	errors := make([]error, len(keys))
	for i, thunk := range results {
		times[i], errors[i] = thunk()
	}
	return times, errors
}

// LoadAllThunk returns a function that when called will block waiting for the results.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *SceneLastPlayedLoader) LoadAllThunk(keys []int) func() ([]*time.Time, []error) {
	results := make([]func() (*time.Time, error), len(keys))
	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}
	return func() ([]*time.Time, []error) {
		times := make([]*time.Time, len(keys))
		errors := make([]error, len(keys))
		for i, thunk := range results {
			times[i], errors[i] = thunk()
		}
		return times, errors
	}
}

// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *SceneLastPlayedLoader) Prime(key int, value *time.Time) bool {
	l.mu.Lock()
	var found bool
	if _, found = l.cache[key]; !found {
		// make a copy when writing to the cache, it's easy to pass a pointer in from a loop var
		// and end up with the whole cache pointing to the same value.
		cpy := *value
		l.unsafeSet(key, &cpy)
	}
	l.mu.Unlock()
	return !found
}

// Clear the value at key from the cache, if it exists
func (l *SceneLastPlayedLoader) Clear(key int) {
	l.mu.Lock()
	delete(l.cache, key)
	l.mu.Unlock()
}

// unsafeSet writes to the cache; caller must hold l.mu.
func (l *SceneLastPlayedLoader) unsafeSet(key int, value *time.Time) {
	if l.cache == nil {
		l.cache = map[int]*time.Time{}
	}
	l.cache[key] = value
}

// keyIndex will return the location of the key in the batch, if it's not found
// it will add the key to the batch
func (b *sceneLastPlayedLoaderBatch) keyIndex(l *SceneLastPlayedLoader, key int) int {
	for i, existingKey := range b.keys {
		if key == existingKey {
			return i
		}
	}

	pos := len(b.keys)
	b.keys = append(b.keys, key)
	if pos == 0 {
		// first key in the batch starts the flush timer
		go b.startTimer(l)
	}

	if l.maxBatch != 0 && pos >= l.maxBatch-1 {
		if !b.closing {
			b.closing = true
			l.batch = nil
			go b.end(l)
		}
	}

	return pos
}

func (b *sceneLastPlayedLoaderBatch) startTimer(l *SceneLastPlayedLoader) {
	time.Sleep(l.wait)
	l.mu.Lock()

	// we must have hit a batch limit and are already finalizing this batch
	if b.closing {
		l.mu.Unlock()
		return
	}

	l.batch = nil
	l.mu.Unlock()

	b.end(l)
}

func (b *sceneLastPlayedLoaderBatch) end(l *SceneLastPlayedLoader) {
	b.data, b.error = l.fetch(b.keys)
	close(b.done)
}

View file

@ -0,0 +1,219 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
// NOTE(review): comment typos below are fixed against the generator template;
// regenerating will reintroduce them.

package loaders

import (
	"sync"
	"time"
)

// SceneOCountLoaderConfig captures the config to create a new SceneOCountLoader.
type SceneOCountLoaderConfig struct {
	// Fetch is a method that provides the data for the loader
	Fetch func(keys []int) ([]int, []error)

	// Wait is how long to wait before sending a batch
	Wait time.Duration

	// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
	MaxBatch int
}

// NewSceneOCountLoader creates a new SceneOCountLoader given a fetch, wait, and maxBatch
func NewSceneOCountLoader(config SceneOCountLoaderConfig) *SceneOCountLoader {
	return &SceneOCountLoader{
		fetch:    config.Fetch,
		wait:     config.Wait,
		maxBatch: config.MaxBatch,
	}
}

// SceneOCountLoader batches and caches requests
type SceneOCountLoader struct {
	// this method provides the data for the loader
	fetch func(keys []int) ([]int, []error)

	// how long to wait before sending a batch
	wait time.Duration

	// this will limit the maximum number of keys to send in one batch, 0 = no limit
	maxBatch int

	// INTERNAL

	// lazily created cache
	cache map[int]int

	// the current batch. keys will continue to be collected until timeout is hit,
	// then everything will be sent to the fetch method and out to the listeners
	batch *sceneOCountLoaderBatch

	// mutex to prevent races
	mu sync.Mutex
}

type sceneOCountLoaderBatch struct {
	keys    []int
	data    []int
	error   []error
	closing bool
	done    chan struct{}
}

// Load an int by key, batching and caching will be applied automatically
func (l *SceneOCountLoader) Load(key int) (int, error) {
	return l.LoadThunk(key)()
}

// LoadThunk returns a function that when called will block waiting for an int.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *SceneOCountLoader) LoadThunk(key int) func() (int, error) {
	l.mu.Lock()
	if it, ok := l.cache[key]; ok {
		l.mu.Unlock()
		return func() (int, error) {
			return it, nil
		}
	}
	if l.batch == nil {
		l.batch = &sceneOCountLoaderBatch{done: make(chan struct{})}
	}
	batch := l.batch
	pos := batch.keyIndex(l, key)
	l.mu.Unlock()

	return func() (int, error) {
		// blocks until the batch has been fetched
		<-batch.done

		var data int
		if pos < len(batch.data) {
			data = batch.data[pos]
		}

		var err error
		// it's convenient to be able to return a single error for everything
		if len(batch.error) == 1 {
			err = batch.error[0]
		} else if batch.error != nil {
			err = batch.error[pos]
		}

		if err == nil {
			l.mu.Lock()
			l.unsafeSet(key, data)
			l.mu.Unlock()
		}

		return data, err
	}
}

// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *SceneOCountLoader) LoadAll(keys []int) ([]int, []error) {
	results := make([]func() (int, error), len(keys))

	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}

	ints := make([]int, len(keys))
	errors := make([]error, len(keys))
	for i, thunk := range results {
		ints[i], errors[i] = thunk()
	}
	return ints, errors
}

// LoadAllThunk returns a function that when called will block waiting for the results.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *SceneOCountLoader) LoadAllThunk(keys []int) func() ([]int, []error) {
	results := make([]func() (int, error), len(keys))
	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}
	return func() ([]int, []error) {
		ints := make([]int, len(keys))
		errors := make([]error, len(keys))
		for i, thunk := range results {
			ints[i], errors[i] = thunk()
		}
		return ints, errors
	}
}

// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *SceneOCountLoader) Prime(key int, value int) bool {
	l.mu.Lock()
	var found bool
	if _, found = l.cache[key]; !found {
		l.unsafeSet(key, value)
	}
	l.mu.Unlock()
	return !found
}

// Clear the value at key from the cache, if it exists
func (l *SceneOCountLoader) Clear(key int) {
	l.mu.Lock()
	delete(l.cache, key)
	l.mu.Unlock()
}

// unsafeSet writes to the cache; caller must hold l.mu.
func (l *SceneOCountLoader) unsafeSet(key int, value int) {
	if l.cache == nil {
		l.cache = map[int]int{}
	}
	l.cache[key] = value
}

// keyIndex will return the location of the key in the batch, if it's not found
// it will add the key to the batch
func (b *sceneOCountLoaderBatch) keyIndex(l *SceneOCountLoader, key int) int {
	for i, existingKey := range b.keys {
		if key == existingKey {
			return i
		}
	}

	pos := len(b.keys)
	b.keys = append(b.keys, key)
	if pos == 0 {
		// first key in the batch starts the flush timer
		go b.startTimer(l)
	}

	if l.maxBatch != 0 && pos >= l.maxBatch-1 {
		if !b.closing {
			b.closing = true
			l.batch = nil
			go b.end(l)
		}
	}

	return pos
}

func (b *sceneOCountLoaderBatch) startTimer(l *SceneOCountLoader) {
	time.Sleep(l.wait)
	l.mu.Lock()

	// we must have hit a batch limit and are already finalizing this batch
	if b.closing {
		l.mu.Unlock()
		return
	}

	l.batch = nil
	l.mu.Unlock()

	b.end(l)
}

func (b *sceneOCountLoaderBatch) end(l *SceneOCountLoader) {
	b.data, b.error = l.fetch(b.keys)
	close(b.done)
}

View file

@ -0,0 +1,223 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
// NOTE(review): comment typos below are fixed against the generator template;
// regenerating will reintroduce them.

package loaders

import (
	"sync"
	"time"
)

// SceneOHistoryLoaderConfig captures the config to create a new SceneOHistoryLoader.
type SceneOHistoryLoaderConfig struct {
	// Fetch is a method that provides the data for the loader
	Fetch func(keys []int) ([][]time.Time, []error)

	// Wait is how long to wait before sending a batch
	Wait time.Duration

	// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
	MaxBatch int
}

// NewSceneOHistoryLoader creates a new SceneOHistoryLoader given a fetch, wait, and maxBatch
func NewSceneOHistoryLoader(config SceneOHistoryLoaderConfig) *SceneOHistoryLoader {
	return &SceneOHistoryLoader{
		fetch:    config.Fetch,
		wait:     config.Wait,
		maxBatch: config.MaxBatch,
	}
}

// SceneOHistoryLoader batches and caches requests
type SceneOHistoryLoader struct {
	// this method provides the data for the loader
	fetch func(keys []int) ([][]time.Time, []error)

	// how long to wait before sending a batch
	wait time.Duration

	// this will limit the maximum number of keys to send in one batch, 0 = no limit
	maxBatch int

	// INTERNAL

	// lazily created cache
	cache map[int][]time.Time

	// the current batch. keys will continue to be collected until timeout is hit,
	// then everything will be sent to the fetch method and out to the listeners
	batch *sceneOHistoryLoaderBatch

	// mutex to prevent races
	mu sync.Mutex
}

type sceneOHistoryLoaderBatch struct {
	keys    []int
	data    [][]time.Time
	error   []error
	closing bool
	done    chan struct{}
}

// Load a Time slice by key, batching and caching will be applied automatically
func (l *SceneOHistoryLoader) Load(key int) ([]time.Time, error) {
	return l.LoadThunk(key)()
}

// LoadThunk returns a function that when called will block waiting for a Time slice.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *SceneOHistoryLoader) LoadThunk(key int) func() ([]time.Time, error) {
	l.mu.Lock()
	if it, ok := l.cache[key]; ok {
		l.mu.Unlock()
		return func() ([]time.Time, error) {
			return it, nil
		}
	}
	if l.batch == nil {
		l.batch = &sceneOHistoryLoaderBatch{done: make(chan struct{})}
	}
	batch := l.batch
	pos := batch.keyIndex(l, key)
	l.mu.Unlock()

	return func() ([]time.Time, error) {
		// blocks until the batch has been fetched
		<-batch.done

		var data []time.Time
		if pos < len(batch.data) {
			data = batch.data[pos]
		}

		var err error
		// it's convenient to be able to return a single error for everything
		if len(batch.error) == 1 {
			err = batch.error[0]
		} else if batch.error != nil {
			err = batch.error[pos]
		}

		if err == nil {
			l.mu.Lock()
			l.unsafeSet(key, data)
			l.mu.Unlock()
		}

		return data, err
	}
}

// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *SceneOHistoryLoader) LoadAll(keys []int) ([][]time.Time, []error) {
	results := make([]func() ([]time.Time, error), len(keys))

	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}

	times := make([][]time.Time, len(keys))
	errors := make([]error, len(keys))
	for i, thunk := range results {
		times[i], errors[i] = thunk()
	}
	return times, errors
}

// LoadAllThunk returns a function that when called will block waiting for the results.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *SceneOHistoryLoader) LoadAllThunk(keys []int) func() ([][]time.Time, []error) {
	results := make([]func() ([]time.Time, error), len(keys))
	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}
	return func() ([][]time.Time, []error) {
		times := make([][]time.Time, len(keys))
		errors := make([]error, len(keys))
		for i, thunk := range results {
			times[i], errors[i] = thunk()
		}
		return times, errors
	}
}

// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *SceneOHistoryLoader) Prime(key int, value []time.Time) bool {
	l.mu.Lock()
	var found bool
	if _, found = l.cache[key]; !found {
		// make a copy when writing to the cache, it's easy to pass a pointer in from a loop var
		// and end up with the whole cache pointing to the same value.
		cpy := make([]time.Time, len(value))
		copy(cpy, value)
		l.unsafeSet(key, cpy)
	}
	l.mu.Unlock()
	return !found
}

// Clear the value at key from the cache, if it exists
func (l *SceneOHistoryLoader) Clear(key int) {
	l.mu.Lock()
	delete(l.cache, key)
	l.mu.Unlock()
}

// unsafeSet writes to the cache; caller must hold l.mu.
func (l *SceneOHistoryLoader) unsafeSet(key int, value []time.Time) {
	if l.cache == nil {
		l.cache = map[int][]time.Time{}
	}
	l.cache[key] = value
}

// keyIndex will return the location of the key in the batch, if it's not found
// it will add the key to the batch
func (b *sceneOHistoryLoaderBatch) keyIndex(l *SceneOHistoryLoader, key int) int {
	for i, existingKey := range b.keys {
		if key == existingKey {
			return i
		}
	}

	pos := len(b.keys)
	b.keys = append(b.keys, key)
	if pos == 0 {
		// first key in the batch starts the flush timer
		go b.startTimer(l)
	}

	if l.maxBatch != 0 && pos >= l.maxBatch-1 {
		if !b.closing {
			b.closing = true
			l.batch = nil
			go b.end(l)
		}
	}

	return pos
}

func (b *sceneOHistoryLoaderBatch) startTimer(l *SceneOHistoryLoader) {
	time.Sleep(l.wait)
	l.mu.Lock()

	// we must have hit a batch limit and are already finalizing this batch
	if b.closing {
		l.mu.Unlock()
		return
	}

	l.batch = nil
	l.mu.Unlock()

	b.end(l)
}

func (b *sceneOHistoryLoaderBatch) end(l *SceneOHistoryLoader) {
	b.data, b.error = l.fetch(b.keys)
	close(b.done)
}

View file

@ -0,0 +1,219 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
// NOTE(review): comment typos below are fixed against the generator template;
// regenerating will reintroduce them.

package loaders

import (
	"sync"
	"time"
)

// ScenePlayCountLoaderConfig captures the config to create a new ScenePlayCountLoader.
type ScenePlayCountLoaderConfig struct {
	// Fetch is a method that provides the data for the loader
	Fetch func(keys []int) ([]int, []error)

	// Wait is how long to wait before sending a batch
	Wait time.Duration

	// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
	MaxBatch int
}

// NewScenePlayCountLoader creates a new ScenePlayCountLoader given a fetch, wait, and maxBatch
func NewScenePlayCountLoader(config ScenePlayCountLoaderConfig) *ScenePlayCountLoader {
	return &ScenePlayCountLoader{
		fetch:    config.Fetch,
		wait:     config.Wait,
		maxBatch: config.MaxBatch,
	}
}

// ScenePlayCountLoader batches and caches requests
type ScenePlayCountLoader struct {
	// this method provides the data for the loader
	fetch func(keys []int) ([]int, []error)

	// how long to wait before sending a batch
	wait time.Duration

	// this will limit the maximum number of keys to send in one batch, 0 = no limit
	maxBatch int

	// INTERNAL

	// lazily created cache
	cache map[int]int

	// the current batch. keys will continue to be collected until timeout is hit,
	// then everything will be sent to the fetch method and out to the listeners
	batch *scenePlayCountLoaderBatch

	// mutex to prevent races
	mu sync.Mutex
}

type scenePlayCountLoaderBatch struct {
	keys    []int
	data    []int
	error   []error
	closing bool
	done    chan struct{}
}

// Load an int by key, batching and caching will be applied automatically
func (l *ScenePlayCountLoader) Load(key int) (int, error) {
	return l.LoadThunk(key)()
}

// LoadThunk returns a function that when called will block waiting for an int.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *ScenePlayCountLoader) LoadThunk(key int) func() (int, error) {
	l.mu.Lock()
	if it, ok := l.cache[key]; ok {
		l.mu.Unlock()
		return func() (int, error) {
			return it, nil
		}
	}
	if l.batch == nil {
		l.batch = &scenePlayCountLoaderBatch{done: make(chan struct{})}
	}
	batch := l.batch
	pos := batch.keyIndex(l, key)
	l.mu.Unlock()

	return func() (int, error) {
		// blocks until the batch has been fetched
		<-batch.done

		var data int
		if pos < len(batch.data) {
			data = batch.data[pos]
		}

		var err error
		// it's convenient to be able to return a single error for everything
		if len(batch.error) == 1 {
			err = batch.error[0]
		} else if batch.error != nil {
			err = batch.error[pos]
		}

		if err == nil {
			l.mu.Lock()
			l.unsafeSet(key, data)
			l.mu.Unlock()
		}

		return data, err
	}
}

// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *ScenePlayCountLoader) LoadAll(keys []int) ([]int, []error) {
	results := make([]func() (int, error), len(keys))

	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}

	ints := make([]int, len(keys))
	errors := make([]error, len(keys))
	for i, thunk := range results {
		ints[i], errors[i] = thunk()
	}
	return ints, errors
}

// LoadAllThunk returns a function that when called will block waiting for the results.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *ScenePlayCountLoader) LoadAllThunk(keys []int) func() ([]int, []error) {
	results := make([]func() (int, error), len(keys))
	for i, key := range keys {
		results[i] = l.LoadThunk(key)
	}
	return func() ([]int, []error) {
		ints := make([]int, len(keys))
		errors := make([]error, len(keys))
		for i, thunk := range results {
			ints[i], errors[i] = thunk()
		}
		return ints, errors
	}
}

// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *ScenePlayCountLoader) Prime(key int, value int) bool {
	l.mu.Lock()
	var found bool
	if _, found = l.cache[key]; !found {
		l.unsafeSet(key, value)
	}
	l.mu.Unlock()
	return !found
}

// Clear the value at key from the cache, if it exists
func (l *ScenePlayCountLoader) Clear(key int) {
	l.mu.Lock()
	delete(l.cache, key)
	l.mu.Unlock()
}

// unsafeSet writes to the cache; caller must hold l.mu.
func (l *ScenePlayCountLoader) unsafeSet(key int, value int) {
	if l.cache == nil {
		l.cache = map[int]int{}
	}
	l.cache[key] = value
}

// keyIndex will return the location of the key in the batch, if it's not found
// it will add the key to the batch
func (b *scenePlayCountLoaderBatch) keyIndex(l *ScenePlayCountLoader, key int) int {
	for i, existingKey := range b.keys {
		if key == existingKey {
			return i
		}
	}

	pos := len(b.keys)
	b.keys = append(b.keys, key)
	if pos == 0 {
		// first key in the batch starts the flush timer
		go b.startTimer(l)
	}

	if l.maxBatch != 0 && pos >= l.maxBatch-1 {
		if !b.closing {
			b.closing = true
			l.batch = nil
			go b.end(l)
		}
	}

	return pos
}

func (b *scenePlayCountLoaderBatch) startTimer(l *ScenePlayCountLoader) {
	time.Sleep(l.wait)
	l.mu.Lock()

	// we must have hit a batch limit and are already finalizing this batch
	if b.closing {
		l.mu.Unlock()
		return
	}

	l.batch = nil
	l.mu.Unlock()

	b.end(l)
}

func (b *scenePlayCountLoaderBatch) end(l *ScenePlayCountLoader) {
	b.data, b.error = l.fetch(b.keys)
	close(b.done)
}

View file

@ -0,0 +1,223 @@
// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
package loaders
import (
"sync"
"time"
)
// ScenePlayHistoryLoaderConfig captures the config to create a new ScenePlayHistoryLoader
type ScenePlayHistoryLoaderConfig struct {
// Fetch is a method that provides the data for the loader
Fetch func(keys []int) ([][]time.Time, []error)
// Wait is how long wait before sending a batch
Wait time.Duration
// MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit
MaxBatch int
}
// NewScenePlayHistoryLoader creates a new ScenePlayHistoryLoader given a fetch, wait, and maxBatch
func NewScenePlayHistoryLoader(config ScenePlayHistoryLoaderConfig) *ScenePlayHistoryLoader {
return &ScenePlayHistoryLoader{
fetch: config.Fetch,
wait: config.Wait,
maxBatch: config.MaxBatch,
}
}
// ScenePlayHistoryLoader batches and caches requests
type ScenePlayHistoryLoader struct {
// this method provides the data for the loader
fetch func(keys []int) ([][]time.Time, []error)
// how long to done before sending a batch
wait time.Duration
// this will limit the maximum number of keys to send in one batch, 0 = no limit
maxBatch int
// INTERNAL
// lazily created cache
cache map[int][]time.Time
// the current batch. keys will continue to be collected until timeout is hit,
// then everything will be sent to the fetch method and out to the listeners
batch *scenePlayHistoryLoaderBatch
// mutex to prevent races
mu sync.Mutex
}
type scenePlayHistoryLoaderBatch struct {
keys []int
data [][]time.Time
error []error
closing bool
done chan struct{}
}
// Load a Time by key, batching and caching will be applied automatically
func (l *ScenePlayHistoryLoader) Load(key int) ([]time.Time, error) {
return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for a Time.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *ScenePlayHistoryLoader) LoadThunk(key int) func() ([]time.Time, error) {
l.mu.Lock()
if it, ok := l.cache[key]; ok {
l.mu.Unlock()
return func() ([]time.Time, error) {
return it, nil
}
}
if l.batch == nil {
l.batch = &scenePlayHistoryLoaderBatch{done: make(chan struct{})}
}
batch := l.batch
pos := batch.keyIndex(l, key)
l.mu.Unlock()
return func() ([]time.Time, error) {
<-batch.done
var data []time.Time
if pos < len(batch.data) {
data = batch.data[pos]
}
var err error
// its convenient to be able to return a single error for everything
if len(batch.error) == 1 {
err = batch.error[0]
} else if batch.error != nil {
err = batch.error[pos]
}
if err == nil {
l.mu.Lock()
l.unsafeSet(key, data)
l.mu.Unlock()
}
return data, err
}
}
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *ScenePlayHistoryLoader) LoadAll(keys []int) ([][]time.Time, []error) {
results := make([]func() ([]time.Time, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
times := make([][]time.Time, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
times[i], errors[i] = thunk()
}
return times, errors
}
// LoadAllThunk returns a function that when called blocks until the play
// histories for all keys are available. Use it to issue requests to several
// data loaders from one goroutine without blocking until the thunk is called.
func (l *ScenePlayHistoryLoader) LoadAllThunk(keys []int) func() ([][]time.Time, []error) {
	thunks := make([]func() ([]time.Time, error), len(keys))
	for i, k := range keys {
		thunks[i] = l.LoadThunk(k)
	}
	return func() ([][]time.Time, []error) {
		out := make([][]time.Time, len(keys))
		errs := make([]error, len(keys))
		for i := range thunks {
			out[i], errs[i] = thunks[i]()
		}
		return out, errs
	}
}
// Prime inserts value into the cache under key, unless the key is already
// present. It reports whether the value was stored. (To forcefully prime the
// cache, clear the key first with loader.Clear(key) and then prime it.)
func (l *ScenePlayHistoryLoader) Prime(key int, value []time.Time) bool {
	l.mu.Lock()
	defer l.mu.Unlock()
	if _, exists := l.cache[key]; exists {
		return false
	}
	// cache a copy: callers often pass a slice from a loop variable, and
	// storing it directly would leave every entry aliasing the same backing array
	cpy := make([]time.Time, len(value))
	copy(cpy, value)
	l.unsafeSet(key, cpy)
	return true
}
// Clear removes the cached value for key, if any.
func (l *ScenePlayHistoryLoader) Clear(key int) {
	l.mu.Lock()
	defer l.mu.Unlock()
	delete(l.cache, key)
}
// unsafeSet stores value under key, allocating the cache map on first use.
// Callers must hold l.mu (hence "unsafe").
func (l *ScenePlayHistoryLoader) unsafeSet(key int, value []time.Time) {
	if l.cache == nil {
		l.cache = make(map[int][]time.Time)
	}
	l.cache[key] = value
}
// keyIndex will return the location of the key in the batch, if its not found
// it will add the key to the batch.
// It must be called with l.mu held: it mutates the batch and may detach it
// from the loader when the batch reaches capacity.
func (b *scenePlayHistoryLoaderBatch) keyIndex(l *ScenePlayHistoryLoader, key int) int {
	// reuse the existing slot if the key is already queued in this batch
	for i, existingKey := range b.keys {
		if key == existingKey {
			return i
		}
	}
	pos := len(b.keys)
	b.keys = append(b.keys, key)
	if pos == 0 {
		// first key in the batch: start the timer that flushes it after l.wait
		go b.startTimer(l)
	}
	// batch is full: detach it from the loader and fetch immediately
	if l.maxBatch != 0 && pos >= l.maxBatch-1 {
		if !b.closing {
			b.closing = true
			l.batch = nil
			go b.end(l)
		}
	}
	return pos
}
// startTimer flushes the batch after the loader's wait duration elapses,
// unless the batch was already finalized by hitting the maxBatch limit.
func (b *scenePlayHistoryLoaderBatch) startTimer(l *ScenePlayHistoryLoader) {
	time.Sleep(l.wait)
	l.mu.Lock()
	// we must have hit a batch limit and are already finalizing this batch
	if b.closing {
		l.mu.Unlock()
		return
	}
	// detach the batch so new keys start a fresh one, then fetch outside the lock
	l.batch = nil
	l.mu.Unlock()
	b.end(l)
}
// end performs the batched fetch for all queued keys and signals completion
// to every waiting thunk by closing done.
func (b *scenePlayHistoryLoaderBatch) end(l *ScenePlayHistoryLoader) {
	b.data, b.error = l.fetch(b.keys)
	close(b.done)
}

View file

@ -228,7 +228,7 @@ func (r *queryResolver) Stats(ctx context.Context) (*StatsResultType, error) {
return err
}
scenesTotalOCount, err := sceneQB.OCount(ctx)
scenesTotalOCount, err := sceneQB.GetAllOCount(ctx)
if err != nil {
return err
}
@ -243,12 +243,12 @@ func (r *queryResolver) Stats(ctx context.Context) (*StatsResultType, error) {
return err
}
totalPlayCount, err := sceneQB.PlayCount(ctx)
totalPlayCount, err := sceneQB.CountAllViews(ctx)
if err != nil {
return err
}
uniqueScenePlayCount, err := sceneQB.UniqueScenePlayCount(ctx)
uniqueScenePlayCount, err := sceneQB.CountUniqueViews(ctx)
if err != nil {
return err
}

View file

@ -3,6 +3,7 @@ package api
import (
"context"
"fmt"
"time"
"github.com/stashapp/stash/internal/api/loaders"
"github.com/stashapp/stash/internal/api/urlbuilders"
@ -319,3 +320,62 @@ func (r *sceneResolver) Urls(ctx context.Context, obj *models.Scene) ([]string,
return obj.URLs.List(), nil
}
// OCounter resolves the scene's o-counter via the batched SceneOCount dataloader.
func (r *sceneResolver) OCounter(ctx context.Context, obj *models.Scene) (*int, error) {
	ret, err := loaders.From(ctx).SceneOCount.Load(obj.ID)
	if err != nil {
		return nil, err
	}
	return &ret, nil
}
// LastPlayedAt resolves the scene's most recent view time via the batched
// SceneLastPlayed dataloader. The loader returns a *time.Time directly
// (nil when the scene has never been viewed — TODO confirm against loader).
func (r *sceneResolver) LastPlayedAt(ctx context.Context, obj *models.Scene) (*time.Time, error) {
	ret, err := loaders.From(ctx).SceneLastPlayed.Load(obj.ID)
	if err != nil {
		return nil, err
	}
	return ret, nil
}
// PlayCount resolves the scene's view count via the batched ScenePlayCount dataloader.
func (r *sceneResolver) PlayCount(ctx context.Context, obj *models.Scene) (*int, error) {
	ret, err := loaders.From(ctx).ScenePlayCount.Load(obj.ID)
	if err != nil {
		return nil, err
	}
	return &ret, nil
}
// PlayHistory resolves the scene's view timestamps via the batched
// ScenePlayHistory dataloader, converted to the pointer slice gqlgen expects.
func (r *sceneResolver) PlayHistory(ctx context.Context, obj *models.Scene) ([]*time.Time, error) {
	ret, err := loaders.From(ctx).ScenePlayHistory.Load(obj.ID)
	if err != nil {
		return nil, err
	}
	// convert to pointer slice
	ptrRet := make([]*time.Time, len(ret))
	for i, t := range ret {
		// copy the loop variable so each pointer refers to a distinct value
		tt := t
		ptrRet[i] = &tt
	}
	return ptrRet, nil
}
// OHistory resolves the scene's o-counter timestamps via the batched
// SceneOHistory dataloader, converted to the pointer slice gqlgen expects.
func (r *sceneResolver) OHistory(ctx context.Context, obj *models.Scene) ([]*time.Time, error) {
	ret, err := loaders.From(ctx).SceneOHistory.Load(obj.ID)
	if err != nil {
		return nil, err
	}
	// convert to pointer slice
	ptrRet := make([]*time.Time, len(ret))
	for i, t := range ret {
		// copy the loop variable so each pointer refers to a distinct value
		tt := t
		ptrRet[i] = &tt
	}
	return ptrRet, nil
}

View file

@ -5,9 +5,11 @@ import (
"errors"
"fmt"
"strconv"
"time"
"github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin"
"github.com/stashapp/stash/pkg/scene"
@ -169,8 +171,15 @@ func scenePartialFromInput(input models.SceneUpdateInput, translator changesetTr
updatedScene.Details = translator.optionalString(input.Details, "details")
updatedScene.Director = translator.optionalString(input.Director, "director")
updatedScene.Rating = translator.optionalInt(input.Rating100, "rating100")
updatedScene.OCounter = translator.optionalInt(input.OCounter, "o_counter")
updatedScene.PlayCount = translator.optionalInt(input.PlayCount, "play_count")
if input.OCounter != nil {
logger.Warnf("o_counter is deprecated and no longer supported, use sceneIncrementO/sceneDecrementO instead")
}
if input.PlayCount != nil {
logger.Warnf("play_count is deprecated and no longer supported, use sceneIncrementPlayCount/sceneDecrementPlayCount instead")
}
updatedScene.PlayDuration = translator.optionalFloat64(input.PlayDuration, "play_duration")
updatedScene.Organized = translator.optionalBool(input.Organized, "organized")
updatedScene.StashIDs = translator.updateStashIDs(input.StashIds, "stash_ids")
@ -569,7 +578,11 @@ func (r *mutationResolver) SceneMerge(ctx context.Context, input SceneMergeInput
var ret *models.Scene
if err := r.withTxn(ctx, func(ctx context.Context) error {
if err := r.Resolver.sceneService.Merge(ctx, srcIDs, destID, *values, fileDeleter); err != nil {
if err := r.Resolver.sceneService.Merge(ctx, srcIDs, destID, fileDeleter, scene.MergeOptions{
ScenePartial: *values,
IncludePlayHistory: utils.IsTrue(input.PlayHistory),
IncludeOHistory: utils.IsTrue(input.OHistory),
}); err != nil {
return err
}
@ -811,16 +824,96 @@ func (r *mutationResolver) SceneSaveActivity(ctx context.Context, id string, res
return ret, nil
}
// deprecated
func (r *mutationResolver) SceneIncrementPlayCount(ctx context.Context, id string) (ret int, err error) {
sceneID, err := strconv.Atoi(id)
if err != nil {
return 0, fmt.Errorf("converting id: %w", err)
}
var updatedTimes []time.Time
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Scene
ret, err = qb.IncrementWatchCount(ctx, sceneID)
updatedTimes, err = qb.AddViews(ctx, sceneID, nil)
return err
}); err != nil {
return 0, err
}
return len(updatedTimes), nil
}
// SceneAddPlay records one or more view timestamps for the scene.
// If no times are provided, the store records the current time.
// It returns the new view count and the full updated history.
func (r *mutationResolver) SceneAddPlay(ctx context.Context, id string, t []*time.Time) (*HistoryMutationResult, error) {
	sceneID, err := strconv.Atoi(id)
	if err != nil {
		return nil, fmt.Errorf("converting id: %w", err)
	}
	var times []time.Time
	// convert time to local time, so that sorting is consistent
	for _, tt := range t {
		times = append(times, tt.Local())
	}
	var updatedTimes []time.Time
	if err := r.withTxn(ctx, func(ctx context.Context) error {
		qb := r.repository.Scene
		updatedTimes, err = qb.AddViews(ctx, sceneID, times)
		return err
	}); err != nil {
		return nil, err
	}
	return &HistoryMutationResult{
		Count: len(updatedTimes),
		History: sliceutil.ValuesToPtrs(updatedTimes),
	}, nil
}
// SceneDeletePlay removes the given view timestamps from the scene's history,
// or the most recent view if no times are provided. It returns the new view
// count and the full updated history.
func (r *mutationResolver) SceneDeletePlay(ctx context.Context, id string, t []*time.Time) (*HistoryMutationResult, error) {
	sceneID, err := strconv.Atoi(id)
	if err != nil {
		// wrap for context, consistent with the other scene history mutations
		return nil, fmt.Errorf("converting id: %w", err)
	}
	// NOTE(review): unlike SceneAddPlay, times are not normalized with Local()
	// here — presumably deletion must match the stored values exactly; confirm.
	var times []time.Time
	for _, tt := range t {
		times = append(times, *tt)
	}
	var updatedTimes []time.Time
	if err := r.withTxn(ctx, func(ctx context.Context) error {
		qb := r.repository.Scene
		updatedTimes, err = qb.DeleteViews(ctx, sceneID, times)
		return err
	}); err != nil {
		return nil, err
	}
	return &HistoryMutationResult{
		Count: len(updatedTimes),
		History: sliceutil.ValuesToPtrs(updatedTimes),
	}, nil
}
func (r *mutationResolver) SceneResetPlayCount(ctx context.Context, id string) (ret int, err error) {
sceneID, err := strconv.Atoi(id)
if err != nil {
return 0, err
}
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Scene
ret, err = qb.DeleteAllViews(ctx, sceneID)
return err
}); err != nil {
return 0, err
@ -829,40 +922,46 @@ func (r *mutationResolver) SceneIncrementPlayCount(ctx context.Context, id strin
return ret, nil
}
// deprecated
func (r *mutationResolver) SceneIncrementO(ctx context.Context, id string) (ret int, err error) {
sceneID, err := strconv.Atoi(id)
if err != nil {
return 0, fmt.Errorf("converting id: %w", err)
}
var updatedTimes []time.Time
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Scene
ret, err = qb.IncrementOCounter(ctx, sceneID)
updatedTimes, err = qb.AddO(ctx, sceneID, nil)
return err
}); err != nil {
return 0, err
}
return ret, nil
return len(updatedTimes), nil
}
// deprecated
func (r *mutationResolver) SceneDecrementO(ctx context.Context, id string) (ret int, err error) {
sceneID, err := strconv.Atoi(id)
if err != nil {
return 0, fmt.Errorf("converting id: %w", err)
}
var updatedTimes []time.Time
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Scene
ret, err = qb.DecrementOCounter(ctx, sceneID)
updatedTimes, err = qb.DeleteO(ctx, sceneID, nil)
return err
}); err != nil {
return 0, err
}
return ret, nil
return len(updatedTimes), nil
}
func (r *mutationResolver) SceneResetO(ctx context.Context, id string) (ret int, err error) {
@ -874,7 +973,7 @@ func (r *mutationResolver) SceneResetO(ctx context.Context, id string) (ret int,
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Scene
ret, err = qb.ResetOCounter(ctx, sceneID)
ret, err = qb.ResetO(ctx, sceneID)
return err
}); err != nil {
return 0, err
@ -883,6 +982,65 @@ func (r *mutationResolver) SceneResetO(ctx context.Context, id string) (ret int,
return ret, nil
}
// SceneAddO records one or more o-counter timestamps for the scene.
// If no times are provided, the store records the current time.
// It returns the new o-count and the full updated history.
func (r *mutationResolver) SceneAddO(ctx context.Context, id string, t []*time.Time) (*HistoryMutationResult, error) {
	sceneID, err := strconv.Atoi(id)
	if err != nil {
		return nil, fmt.Errorf("converting id: %w", err)
	}
	var times []time.Time
	// convert time to local time, so that sorting is consistent
	for _, tt := range t {
		times = append(times, tt.Local())
	}
	var updatedTimes []time.Time
	if err := r.withTxn(ctx, func(ctx context.Context) error {
		qb := r.repository.Scene
		updatedTimes, err = qb.AddO(ctx, sceneID, times)
		return err
	}); err != nil {
		return nil, err
	}
	return &HistoryMutationResult{
		Count: len(updatedTimes),
		History: sliceutil.ValuesToPtrs(updatedTimes),
	}, nil
}
// SceneDeleteO removes the given o-counter timestamps from the scene's
// history, or the most recent entry if no times are provided. It returns the
// new o-count and the full updated history.
func (r *mutationResolver) SceneDeleteO(ctx context.Context, id string, t []*time.Time) (*HistoryMutationResult, error) {
	sceneID, err := strconv.Atoi(id)
	if err != nil {
		return nil, fmt.Errorf("converting id: %w", err)
	}
	// NOTE(review): unlike SceneAddO, times are not normalized with Local()
	// here — presumably deletion must match the stored values exactly; confirm.
	var times []time.Time
	for _, tt := range t {
		times = append(times, *tt)
	}
	var updatedTimes []time.Time
	if err := r.withTxn(ctx, func(ctx context.Context) error {
		qb := r.repository.Scene
		updatedTimes, err = qb.DeleteO(ctx, sceneID, times)
		return err
	}); err != nil {
		return nil, err
	}
	return &HistoryMutationResult{
		Count: len(updatedTimes),
		History: sliceutil.ValuesToPtrs(updatedTimes),
	}, nil
}
func (r *mutationResolver) SceneGenerateScreenshot(ctx context.Context, id string, at *float64) (string, error) {
if at != nil {
manager.GetInstance().GenerateScreenshot(ctx, id, *at)

View file

@ -2,14 +2,9 @@ package api
import (
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/sliceutil"
)
func stashIDsSliceToPtrSlice(v []models.StashID) []*models.StashID {
ret := make([]*models.StashID, len(v))
for i, vv := range v {
c := vv
ret[i] = &c
}
return ret
return sliceutil.ValuesToPtrs(v)
}

View file

@ -11,7 +11,7 @@ import (
type SceneService interface {
Create(ctx context.Context, input *models.Scene, fileIDs []models.FileID, coverImage []byte) (*models.Scene, error)
AssignFile(ctx context.Context, sceneID int, fileID models.FileID) error
Merge(ctx context.Context, sourceIDs []int, destinationID int, values models.ScenePartial, fileDeleter *scene.FileDeleter) error
Merge(ctx context.Context, sourceIDs []int, destinationID int, fileDeleter *scene.FileDeleter, options scene.MergeOptions) error
Destroy(ctx context.Context, scene *models.Scene, fileDeleter *scene.FileDeleter, deleteGenerated, deleteFile bool) error
}

View file

@ -527,7 +527,6 @@ func (t *ExportTask) exportScene(ctx context.Context, wg *sync.WaitGroup, jobCha
newSceneJSON.Galleries = gallery.GetRefs(galleries)
newSceneJSON.ResumeTime = s.ResumeTime
newSceneJSON.PlayCount = s.PlayCount
newSceneJSON.PlayDuration = s.PlayDuration
performers, err := performerReader.FindBySceneID(ctx, s.ID)

View file

@ -50,7 +50,10 @@ type Scene struct {
Date string `json:"date,omitempty"`
Rating int `json:"rating,omitempty"`
Organized bool `json:"organized,omitempty"`
// deprecated - for import only
OCounter int `json:"o_counter,omitempty"`
Details string `json:"details,omitempty"`
Director string `json:"director,omitempty"`
Galleries []GalleryRef `json:"galleries,omitempty"`
@ -62,9 +65,18 @@ type Scene struct {
Cover string `json:"cover,omitempty"`
CreatedAt json.JSONTime `json:"created_at,omitempty"`
UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
// deprecated - for import only
LastPlayedAt json.JSONTime `json:"last_played_at,omitempty"`
ResumeTime float64 `json:"resume_time,omitempty"`
// deprecated - for import only
PlayCount int `json:"play_count,omitempty"`
PlayHistory []json.JSONTime `json:"play_history,omitempty"`
OHistory []json.JSONTime `json:"o_history,omitempty"`
PlayDuration float64 `json:"play_duration,omitempty"`
StashIDs []models.StashID `json:"stash_ids,omitempty"`
}

View file

@ -7,6 +7,8 @@ import (
models "github.com/stashapp/stash/pkg/models"
mock "github.com/stretchr/testify/mock"
time "time"
)
// SceneReaderWriter is an autogenerated mock type for the SceneReaderWriter type
@ -42,6 +44,52 @@ func (_m *SceneReaderWriter) AddGalleryIDs(ctx context.Context, sceneID int, gal
return r0
}
// AddO provides a mock function with given fields: ctx, id, dates
func (_m *SceneReaderWriter) AddO(ctx context.Context, id int, dates []time.Time) ([]time.Time, error) {
ret := _m.Called(ctx, id, dates)
var r0 []time.Time
if rf, ok := ret.Get(0).(func(context.Context, int, []time.Time) []time.Time); ok {
r0 = rf(ctx, id, dates)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]time.Time)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, int, []time.Time) error); ok {
r1 = rf(ctx, id, dates)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// AddViews provides a mock function with given fields: ctx, sceneID, dates
func (_m *SceneReaderWriter) AddViews(ctx context.Context, sceneID int, dates []time.Time) ([]time.Time, error) {
ret := _m.Called(ctx, sceneID, dates)
var r0 []time.Time
if rf, ok := ret.Get(0).(func(context.Context, int, []time.Time) []time.Time); ok {
r0 = rf(ctx, sceneID, dates)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]time.Time)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, int, []time.Time) error); ok {
r1 = rf(ctx, sceneID, dates)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// All provides a mock function with given fields: ctx
func (_m *SceneReaderWriter) All(ctx context.Context) ([]*models.Scene, error) {
ret := _m.Called(ctx)
@ -100,6 +148,27 @@ func (_m *SceneReaderWriter) Count(ctx context.Context) (int, error) {
return r0, r1
}
// CountAllViews provides a mock function with given fields: ctx
func (_m *SceneReaderWriter) CountAllViews(ctx context.Context) (int, error) {
ret := _m.Called(ctx)
var r0 int
if rf, ok := ret.Get(0).(func(context.Context) int); ok {
r0 = rf(ctx)
} else {
r0 = ret.Get(0).(int)
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context) error); ok {
r1 = rf(ctx)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// CountByFileID provides a mock function with given fields: ctx, fileID
func (_m *SceneReaderWriter) CountByFileID(ctx context.Context, fileID models.FileID) (int, error) {
ret := _m.Called(ctx, fileID)
@ -247,6 +316,48 @@ func (_m *SceneReaderWriter) CountMissingOSHash(ctx context.Context) (int, error
return r0, r1
}
// CountUniqueViews provides a mock function with given fields: ctx
func (_m *SceneReaderWriter) CountUniqueViews(ctx context.Context) (int, error) {
ret := _m.Called(ctx)
var r0 int
if rf, ok := ret.Get(0).(func(context.Context) int); ok {
r0 = rf(ctx)
} else {
r0 = ret.Get(0).(int)
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context) error); ok {
r1 = rf(ctx)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// CountViews provides a mock function with given fields: ctx, id
func (_m *SceneReaderWriter) CountViews(ctx context.Context, id int) (int, error) {
ret := _m.Called(ctx, id)
var r0 int
if rf, ok := ret.Get(0).(func(context.Context, int) int); ok {
r0 = rf(ctx, id)
} else {
r0 = ret.Get(0).(int)
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
r1 = rf(ctx, id)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// Create provides a mock function with given fields: ctx, newScene, fileIDs
func (_m *SceneReaderWriter) Create(ctx context.Context, newScene *models.Scene, fileIDs []models.FileID) error {
ret := _m.Called(ctx, newScene, fileIDs)
@ -261,8 +372,8 @@ func (_m *SceneReaderWriter) Create(ctx context.Context, newScene *models.Scene,
return r0
}
// DecrementOCounter provides a mock function with given fields: ctx, id
func (_m *SceneReaderWriter) DecrementOCounter(ctx context.Context, id int) (int, error) {
// DeleteAllViews provides a mock function with given fields: ctx, id
func (_m *SceneReaderWriter) DeleteAllViews(ctx context.Context, id int) (int, error) {
ret := _m.Called(ctx, id)
var r0 int
@ -282,6 +393,52 @@ func (_m *SceneReaderWriter) DecrementOCounter(ctx context.Context, id int) (int
return r0, r1
}
// DeleteO provides a mock function with given fields: ctx, id, dates
func (_m *SceneReaderWriter) DeleteO(ctx context.Context, id int, dates []time.Time) ([]time.Time, error) {
ret := _m.Called(ctx, id, dates)
var r0 []time.Time
if rf, ok := ret.Get(0).(func(context.Context, int, []time.Time) []time.Time); ok {
r0 = rf(ctx, id, dates)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]time.Time)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, int, []time.Time) error); ok {
r1 = rf(ctx, id, dates)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// DeleteViews provides a mock function with given fields: ctx, id, dates
func (_m *SceneReaderWriter) DeleteViews(ctx context.Context, id int, dates []time.Time) ([]time.Time, error) {
ret := _m.Called(ctx, id, dates)
var r0 []time.Time
if rf, ok := ret.Get(0).(func(context.Context, int, []time.Time) []time.Time); ok {
r0 = rf(ctx, id, dates)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]time.Time)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, int, []time.Time) error); ok {
r1 = rf(ctx, id, dates)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// Destroy provides a mock function with given fields: ctx, id
func (_m *SceneReaderWriter) Destroy(ctx context.Context, id int) error {
ret := _m.Called(ctx, id)
@ -593,6 +750,27 @@ func (_m *SceneReaderWriter) FindMany(ctx context.Context, ids []int) ([]*models
return r0, r1
}
// GetAllOCount provides a mock function with given fields: ctx
func (_m *SceneReaderWriter) GetAllOCount(ctx context.Context) (int, error) {
ret := _m.Called(ctx)
var r0 int
if rf, ok := ret.Get(0).(func(context.Context) int); ok {
r0 = rf(ctx)
} else {
r0 = ret.Get(0).(int)
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context) error); ok {
r1 = rf(ctx)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetCover provides a mock function with given fields: ctx, sceneID
func (_m *SceneReaderWriter) GetCover(ctx context.Context, sceneID int) ([]byte, error) {
ret := _m.Called(ctx, sceneID)
@ -685,6 +863,121 @@ func (_m *SceneReaderWriter) GetManyFileIDs(ctx context.Context, ids []int) ([][
return r0, r1
}
// GetManyLastViewed provides a mock function with given fields: ctx, ids
func (_m *SceneReaderWriter) GetManyLastViewed(ctx context.Context, ids []int) ([]*time.Time, error) {
ret := _m.Called(ctx, ids)
var r0 []*time.Time
if rf, ok := ret.Get(0).(func(context.Context, []int) []*time.Time); ok {
r0 = rf(ctx, ids)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]*time.Time)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok {
r1 = rf(ctx, ids)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetManyOCount provides a mock function with given fields: ctx, ids
func (_m *SceneReaderWriter) GetManyOCount(ctx context.Context, ids []int) ([]int, error) {
ret := _m.Called(ctx, ids)
var r0 []int
if rf, ok := ret.Get(0).(func(context.Context, []int) []int); ok {
r0 = rf(ctx, ids)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]int)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok {
r1 = rf(ctx, ids)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetManyODates provides a mock function with given fields: ctx, ids
func (_m *SceneReaderWriter) GetManyODates(ctx context.Context, ids []int) ([][]time.Time, error) {
ret := _m.Called(ctx, ids)
var r0 [][]time.Time
if rf, ok := ret.Get(0).(func(context.Context, []int) [][]time.Time); ok {
r0 = rf(ctx, ids)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([][]time.Time)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok {
r1 = rf(ctx, ids)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetManyViewCount provides a mock function with given fields: ctx, ids
func (_m *SceneReaderWriter) GetManyViewCount(ctx context.Context, ids []int) ([]int, error) {
ret := _m.Called(ctx, ids)
var r0 []int
if rf, ok := ret.Get(0).(func(context.Context, []int) []int); ok {
r0 = rf(ctx, ids)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]int)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok {
r1 = rf(ctx, ids)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetManyViewDates provides a mock function with given fields: ctx, ids
func (_m *SceneReaderWriter) GetManyViewDates(ctx context.Context, ids []int) ([][]time.Time, error) {
ret := _m.Called(ctx, ids)
var r0 [][]time.Time
if rf, ok := ret.Get(0).(func(context.Context, []int) [][]time.Time); ok {
r0 = rf(ctx, ids)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([][]time.Time)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok {
r1 = rf(ctx, ids)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetMovies provides a mock function with given fields: ctx, id
func (_m *SceneReaderWriter) GetMovies(ctx context.Context, id int) ([]models.MoviesScenes, error) {
ret := _m.Called(ctx, id)
@ -708,6 +1001,50 @@ func (_m *SceneReaderWriter) GetMovies(ctx context.Context, id int) ([]models.Mo
return r0, r1
}
// GetOCount provides a mock function with given fields: ctx, id
func (_m *SceneReaderWriter) GetOCount(ctx context.Context, id int) (int, error) {
ret := _m.Called(ctx, id)
var r0 int
if rf, ok := ret.Get(0).(func(context.Context, int) int); ok {
r0 = rf(ctx, id)
} else {
r0 = ret.Get(0).(int)
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
r1 = rf(ctx, id)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetODates provides a mock function with given fields: ctx, relatedID
func (_m *SceneReaderWriter) GetODates(ctx context.Context, relatedID int) ([]time.Time, error) {
ret := _m.Called(ctx, relatedID)
var r0 []time.Time
if rf, ok := ret.Get(0).(func(context.Context, int) []time.Time); ok {
r0 = rf(ctx, relatedID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]time.Time)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
r1 = rf(ctx, relatedID)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetPerformerIDs provides a mock function with given fields: ctx, relatedID
func (_m *SceneReaderWriter) GetPerformerIDs(ctx context.Context, relatedID int) ([]int, error) {
ret := _m.Called(ctx, relatedID)
@ -800,6 +1137,29 @@ func (_m *SceneReaderWriter) GetURLs(ctx context.Context, relatedID int) ([]stri
return r0, r1
}
// GetViewDates provides a mock function with given fields: ctx, relatedID
func (_m *SceneReaderWriter) GetViewDates(ctx context.Context, relatedID int) ([]time.Time, error) {
ret := _m.Called(ctx, relatedID)
var r0 []time.Time
if rf, ok := ret.Get(0).(func(context.Context, int) []time.Time); ok {
r0 = rf(ctx, relatedID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]time.Time)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
r1 = rf(ctx, relatedID)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// HasCover provides a mock function with given fields: ctx, sceneID
func (_m *SceneReaderWriter) HasCover(ctx context.Context, sceneID int) (bool, error) {
ret := _m.Called(ctx, sceneID)
@ -821,69 +1181,6 @@ func (_m *SceneReaderWriter) HasCover(ctx context.Context, sceneID int) (bool, e
return r0, r1
}
// IncrementOCounter provides a mock function with given fields: ctx, id
func (_m *SceneReaderWriter) IncrementOCounter(ctx context.Context, id int) (int, error) {
ret := _m.Called(ctx, id)
var r0 int
if rf, ok := ret.Get(0).(func(context.Context, int) int); ok {
r0 = rf(ctx, id)
} else {
r0 = ret.Get(0).(int)
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
r1 = rf(ctx, id)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// IncrementWatchCount provides a mock function with given fields: ctx, sceneID
func (_m *SceneReaderWriter) IncrementWatchCount(ctx context.Context, sceneID int) (int, error) {
ret := _m.Called(ctx, sceneID)
var r0 int
if rf, ok := ret.Get(0).(func(context.Context, int) int); ok {
r0 = rf(ctx, sceneID)
} else {
r0 = ret.Get(0).(int)
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
r1 = rf(ctx, sceneID)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// OCount provides a mock function with given fields: ctx
func (_m *SceneReaderWriter) OCount(ctx context.Context) (int, error) {
ret := _m.Called(ctx)
var r0 int
if rf, ok := ret.Get(0).(func(context.Context) int); ok {
r0 = rf(ctx)
} else {
r0 = ret.Get(0).(int)
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context) error); ok {
r1 = rf(ctx)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// OCountByPerformerID provides a mock function with given fields: ctx, performerID
func (_m *SceneReaderWriter) OCountByPerformerID(ctx context.Context, performerID int) (int, error) {
ret := _m.Called(ctx, performerID)
@ -905,27 +1202,6 @@ func (_m *SceneReaderWriter) OCountByPerformerID(ctx context.Context, performerI
return r0, r1
}
// PlayCount provides a mock function with given fields: ctx
func (_m *SceneReaderWriter) PlayCount(ctx context.Context) (int, error) {
ret := _m.Called(ctx)
var r0 int
if rf, ok := ret.Get(0).(func(context.Context) int); ok {
r0 = rf(ctx)
} else {
r0 = ret.Get(0).(int)
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context) error); ok {
r1 = rf(ctx)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// PlayDuration provides a mock function with given fields: ctx
func (_m *SceneReaderWriter) PlayDuration(ctx context.Context) (float64, error) {
ret := _m.Called(ctx)
@ -991,8 +1267,8 @@ func (_m *SceneReaderWriter) QueryCount(ctx context.Context, sceneFilter *models
return r0, r1
}
// ResetOCounter provides a mock function with given fields: ctx, id
func (_m *SceneReaderWriter) ResetOCounter(ctx context.Context, id int) (int, error) {
// ResetO provides a mock function with given fields: ctx, id
func (_m *SceneReaderWriter) ResetO(ctx context.Context, id int) (int, error) {
ret := _m.Called(ctx, id)
var r0 int
@ -1054,27 +1330,6 @@ func (_m *SceneReaderWriter) Size(ctx context.Context) (float64, error) {
return r0, r1
}
// UniqueScenePlayCount provides a mock function with given fields: ctx
func (_m *SceneReaderWriter) UniqueScenePlayCount(ctx context.Context) (int, error) {
ret := _m.Called(ctx)
var r0 int
if rf, ok := ret.Get(0).(func(context.Context) int); ok {
r0 = rf(ctx)
} else {
r0 = ret.Get(0).(int)
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context) error); ok {
r1 = rf(ctx)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// Update provides a mock function with given fields: ctx, updatedScene
func (_m *SceneReaderWriter) Update(ctx context.Context, updatedScene *models.Scene) error {
ret := _m.Called(ctx, updatedScene)

View file

@ -19,7 +19,6 @@ type Scene struct {
// Rating expressed in 1-100 scale
Rating *int `json:"rating"`
Organized bool `json:"organized"`
OCounter int `json:"o_counter"`
StudioID *int `json:"studio_id"`
// transient - not persisted
@ -35,10 +34,8 @@ type Scene struct {
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
LastPlayedAt *time.Time `json:"last_played_at"`
ResumeTime float64 `json:"resume_time"`
PlayDuration float64 `json:"play_duration"`
PlayCount int `json:"play_count"`
URLs RelatedStrings `json:"urls"`
GalleryIDs RelatedIDs `json:"gallery_ids"`
@ -67,14 +64,11 @@ type ScenePartial struct {
// Rating expressed in 1-100 scale
Rating OptionalInt
Organized OptionalBool
OCounter OptionalInt
StudioID OptionalInt
CreatedAt OptionalTime
UpdatedAt OptionalTime
ResumeTime OptionalFloat64
PlayDuration OptionalFloat64
PlayCount OptionalInt
LastPlayedAt OptionalTime
URLs *UpdateStrings
GalleryIDs *UpdateIDs

View file

@ -1,6 +1,8 @@
package models
import "context"
import (
"context"
)
type SceneIDLoader interface {
GetSceneIDs(ctx context.Context, relatedID int) ([]int, error)

View file

@ -1,6 +1,9 @@
package models
import "context"
import (
"context"
"time"
)
// SceneGetter provides methods to get scenes by ID.
type SceneGetter interface {
@ -40,10 +43,7 @@ type SceneCounter interface {
CountByTagID(ctx context.Context, tagID int) (int, error)
CountMissingChecksum(ctx context.Context) (int, error)
CountMissingOSHash(ctx context.Context) (int, error)
OCount(ctx context.Context) (int, error)
OCountByPerformerID(ctx context.Context, performerID int) (int, error)
PlayCount(ctx context.Context) (int, error)
UniqueScenePlayCount(ctx context.Context) (int, error)
}
// SceneCreator provides methods to create scenes.
@ -68,6 +68,24 @@ type SceneCreatorUpdater interface {
SceneUpdater
}
type ViewDateReader interface {
CountViews(ctx context.Context, id int) (int, error)
CountAllViews(ctx context.Context) (int, error)
CountUniqueViews(ctx context.Context) (int, error)
GetManyViewCount(ctx context.Context, ids []int) ([]int, error)
GetViewDates(ctx context.Context, relatedID int) ([]time.Time, error)
GetManyViewDates(ctx context.Context, ids []int) ([][]time.Time, error)
GetManyLastViewed(ctx context.Context, ids []int) ([]*time.Time, error)
}
type ODateReader interface {
GetOCount(ctx context.Context, id int) (int, error)
GetManyOCount(ctx context.Context, ids []int) ([]int, error)
GetAllOCount(ctx context.Context) (int, error)
GetODates(ctx context.Context, relatedID int) ([]time.Time, error)
GetManyODates(ctx context.Context, ids []int) ([][]time.Time, error)
}
// SceneReader provides all methods to read scenes.
type SceneReader interface {
SceneFinder
@ -75,6 +93,8 @@ type SceneReader interface {
SceneCounter
URLLoader
ViewDateReader
ODateReader
FileIDLoader
GalleryIDLoader
PerformerIDLoader
@ -92,6 +112,18 @@ type SceneReader interface {
HasCover(ctx context.Context, sceneID int) (bool, error)
}
type OHistoryWriter interface {
AddO(ctx context.Context, id int, dates []time.Time) ([]time.Time, error)
DeleteO(ctx context.Context, id int, dates []time.Time) ([]time.Time, error)
ResetO(ctx context.Context, id int) (int, error)
}
// ViewHistoryWriter provides methods to modify the view history of scenes.
type ViewHistoryWriter interface {
	// AddViews records the given view dates against the scene, returning the
	// resulting dates.
	// (param renamed sceneID -> id for consistency with the other methods;
	// interface parameter names are documentation only, so callers are unaffected)
	AddViews(ctx context.Context, id int, dates []time.Time) ([]time.Time, error)
	// DeleteViews removes the given view dates from the scene, returning the
	// resulting dates.
	DeleteViews(ctx context.Context, id int, dates []time.Time) ([]time.Time, error)
	// DeleteAllViews removes all view dates for the scene, returning the
	// resulting count.
	DeleteAllViews(ctx context.Context, id int) (int, error)
}
// SceneWriter provides all methods to modify scenes.
type SceneWriter interface {
SceneCreator
@ -101,11 +133,10 @@ type SceneWriter interface {
AddFileID(ctx context.Context, id int, fileID FileID) error
AddGalleryIDs(ctx context.Context, sceneID int, galleryIDs []int) error
AssignFiles(ctx context.Context, sceneID int, fileID []FileID) error
IncrementOCounter(ctx context.Context, id int) (int, error)
DecrementOCounter(ctx context.Context, id int) (int, error)
ResetOCounter(ctx context.Context, id int) (int, error)
OHistoryWriter
ViewHistoryWriter
SaveActivity(ctx context.Context, sceneID int, resumeTime *float64, playDuration *float64) (bool, error)
IncrementWatchCount(ctx context.Context, sceneID int) (int, error)
}
// SceneReaderWriter provides all scene methods.

View file

@ -14,7 +14,9 @@ import (
"github.com/stashapp/stash/pkg/utils"
)
type CoverGetter interface {
type ExportGetter interface {
models.ViewDateReader
models.ODateReader
GetCover(ctx context.Context, sceneID int) ([]byte, error)
}
@ -27,7 +29,7 @@ type TagFinder interface {
// ToBasicJSON converts a scene object into its JSON object equivalent. It
// does not convert the relationships to other objects, with the exception
// of cover image.
func ToBasicJSON(ctx context.Context, reader CoverGetter, scene *models.Scene) (*jsonschema.Scene, error) {
func ToBasicJSON(ctx context.Context, reader ExportGetter, scene *models.Scene) (*jsonschema.Scene, error) {
newSceneJSON := jsonschema.Scene{
Title: scene.Title,
Code: scene.Code,
@ -47,7 +49,6 @@ func ToBasicJSON(ctx context.Context, reader CoverGetter, scene *models.Scene) (
}
newSceneJSON.Organized = scene.Organized
newSceneJSON.OCounter = scene.OCounter
for _, f := range scene.Files.List() {
newSceneJSON.Files = append(newSceneJSON.Files, f.Base().Path)
@ -73,6 +74,24 @@ func ToBasicJSON(ctx context.Context, reader CoverGetter, scene *models.Scene) (
newSceneJSON.StashIDs = ret
dates, err := reader.GetViewDates(ctx, scene.ID)
if err != nil {
return nil, fmt.Errorf("error getting view dates: %v", err)
}
for _, date := range dates {
newSceneJSON.PlayHistory = append(newSceneJSON.PlayHistory, json.JSONTime{Time: date})
}
odates, err := reader.GetODates(ctx, scene.ID)
if err != nil {
return nil, fmt.Errorf("error getting o dates: %v", err)
}
for _, date := range odates {
newSceneJSON.OHistory = append(newSceneJSON.OHistory, json.JSONTime{Time: date})
}
return &newSceneJSON, nil
}

View file

@ -8,6 +8,7 @@ import (
"github.com/stashapp/stash/pkg/models/jsonschema"
"github.com/stashapp/stash/pkg/models/mocks"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
"testing"
"time"
@ -40,7 +41,6 @@ var (
date = "2001-01-01"
dateObj, _ = models.ParseDate(date)
rating = 5
ocounter = 2
organized = true
details = "details"
)
@ -88,7 +88,6 @@ func createFullScene(id int) models.Scene {
Title: title,
Date: &dateObj,
Details: details,
OCounter: ocounter,
Rating: &rating,
Organized: organized,
URLs: models.NewRelatedStrings([]string{url}),
@ -130,7 +129,6 @@ func createFullJSONScene(image string) *jsonschema.Scene {
Files: []string{path},
Date: date,
Details: details,
OCounter: ocounter,
Rating: rating,
Organized: organized,
URLs: []string{url},
@ -193,6 +191,8 @@ func TestToJSON(t *testing.T) {
db.Scene.On("GetCover", testCtx, sceneID).Return(imageBytes, nil).Once()
db.Scene.On("GetCover", testCtx, noImageID).Return(nil, nil).Once()
db.Scene.On("GetCover", testCtx, errImageID).Return(nil, imageErr).Once()
db.Scene.On("GetViewDates", testCtx, mock.Anything).Return(nil, nil)
db.Scene.On("GetODates", testCtx, mock.Anything).Return(nil, nil)
for i, s := range scenarios {
scene := s.input

View file

@ -4,8 +4,10 @@ import (
"context"
"fmt"
"strings"
"time"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/json"
"github.com/stashapp/stash/pkg/models/jsonschema"
"github.com/stashapp/stash/pkg/sliceutil"
"github.com/stashapp/stash/pkg/utils"
@ -13,6 +15,8 @@ import (
type ImporterReaderWriter interface {
models.SceneCreatorUpdater
models.ViewHistoryWriter
models.OHistoryWriter
FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error)
}
@ -31,6 +35,8 @@ type Importer struct {
ID int
scene models.Scene
coverImageData []byte
viewHistory []time.Time
oHistory []time.Time
}
func (i *Importer) PreImport(ctx context.Context) error {
@ -68,6 +74,9 @@ func (i *Importer) PreImport(ctx context.Context) error {
}
}
i.populateViewHistory()
i.populateOHistory()
return nil
}
@ -101,20 +110,54 @@ func (i *Importer) sceneJSONToScene(sceneJSON jsonschema.Scene) models.Scene {
}
newScene.Organized = sceneJSON.Organized
newScene.OCounter = sceneJSON.OCounter
newScene.CreatedAt = sceneJSON.CreatedAt.GetTime()
newScene.UpdatedAt = sceneJSON.UpdatedAt.GetTime()
if !sceneJSON.LastPlayedAt.IsZero() {
t := sceneJSON.LastPlayedAt.GetTime()
newScene.LastPlayedAt = &t
}
newScene.ResumeTime = sceneJSON.ResumeTime
newScene.PlayDuration = sceneJSON.PlayDuration
newScene.PlayCount = sceneJSON.PlayCount
return newScene
}
// getHistory builds a list of history timestamps from imported JSON data.
// Explicit history entries take precedence; otherwise count synthetic
// entries are generated at createdAt, with the final entry set to last
// (when non-zero) so the most recent activity time is preserved.
func getHistory(historyJSON []json.JSONTime, count int, last json.JSONTime, createdAt json.JSONTime) []time.Time {
	if len(historyJSON) > 0 {
		ret := make([]time.Time, 0, len(historyJSON))
		for _, d := range historyJSON {
			ret = append(ret, d.GetTime())
		}
		return ret
	}

	if count <= 0 {
		return nil
	}

	defaultTime := createdAt.GetTime()
	ret := make([]time.Time, count)
	for j := range ret {
		ret[j] = defaultTime
	}
	// last one, use last play date
	if !last.IsZero() {
		ret[count-1] = last.GetTime()
	}
	return ret
}
// populateViewHistory derives the view-history timestamps from the imported
// JSON: explicit PlayHistory entries if present, otherwise PlayCount
// synthesized entries (the last one at LastPlayedAt when set).
func (i *Importer) populateViewHistory() {
	i.viewHistory = getHistory(
		i.Input.PlayHistory,
		i.Input.PlayCount,
		i.Input.LastPlayedAt,
		i.Input.CreatedAt,
	)
}
// populateOHistory derives the o-history timestamps from the imported JSON:
// explicit OHistory entries if present, otherwise OCounter synthesized
// entries at the scene's creation time.
// Fixed: the result was previously assigned to i.viewHistory, so the
// imported o-history was silently dropped (and the view history clobbered).
func (i *Importer) populateOHistory() {
	i.oHistory = getHistory(
		i.Input.OHistory,
		i.Input.OCounter,
		i.Input.CreatedAt, // no last o count date
		i.Input.CreatedAt,
	)
}
func (i *Importer) populateFiles(ctx context.Context) error {
files := make([]*models.VideoFile, 0)
@ -365,6 +408,28 @@ func (i *Importer) populateTags(ctx context.Context) error {
return nil
}
// addViewHistory persists the imported view history (if any) against the
// created scene.
func (i *Importer) addViewHistory(ctx context.Context) error {
	if len(i.viewHistory) == 0 {
		return nil
	}

	// wrap with %w (rather than %v) so callers can unwrap with errors.Is/As
	if _, err := i.ReaderWriter.AddViews(ctx, i.ID, i.viewHistory); err != nil {
		return fmt.Errorf("error adding view date: %w", err)
	}

	return nil
}
// addOHistory persists the imported o-history (if any) against the created
// scene.
func (i *Importer) addOHistory(ctx context.Context) error {
	if len(i.oHistory) == 0 {
		return nil
	}

	// wrap with %w (rather than %v) so callers can unwrap with errors.Is/As
	if _, err := i.ReaderWriter.AddO(ctx, i.ID, i.oHistory); err != nil {
		return fmt.Errorf("error adding o date: %w", err)
	}

	return nil
}
func (i *Importer) PostImport(ctx context.Context, id int) error {
if len(i.coverImageData) > 0 {
if err := i.ReaderWriter.UpdateCover(ctx, id, i.coverImageData); err != nil {
@ -372,6 +437,15 @@ func (i *Importer) PostImport(ctx context.Context, id int) error {
}
}
// add histories
if err := i.addViewHistory(ctx); err != nil {
return err
}
if err := i.addOHistory(ctx); err != nil {
return err
}
return nil
}

View file

@ -6,6 +6,7 @@ import (
"fmt"
"os"
"path/filepath"
"time"
"github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/logger"
@ -14,13 +15,15 @@ import (
"github.com/stashapp/stash/pkg/txn"
)
func (s *Service) Merge(
ctx context.Context,
sourceIDs []int,
destinationID int,
scenePartial models.ScenePartial,
fileDeleter *FileDeleter,
) error {
// MergeOptions modifies the behaviour of the scene Merge operation.
type MergeOptions struct {
	// ScenePartial is applied to the destination scene as part of the merge.
	ScenePartial models.ScenePartial
	// IncludePlayHistory merges the source scenes' view dates into the destination.
	IncludePlayHistory bool
	// IncludeOHistory merges the source scenes' o-dates into the destination.
	IncludeOHistory bool
}
func (s *Service) Merge(ctx context.Context, sourceIDs []int, destinationID int, fileDeleter *FileDeleter, options MergeOptions) error {
scenePartial := options.ScenePartial
// ensure source ids are unique
sourceIDs = sliceutil.AppendUniques(nil, sourceIDs)
@ -74,6 +77,44 @@ func (s *Service) Merge(
return fmt.Errorf("updating scene: %w", err)
}
// merge play history
if options.IncludePlayHistory {
var allDates []time.Time
for _, src := range sources {
thisDates, err := s.Repository.GetViewDates(ctx, src.ID)
if err != nil {
return fmt.Errorf("getting view dates for scene %d: %w", src.ID, err)
}
allDates = append(allDates, thisDates...)
}
if len(allDates) > 0 {
if _, err := s.Repository.AddViews(ctx, destinationID, allDates); err != nil {
return fmt.Errorf("adding view dates to scene %d: %w", destinationID, err)
}
}
}
// merge o history
if options.IncludeOHistory {
var allDates []time.Time
for _, src := range sources {
thisDates, err := s.Repository.GetODates(ctx, src.ID)
if err != nil {
return fmt.Errorf("getting o dates for scene %d: %w", src.ID, err)
}
allDates = append(allDates, thisDates...)
}
if len(allDates) > 0 {
if _, err := s.Repository.AddO(ctx, destinationID, allDates); err != nil {
return fmt.Errorf("adding o dates to scene %d: %w", destinationID, err)
}
}
}
// delete old scenes
for _, src := range sources {
const deleteGenerated = true

View file

@ -153,3 +153,20 @@ func Map[T any, V any](vs []T, f func(T) V) []V {
}
return ret
}
// PtrsToValues dereferences every pointer in vs and returns the resulting
// values in order. Panics if any element is nil.
func PtrsToValues[T any](vs []*T) []T {
	out := make([]T, 0, len(vs))
	for _, p := range vs {
		out = append(out, *p)
	}
	return out
}
// ValuesToPtrs returns a slice of pointers to copies of the values in vs.
// Each pointer references a fresh copy, not an element of the input slice.
func ValuesToPtrs[T any](vs []T) []*T {
	out := make([]*T, len(vs))
	for i := range vs {
		// copy the element so the pointer does not alias the input slice
		elem := vs[i]
		out[i] = &elem
	}
	return out
}

View file

@ -33,7 +33,7 @@ const (
dbConnTimeout = 30
)
var appSchemaVersion uint = 54
var appSchemaVersion uint = 55
//go:embed migrations/*.sql
var migrationsBox embed.FS

View file

@ -777,28 +777,6 @@ func (m *countCriterionHandlerBuilder) handler(criterion *models.IntCriterionInp
}
}
type joinedMultiSumCriterionHandlerBuilder struct {
primaryTable string
foreignTable1 string
joinTable1 string
foreignTable2 string
joinTable2 string
primaryFK string
foreignFK1 string
foreignFK2 string
sum string
}
func (m *joinedMultiSumCriterionHandlerBuilder) handler(criterion *models.IntCriterionInput) criterionHandlerFunc {
return func(ctx context.Context, f *filterBuilder) {
if criterion != nil {
clause, args := getJoinedMultiSumCriterionClause(m.primaryTable, m.foreignTable1, m.joinTable1, m.foreignTable2, m.joinTable2, m.primaryFK, m.foreignFK1, m.foreignFK2, m.sum, *criterion)
f.addWhere(clause, args...)
}
}
}
// handler for StringCriterion for string list fields
type stringListCriterionHandlerBuilder struct {
// table joining primary and foreign objects

95
pkg/sqlite/history.go Normal file
View file

@ -0,0 +1,95 @@
package sqlite
import (
"context"
"time"
)
// viewDateManager implements scene view-history read/write operations by
// delegating to an underlying viewHistoryTable.
type viewDateManager struct {
	tableMgr *viewHistoryTable
}

// GetViewDates returns all recorded view timestamps for the given scene id.
func (qb *viewDateManager) GetViewDates(ctx context.Context, id int) ([]time.Time, error) {
	return qb.tableMgr.getDates(ctx, id)
}

// GetManyViewDates returns the view timestamps for each of the given ids.
func (qb *viewDateManager) GetManyViewDates(ctx context.Context, ids []int) ([][]time.Time, error) {
	return qb.tableMgr.getManyDates(ctx, ids)
}

// CountViews returns the number of recorded views for the given scene id.
func (qb *viewDateManager) CountViews(ctx context.Context, id int) (int, error) {
	return qb.tableMgr.getCount(ctx, id)
}

// GetManyViewCount returns a view count for each of the given ids.
func (qb *viewDateManager) GetManyViewCount(ctx context.Context, ids []int) ([]int, error) {
	return qb.tableMgr.getManyCount(ctx, ids)
}

// CountAllViews returns the total number of recorded views across all scenes.
func (qb *viewDateManager) CountAllViews(ctx context.Context) (int, error) {
	return qb.tableMgr.getAllCount(ctx)
}

// CountUniqueViews returns the unique view count — presumably the number of
// distinct scenes viewed; confirm against viewHistoryTable.getUniqueCount.
func (qb *viewDateManager) CountUniqueViews(ctx context.Context) (int, error) {
	return qb.tableMgr.getUniqueCount(ctx)
}

// LastView returns the most recent view timestamp for the scene, if any.
func (qb *viewDateManager) LastView(ctx context.Context, id int) (*time.Time, error) {
	return qb.tableMgr.getLastDate(ctx, id)
}

// GetManyLastViewed returns the most recent view timestamp for each of the
// given ids.
func (qb *viewDateManager) GetManyLastViewed(ctx context.Context, ids []int) ([]*time.Time, error) {
	return qb.tableMgr.getManyLastDate(ctx, ids)
}

// AddViews records the given view dates against the scene.
func (qb *viewDateManager) AddViews(ctx context.Context, id int, dates []time.Time) ([]time.Time, error) {
	return qb.tableMgr.addDates(ctx, id, dates)
}

// DeleteViews removes the given view dates from the scene.
func (qb *viewDateManager) DeleteViews(ctx context.Context, id int, dates []time.Time) ([]time.Time, error) {
	return qb.tableMgr.deleteDates(ctx, id, dates)
}

// DeleteAllViews removes all view dates for the scene, returning the
// resulting count.
func (qb *viewDateManager) DeleteAllViews(ctx context.Context, id int) (int, error) {
	return qb.tableMgr.deleteAllDates(ctx, id)
}
// oDateManager implements scene o-date history read/write operations by
// delegating to an underlying viewHistoryTable (the same generic table
// manager used for view dates, pointed at the o-dates table).
type oDateManager struct {
	tableMgr *viewHistoryTable
}

// GetODates returns all recorded o-date timestamps for the given scene id.
func (qb *oDateManager) GetODates(ctx context.Context, id int) ([]time.Time, error) {
	return qb.tableMgr.getDates(ctx, id)
}

// GetManyODates returns the o-date timestamps for each of the given ids.
func (qb *oDateManager) GetManyODates(ctx context.Context, ids []int) ([][]time.Time, error) {
	return qb.tableMgr.getManyDates(ctx, ids)
}

// GetOCount returns the number of recorded o-dates for the given scene id.
func (qb *oDateManager) GetOCount(ctx context.Context, id int) (int, error) {
	return qb.tableMgr.getCount(ctx, id)
}

// GetManyOCount returns an o-count for each of the given ids.
func (qb *oDateManager) GetManyOCount(ctx context.Context, ids []int) ([]int, error) {
	return qb.tableMgr.getManyCount(ctx, ids)
}

// GetAllOCount returns the total number of recorded o-dates across all scenes.
func (qb *oDateManager) GetAllOCount(ctx context.Context) (int, error) {
	return qb.tableMgr.getAllCount(ctx)
}

// GetUniqueOCount returns the unique o-count — presumably the number of
// distinct scenes with at least one o-date; confirm against getUniqueCount.
func (qb *oDateManager) GetUniqueOCount(ctx context.Context) (int, error) {
	return qb.tableMgr.getUniqueCount(ctx)
}

// AddO records the given o-dates against the scene.
func (qb *oDateManager) AddO(ctx context.Context, id int, dates []time.Time) ([]time.Time, error) {
	return qb.tableMgr.addDates(ctx, id, dates)
}

// DeleteO removes the given o-dates from the scene.
func (qb *oDateManager) DeleteO(ctx context.Context, id int, dates []time.Time) ([]time.Time, error) {
	return qb.tableMgr.deleteDates(ctx, id, dates)
}

// ResetO removes all o-dates for the scene, returning the resulting count.
func (qb *oDateManager) ResetO(ctx context.Context, id int) (int, error) {
	return qb.tableMgr.deleteAllDates(ctx, id)
}

View file

@ -17,7 +17,7 @@ import (
"github.com/doug-martin/goqu/v9/exp"
)
var imageTable = "images"
const imageTable = "images"
const (
imageIDColumn = "image_id"

View file

@ -0,0 +1,111 @@
PRAGMA foreign_keys=OFF;

-- per-view history: one row for each time a scene was viewed
CREATE TABLE `scenes_view_dates` (
`scene_id` integer,
`view_date` datetime not null,
foreign key(`scene_id`) references `scenes`(`id`) on delete CASCADE
);

-- per-o history: one row for each recorded o-date of a scene
CREATE TABLE `scenes_o_dates` (
`scene_id` integer,
`o_date` datetime not null,
foreign key(`scene_id`) references `scenes`(`id`) on delete CASCADE
);

-- drop o_counter, play_count and last_played_at
-- (the aggregate columns are replaced by the two history tables above)
CREATE TABLE "scenes_new" (
`id` integer not null primary key autoincrement,
`title` varchar(255),
`details` text,
`date` date,
`rating` tinyint,
`studio_id` integer,
`organized` boolean not null default '0',
`created_at` datetime not null,
`updated_at` datetime not null,
`code` text,
`director` text,
`resume_time` float not null default 0,
`play_duration` float not null default 0,
`cover_blob` varchar(255) REFERENCES `blobs`(`checksum`),
foreign key(`studio_id`) references `studios`(`id`) on delete SET NULL
);

-- copy the surviving columns into the new scenes table
INSERT INTO `scenes_new`
(
`id`,
`title`,
`details`,
`date`,
`rating`,
`studio_id`,
`organized`,
`created_at`,
`updated_at`,
`code`,
`director`,
`resume_time`,
`play_duration`,
`cover_blob`
)
SELECT
`id`,
`title`,
`details`,
`date`,
`rating`,
`studio_id`,
`organized`,
`created_at`,
`updated_at`,
`code`,
`director`,
`resume_time`,
`play_duration`,
`cover_blob`
FROM `scenes`;

-- expand each scene's play_count into individual view rows.
-- numbers generates 1..MAX(play_count) via a recursive CTE (SQLite treats
-- all CTEs as potentially recursive, so the RECURSIVE keyword is optional).
-- the final synthesized view for a scene uses last_played_at when set;
-- all earlier ones fall back to created_at.
WITH max_view_count AS (
SELECT MAX(play_count) AS max_count
FROM scenes
), numbers AS (
SELECT 1 AS n
FROM max_view_count
UNION ALL
SELECT n + 1
FROM numbers
WHERE n < (SELECT max_count FROM max_view_count)
)
INSERT INTO scenes_view_dates (scene_id, view_date)
SELECT scenes.id,
CASE
WHEN numbers.n = scenes.play_count THEN COALESCE(scenes.last_played_at, scenes.created_at)
ELSE scenes.created_at
END AS view_date
FROM scenes
JOIN numbers
WHERE numbers.n <= scenes.play_count;

-- expand each scene's o_counter into individual o-date rows, all stamped
-- with the scene's created_at (no per-o timestamp existed previously).
-- the WHERE clause already restricts n <= o_counter, so the CASE never
-- yields NULL for inserted rows.
WITH numbers AS (
SELECT 1 AS n
UNION ALL
SELECT n + 1
FROM numbers
WHERE n < (SELECT MAX(o_counter) FROM scenes)
)
INSERT INTO scenes_o_dates (scene_id, o_date)
SELECT scenes.id,
CASE
WHEN numbers.n <= scenes.o_counter THEN scenes.created_at
END AS o_date
FROM scenes
CROSS JOIN numbers
WHERE numbers.n <= scenes.o_counter;

-- swap the new table in place of the old one and rebuild its index
DROP INDEX `index_scenes_on_studio_id`;
DROP TABLE `scenes`;
ALTER TABLE `scenes_new` rename to `scenes`;
CREATE INDEX `index_scenes_on_studio_id` on `scenes` (`studio_id`);

PRAGMA foreign_keys=ON;

View file

@ -0,0 +1,71 @@
package migrations
import (
"context"
"fmt"
"github.com/jmoiron/sqlx"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/sqlite"
)
// schema55Migrator performs the post-migration data fixup for schema
// version 55 (the scene view/o-date history migration).
type schema55Migrator struct {
	migrator
}
// post55 runs the schema 55 post-migration, which normalizes the format of
// view dates copied from the legacy last_played_at column.
func post55(ctx context.Context, db *sqlx.DB) error {
	logger.Info("Running post-migration for schema version 55")

	m := schema55Migrator{
		migrator: migrator{
			db: db,
		},
	}

	return m.migrate(ctx)
}
// migrate rewrites every distinct view_date value in scenes_view_dates into
// the standard UTC timestamp format used elsewhere in the database.
func (m *schema55Migrator) migrate(ctx context.Context) error {
	// the last_played_at column was storing in a different format than the rest of the timestamps
	// convert the play history date to the correct format
	if err := m.withTxn(ctx, func(tx *sqlx.Tx) error {
		query := "SELECT DISTINCT `scene_id`, `view_date` FROM `scenes_view_dates`"

		// NOTE(review): queries/updates use m.db rather than the tx argument —
		// presumably the migrator's withTxn operates on the same connection; confirm
		rows, err := m.db.Query(query)
		if err != nil {
			return err
		}
		defer rows.Close()

		for rows.Next() {
			var (
				id       int
				viewDate sqlite.Timestamp
			)

			// id is scanned but unused: the UPDATE below is keyed on view_date
			// alone, so all rows sharing the timestamp are converted together
			err := rows.Scan(&id, &viewDate)
			if err != nil {
				return err
			}

			utcTimestamp := sqlite.UTCTimestamp{
				Timestamp: viewDate,
			}

			// convert the timestamp to the correct format
			if _, err := m.db.Exec("UPDATE scenes_view_dates SET view_date = ? WHERE view_date = ?", utcTimestamp, viewDate.Timestamp); err != nil {
				return fmt.Errorf("error correcting view date %s to %s: %w", viewDate.Timestamp, viewDate, err)
			}
		}

		return rows.Err()
	}); err != nil {
		return err
	}

	return nil
}
// register the fixup to run after the schema 55 SQL migration completes
func init() {
	sqlite.RegisterPostMigration(55, post55)
}

View file

@ -847,20 +847,44 @@ func performerGalleryCountCriterionHandler(qb *PerformerStore, count *models.Int
return h.handler(count)
}
// used for sorting and filtering on performer o-count
// selectPerformerOCountSQL is a correlated subquery (relative to the
// enclosing performers table) that sums a performer's total o-count from
// two sources: the o_counter column of their images, and the count of
// per-scene o-date history rows for their scenes.
var selectPerformerOCountSQL = utils.StrFormat(
	"SELECT SUM(o_counter) "+
		"FROM ("+
		"SELECT SUM(o_counter) as o_counter from {performers_images} s "+
		"LEFT JOIN {images} ON {images}.id = s.{images_id} "+
		"WHERE s.{performer_id} = {performers}.id "+
		"UNION ALL "+
		"SELECT COUNT({scenes_o_dates}.{o_date}) as o_counter from {performers_scenes} s "+
		"LEFT JOIN {scenes} ON {scenes}.id = s.{scene_id} "+
		"LEFT JOIN {scenes_o_dates} ON {scenes_o_dates}.{scene_id} = {scenes}.id "+
		"WHERE s.{performer_id} = {performers}.id "+
		")",
	map[string]interface{}{
		"performers_images": performersImagesTable,
		"images":            imageTable,
		"performer_id":      performerIDColumn,
		"images_id":         imageIDColumn,
		"performers":        performerTable,
		"performers_scenes": performersScenesTable,
		"scenes":            sceneTable,
		"scene_id":          sceneIDColumn,
		"scenes_o_dates":    scenesODatesTable,
		"o_date":            sceneODateColumn,
	},
)
func performerOCounterCriterionHandler(qb *PerformerStore, count *models.IntCriterionInput) criterionHandlerFunc {
h := joinedMultiSumCriterionHandlerBuilder{
primaryTable: performerTable,
foreignTable1: sceneTable,
joinTable1: performersScenesTable,
foreignTable2: imageTable,
joinTable2: performersImagesTable,
primaryFK: performerIDColumn,
foreignFK1: sceneIDColumn,
foreignFK2: imageIDColumn,
sum: "o_counter",
return func(ctx context.Context, f *filterBuilder) {
if count == nil {
return
}
return h.handler(count)
lhs := "(" + selectPerformerOCountSQL + ")"
clause, args := getIntCriterionWhereClause(lhs, *count)
f.addWhere(clause, args...)
}
}
func performerStudiosCriterionHandler(qb *PerformerStore, studios *models.HierarchicalMultiCriterionInput) criterionHandlerFunc {
@ -998,6 +1022,11 @@ func performerAppearsWithCriterionHandler(qb *PerformerStore, performers *models
}
}
// sortByOCounter returns an ORDER BY clause sorting performers by their
// total o-count in the given direction.
func (qb *PerformerStore) sortByOCounter(direction string) string {
	// need to sum the o_counter from scenes and images
	return " ORDER BY (" + selectPerformerOCountSQL + ") " + direction
}
func (qb *PerformerStore) getPerformerSort(findFilter *models.FindFilterType) string {
var sort string
var direction string
@ -1019,12 +1048,11 @@ func (qb *PerformerStore) getPerformerSort(findFilter *models.FindFilterType) st
sortQuery += getCountSort(performerTable, performersImagesTable, performerIDColumn, direction)
case "galleries_count":
sortQuery += getCountSort(performerTable, performersGalleriesTable, performerIDColumn, direction)
case "o_counter":
sortQuery += qb.sortByOCounter(direction)
default:
sortQuery += getSort(sort, direction, "performers")
}
if sort == "o_counter" {
return getMultiSumSort("o_counter", performerTable, sceneTable, performersScenesTable, imageTable, performersImagesTable, performerIDColumn, sceneIDColumn, imageIDColumn, direction)
}
// Whatever the sorting, always use name/id as a final sort
sortQuery += ", COALESCE(performers.name, performers.id) COLLATE NATURAL_CI ASC"

View file

@ -93,6 +93,7 @@ func (r *updateRecord) setTimestamp(destField string, v models.OptionalTime) {
}
}
//nolint:golint,unused
func (r *updateRecord) setNullTimestamp(destField string, v models.OptionalTime) {
if v.Set {
r.set(destField, NullTimestampFromTimePtr(v.Ptr()))

View file

@ -447,6 +447,14 @@ type relatedFileRow struct {
Primary bool `db:"primary"`
}
// idToIndexMap returns a map from each id to its index in ids.
// If an id appears more than once, the last occurrence's index wins.
func idToIndexMap(ids []int) map[int]int {
	// pre-size to avoid rehashing as the map grows
	ret := make(map[int]int, len(ids))
	for i, id := range ids {
		ret[id] = i
	}
	return ret
}
func (r *filesRepository) getMany(ctx context.Context, ids []int, primaryOnly bool) ([][]models.FileID, error) {
var primaryClause string
if primaryOnly {
@ -476,10 +484,7 @@ func (r *filesRepository) getMany(ctx context.Context, ids []int, primaryOnly bo
}
ret := make([][]models.FileID, len(ids))
idToIndex := make(map[int]int)
for i, id := range ids {
idToIndex[id] = i
}
idToIndex := idToIndexMap(ids)
for _, row := range fileRows {
id := row.ID

View file

@ -9,7 +9,6 @@ import (
"sort"
"strconv"
"strings"
"time"
"github.com/doug-martin/goqu/v9"
"github.com/doug-martin/goqu/v9/exp"
@ -32,6 +31,10 @@ const (
moviesScenesTable = "movies_scenes"
scenesURLsTable = "scene_urls"
sceneURLColumn = "url"
scenesViewDatesTable = "scenes_view_dates"
sceneViewDateColumn = "view_date"
scenesODatesTable = "scenes_o_dates"
sceneODateColumn = "o_date"
sceneCoverBlobColumn = "cover_blob"
)
@ -81,14 +84,11 @@ type sceneRow struct {
// expressed as 1-100
Rating null.Int `db:"rating"`
Organized bool `db:"organized"`
OCounter int `db:"o_counter"`
StudioID null.Int `db:"studio_id,omitempty"`
CreatedAt Timestamp `db:"created_at"`
UpdatedAt Timestamp `db:"updated_at"`
LastPlayedAt NullTimestamp `db:"last_played_at"`
ResumeTime float64 `db:"resume_time"`
PlayDuration float64 `db:"play_duration"`
PlayCount int `db:"play_count"`
// not used in resolutions or updates
CoverBlob zero.String `db:"cover_blob"`
@ -103,14 +103,11 @@ func (r *sceneRow) fromScene(o models.Scene) {
r.Date = NullDateFromDatePtr(o.Date)
r.Rating = intFromPtr(o.Rating)
r.Organized = o.Organized
r.OCounter = o.OCounter
r.StudioID = intFromPtr(o.StudioID)
r.CreatedAt = Timestamp{Timestamp: o.CreatedAt}
r.UpdatedAt = Timestamp{Timestamp: o.UpdatedAt}
r.LastPlayedAt = NullTimestampFromTimePtr(o.LastPlayedAt)
r.ResumeTime = o.ResumeTime
r.PlayDuration = o.PlayDuration
r.PlayCount = o.PlayCount
}
type sceneQueryRow struct {
@ -132,7 +129,6 @@ func (r *sceneQueryRow) resolve() *models.Scene {
Date: r.Date.DatePtr(),
Rating: nullIntPtr(r.Rating),
Organized: r.Organized,
OCounter: r.OCounter,
StudioID: nullIntPtr(r.StudioID),
PrimaryFileID: nullIntFileIDPtr(r.PrimaryFileID),
@ -142,10 +138,8 @@ func (r *sceneQueryRow) resolve() *models.Scene {
CreatedAt: r.CreatedAt.Timestamp,
UpdatedAt: r.UpdatedAt.Timestamp,
LastPlayedAt: r.LastPlayedAt.TimePtr(),
ResumeTime: r.ResumeTime,
PlayDuration: r.PlayDuration,
PlayCount: r.PlayCount,
}
if r.PrimaryFileFolderPath.Valid && r.PrimaryFileBasename.Valid {
@ -167,14 +161,11 @@ func (r *sceneRowRecord) fromPartial(o models.ScenePartial) {
r.setNullDate("date", o.Date)
r.setNullInt("rating", o.Rating)
r.setBool("organized", o.Organized)
r.setInt("o_counter", o.OCounter)
r.setNullInt("studio_id", o.StudioID)
r.setTimestamp("created_at", o.CreatedAt)
r.setTimestamp("updated_at", o.UpdatedAt)
r.setNullTimestamp("last_played_at", o.LastPlayedAt)
r.setFloat64("resume_time", o.ResumeTime)
r.setFloat64("play_duration", o.PlayDuration)
r.setInt("play_count", o.PlayCount)
}
type SceneStore struct {
@ -182,7 +173,8 @@ type SceneStore struct {
blobJoinQueryBuilder
tableMgr *table
oCounterManager
oDateManager
viewDateManager
fileStore *FileStore
}
@ -199,7 +191,8 @@ func NewSceneStore(fileStore *FileStore, blobStore *BlobStore) *SceneStore {
},
tableMgr: sceneTableMgr,
oCounterManager: oCounterManager{sceneTableMgr},
viewDateManager: viewDateManager{scenesViewTableMgr},
oDateManager: oDateManager{scenesOTableMgr},
fileStore: fileStore,
}
}
@ -710,20 +703,18 @@ func (qb *SceneStore) CountByPerformerID(ctx context.Context, performerID int) (
func (qb *SceneStore) OCountByPerformerID(ctx context.Context, performerID int) (int, error) {
table := qb.table()
joinTable := scenesPerformersJoinTable
oHistoryTable := goqu.T(scenesODatesTable)
q := dialect.Select(goqu.COALESCE(goqu.SUM("o_counter"), 0)).From(table).InnerJoin(joinTable, goqu.On(table.Col(idColumn).Eq(joinTable.Col(sceneIDColumn)))).Where(joinTable.Col(performerIDColumn).Eq(performerID))
var ret int
if err := querySimple(ctx, q, &ret); err != nil {
return 0, err
}
q := dialect.Select(goqu.COUNT("*")).From(table).InnerJoin(
oHistoryTable,
goqu.On(table.Col(idColumn).Eq(oHistoryTable.Col(sceneIDColumn))),
).InnerJoin(
joinTable,
goqu.On(
table.Col(idColumn).Eq(joinTable.Col(sceneIDColumn)),
),
).Where(joinTable.Col(performerIDColumn).Eq(performerID))
return ret, nil
}
func (qb *SceneStore) OCount(ctx context.Context) (int, error) {
table := qb.table()
q := dialect.Select(goqu.COALESCE(goqu.SUM("o_counter"), 0)).From(table)
var ret int
if err := querySimple(ctx, q, &ret); err != nil {
return 0, err
@ -757,24 +748,6 @@ func (qb *SceneStore) Count(ctx context.Context) (int, error) {
return count(ctx, q)
}
func (qb *SceneStore) PlayCount(ctx context.Context) (int, error) {
q := dialect.Select(goqu.COALESCE(goqu.SUM("play_count"), 0)).From(qb.table())
var ret int
if err := querySimple(ctx, q, &ret); err != nil {
return 0, err
}
return ret, nil
}
func (qb *SceneStore) UniqueScenePlayCount(ctx context.Context) (int, error) {
table := qb.table()
q := dialect.Select(goqu.COUNT("*")).From(table).Where(table.Col("play_count").Gt(0))
return count(ctx, q)
}
func (qb *SceneStore) Size(ctx context.Context) (float64, error) {
table := qb.table()
fileTable := fileTableMgr.table
@ -977,7 +950,7 @@ func (qb *SceneStore) makeFilter(ctx context.Context, sceneFilter *models.SceneF
query.handleCriterion(ctx, scenePhashDistanceCriterionHandler(qb, sceneFilter.PhashDistance))
query.handleCriterion(ctx, intCriterionHandler(sceneFilter.Rating100, "scenes.rating", nil))
query.handleCriterion(ctx, intCriterionHandler(sceneFilter.OCounter, "scenes.o_counter", nil))
query.handleCriterion(ctx, sceneOCountCriterionHandler(sceneFilter.OCounter))
query.handleCriterion(ctx, boolCriterionHandler(sceneFilter.Organized, "scenes.organized", nil))
query.handleCriterion(ctx, floatIntCriterionHandler(sceneFilter.Duration, "video_files.duration", qb.addVideoFilesTable))
@ -1011,7 +984,7 @@ func (qb *SceneStore) makeFilter(ctx context.Context, sceneFilter *models.SceneF
query.handleCriterion(ctx, floatIntCriterionHandler(sceneFilter.ResumeTime, "scenes.resume_time", nil))
query.handleCriterion(ctx, floatIntCriterionHandler(sceneFilter.PlayDuration, "scenes.play_duration", nil))
query.handleCriterion(ctx, intCriterionHandler(sceneFilter.PlayCount, "scenes.play_count", nil))
query.handleCriterion(ctx, scenePlayCountCriterionHandler(sceneFilter.PlayCount))
query.handleCriterion(ctx, sceneTagsCriterionHandler(qb, sceneFilter.Tags))
query.handleCriterion(ctx, sceneTagCountCriterionHandler(qb, sceneFilter.TagCount))
@ -1194,6 +1167,26 @@ func (qb *SceneStore) QueryCount(ctx context.Context, sceneFilter *models.SceneF
return query.executeCount(ctx)
}
// scenePlayCountCriterionHandler filters scenes by play count, i.e. the
// number of rows in scenes_view_dates for each scene (replacing the
// removed scenes.play_count column).
func scenePlayCountCriterionHandler(count *models.IntCriterionInput) criterionHandlerFunc {
	h := countCriterionHandlerBuilder{
		primaryTable: sceneTable,
		joinTable:    scenesViewDatesTable,
		primaryFK:    sceneIDColumn,
	}

	return h.handler(count)
}
// sceneOCountCriterionHandler filters scenes by o-count, i.e. the number of
// rows in scenes_o_dates for each scene (replacing the removed
// scenes.o_counter column).
func sceneOCountCriterionHandler(count *models.IntCriterionInput) criterionHandlerFunc {
	h := countCriterionHandlerBuilder{
		primaryTable: sceneTable,
		joinTable:    scenesODatesTable,
		primaryFK:    sceneIDColumn,
	}

	return h.handler(count)
}
func sceneFileCountCriterionHandler(qb *SceneStore, fileCount *models.IntCriterionInput) criterionHandlerFunc {
h := countCriterionHandlerBuilder{
primaryTable: sceneTable,
@ -1600,8 +1593,11 @@ func (qb *SceneStore) setSceneSort(query *queryBuilder, findFilter *models.FindF
addFolderTable()
query.sortAndPagination += " ORDER BY COALESCE(scenes.title, files.basename) COLLATE NATURAL_CI " + direction + ", folders.path COLLATE NATURAL_CI " + direction
case "play_count":
// handle here since getSort has special handling for _count suffix
query.sortAndPagination += " ORDER BY scenes.play_count " + direction
query.sortAndPagination += getCountSort(sceneTable, scenesViewDatesTable, sceneIDColumn, direction)
case "last_played_at":
query.sortAndPagination += fmt.Sprintf(" ORDER BY (SELECT MAX(view_date) FROM %s AS sort WHERE sort.%s = %s.id) %s", scenesViewDatesTable, sceneIDColumn, sceneTable, getSortDirection(direction))
case "o_counter":
query.sortAndPagination += getCountSort(sceneTable, scenesODatesTable, sceneIDColumn, direction)
default:
query.sortAndPagination += getSort(sort, direction, "scenes")
}
@ -1610,23 +1606,6 @@ func (qb *SceneStore) setSceneSort(query *queryBuilder, findFilter *models.FindF
query.sortAndPagination += ", COALESCE(scenes.title, scenes.id) COLLATE NATURAL_CI ASC"
}
func (qb *SceneStore) getPlayCount(ctx context.Context, id int) (int, error) {
q := dialect.From(qb.tableMgr.table).Select("play_count").Where(goqu.Ex{"id": id})
const single = true
var ret int
if err := queryFunc(ctx, q, single, func(rows *sqlx.Rows) error {
if err := rows.Scan(&ret); err != nil {
return err
}
return nil
}); err != nil {
return 0, err
}
return ret, nil
}
func (qb *SceneStore) SaveActivity(ctx context.Context, id int, resumeTime *float64, playDuration *float64) (bool, error) {
if err := qb.tableMgr.checkIDExists(ctx, id); err != nil {
return false, err
@ -1651,21 +1630,6 @@ func (qb *SceneStore) SaveActivity(ctx context.Context, id int, resumeTime *floa
return true, nil
}
func (qb *SceneStore) IncrementWatchCount(ctx context.Context, id int) (int, error) {
if err := qb.tableMgr.checkIDExists(ctx, id); err != nil {
return 0, err
}
if err := qb.tableMgr.updateByID(ctx, id, goqu.Record{
"play_count": goqu.L("play_count + 1"),
"last_played_at": time.Now(),
}); err != nil {
return 0, err
}
return qb.getPlayCount(ctx, id)
}
func (qb *SceneStore) GetURLs(ctx context.Context, sceneID int) ([]string, error) {
return scenesURLsTableMgr.get(ctx, sceneID)
}

View file

@ -82,10 +82,7 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
director = "director"
url = "url"
rating = 60
ocounter = 5
lastPlayedAt = time.Date(2002, 1, 1, 0, 0, 0, 0, time.UTC)
resumeTime = 10.0
playCount = 3
playDuration = 34.0
createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
@ -117,7 +114,6 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
Date: &date,
Rating: &rating,
Organized: true,
OCounter: ocounter,
StudioID: &studioIDs[studioIdxWithScene],
CreatedAt: createdAt,
UpdatedAt: updatedAt,
@ -144,9 +140,7 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
Endpoint: endpoint2,
},
}),
LastPlayedAt: &lastPlayedAt,
ResumeTime: float64(resumeTime),
PlayCount: playCount,
PlayDuration: playDuration,
},
false,
@ -162,7 +156,6 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
Date: &date,
Rating: &rating,
Organized: true,
OCounter: ocounter,
StudioID: &studioIDs[studioIdxWithScene],
Files: models.NewRelatedVideoFiles([]*models.VideoFile{
videoFile.(*models.VideoFile),
@ -192,9 +185,7 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
Endpoint: endpoint2,
},
}),
LastPlayedAt: &lastPlayedAt,
ResumeTime: resumeTime,
PlayCount: playCount,
PlayDuration: playDuration,
},
false,
@ -321,10 +312,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
director = "director"
url = "url"
rating = 60
ocounter = 5
lastPlayedAt = time.Date(2002, 1, 1, 0, 0, 0, 0, time.UTC)
resumeTime = 10.0
playCount = 3
playDuration = 34.0
createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
@ -355,7 +343,6 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
Date: &date,
Rating: &rating,
Organized: true,
OCounter: ocounter,
StudioID: &studioIDs[studioIdxWithScene],
CreatedAt: createdAt,
UpdatedAt: updatedAt,
@ -382,9 +369,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
Endpoint: endpoint2,
},
}),
LastPlayedAt: &lastPlayedAt,
ResumeTime: resumeTime,
PlayCount: playCount,
PlayDuration: playDuration,
},
false,
@ -537,10 +522,7 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
director = "director"
url = "url"
rating = 60
ocounter = 5
lastPlayedAt = time.Date(2002, 1, 1, 0, 0, 0, 0, time.UTC)
resumeTime = 10.0
playCount = 3
playDuration = 34.0
createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
@ -576,7 +558,6 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
Date: models.NewOptionalDate(date),
Rating: models.NewOptionalInt(rating),
Organized: models.NewOptionalBool(true),
OCounter: models.NewOptionalInt(ocounter),
StudioID: models.NewOptionalInt(studioIDs[studioIdxWithScene]),
CreatedAt: models.NewOptionalTime(createdAt),
UpdatedAt: models.NewOptionalTime(updatedAt),
@ -618,9 +599,7 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
},
Mode: models.RelationshipUpdateModeSet,
},
LastPlayedAt: models.NewOptionalTime(lastPlayedAt),
ResumeTime: models.NewOptionalFloat64(resumeTime),
PlayCount: models.NewOptionalInt(playCount),
PlayDuration: models.NewOptionalFloat64(playDuration),
},
models.Scene{
@ -636,7 +615,6 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
Date: &date,
Rating: &rating,
Organized: true,
OCounter: ocounter,
StudioID: &studioIDs[studioIdxWithScene],
CreatedAt: createdAt,
UpdatedAt: updatedAt,
@ -663,9 +641,7 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
Endpoint: endpoint2,
},
}),
LastPlayedAt: &lastPlayedAt,
ResumeTime: resumeTime,
PlayCount: playCount,
PlayDuration: playDuration,
},
false,
@ -676,7 +652,6 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
clearScenePartial(),
models.Scene{
ID: sceneIDs[sceneIdxWithSpacedName],
OCounter: getOCounter(sceneIdxWithSpacedName),
Files: models.NewRelatedVideoFiles([]*models.VideoFile{
makeSceneFile(sceneIdxWithSpacedName),
}),
@ -685,9 +660,7 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
PerformerIDs: models.NewRelatedIDs([]int{}),
Movies: models.NewRelatedMovies([]models.MoviesScenes{}),
StashIDs: models.NewRelatedStashIDs([]models.StashID{}),
PlayCount: getScenePlayCount(sceneIdxWithSpacedName),
PlayDuration: getScenePlayDuration(sceneIdxWithSpacedName),
LastPlayedAt: getSceneLastPlayed(sceneIdxWithSpacedName),
ResumeTime: getSceneResumeTime(sceneIdxWithSpacedName),
},
false,
@ -1296,7 +1269,7 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
}
}
func Test_sceneQueryBuilder_IncrementOCounter(t *testing.T) {
func Test_sceneQueryBuilder_AddO(t *testing.T) {
tests := []struct {
name string
id int
@ -1306,52 +1279,9 @@ func Test_sceneQueryBuilder_IncrementOCounter(t *testing.T) {
{
"increment",
sceneIDs[1],
2,
false,
},
{
"invalid",
invalidID,
0,
true,
},
}
qb := db.Scene
for _, tt := range tests {
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
got, err := qb.IncrementOCounter(ctx, tt.id)
if (err != nil) != tt.wantErr {
t.Errorf("sceneQueryBuilder.IncrementOCounter() error = %v, wantErr %v", err, tt.wantErr)
return
}
if got != tt.want {
t.Errorf("sceneQueryBuilder.IncrementOCounter() = %v, want %v", got, tt.want)
}
})
}
}
func Test_sceneQueryBuilder_DecrementOCounter(t *testing.T) {
tests := []struct {
name string
id int
want int
wantErr bool
}{
{
"decrement",
sceneIDs[2],
1,
false,
},
{
"zero",
sceneIDs[0],
0,
false,
},
{
"invalid",
invalidID,
@ -1364,19 +1294,19 @@ func Test_sceneQueryBuilder_DecrementOCounter(t *testing.T) {
for _, tt := range tests {
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
got, err := qb.DecrementOCounter(ctx, tt.id)
got, err := qb.AddO(ctx, tt.id, nil)
if (err != nil) != tt.wantErr {
t.Errorf("sceneQueryBuilder.DecrementOCounter() error = %v, wantErr %v", err, tt.wantErr)
t.Errorf("sceneQueryBuilder.AddO() error = %v, wantErr %v", err, tt.wantErr)
return
}
if got != tt.want {
t.Errorf("sceneQueryBuilder.DecrementOCounter() = %v, want %v", got, tt.want)
if len(got) != tt.want {
t.Errorf("sceneQueryBuilder.AddO() = %v, want %v", got, tt.want)
}
})
}
}
func Test_sceneQueryBuilder_ResetOCounter(t *testing.T) {
func Test_sceneQueryBuilder_DeleteO(t *testing.T) {
tests := []struct {
name string
id int
@ -1395,11 +1325,42 @@ func Test_sceneQueryBuilder_ResetOCounter(t *testing.T) {
0,
false,
},
}
qb := db.Scene
for _, tt := range tests {
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
got, err := qb.DeleteO(ctx, tt.id, nil)
if (err != nil) != tt.wantErr {
t.Errorf("sceneQueryBuilder.DeleteO() error = %v, wantErr %v", err, tt.wantErr)
return
}
if len(got) != tt.want {
t.Errorf("sceneQueryBuilder.DeleteO() = %v, want %v", got, tt.want)
}
})
}
}
func Test_sceneQueryBuilder_ResetO(t *testing.T) {
tests := []struct {
name string
id int
want int
wantErr bool
}{
{
"invalid",
invalidID,
"decrement",
sceneIDs[2],
0,
true,
false,
},
{
"zero",
sceneIDs[0],
0,
false,
},
}
@ -1407,9 +1368,9 @@ func Test_sceneQueryBuilder_ResetOCounter(t *testing.T) {
for _, tt := range tests {
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
got, err := qb.ResetOCounter(ctx, tt.id)
got, err := qb.ResetO(ctx, tt.id)
if (err != nil) != tt.wantErr {
t.Errorf("sceneQueryBuilder.ResetOCounter() error = %v, wantErr %v", err, tt.wantErr)
t.Errorf("sceneQueryBuilder.ResetO() error = %v, wantErr %v", err, tt.wantErr)
return
}
if got != tt.want {
@ -1419,6 +1380,10 @@ func Test_sceneQueryBuilder_ResetOCounter(t *testing.T) {
}
}
func Test_sceneQueryBuilder_ResetWatchCount(t *testing.T) {
return
}
func Test_sceneQueryBuilder_Destroy(t *testing.T) {
tests := []struct {
name string
@ -2158,19 +2123,19 @@ func TestSceneQuery(t *testing.T) {
[]int{sceneIdxWithMovie},
false,
},
{
"specific play count",
nil,
&models.SceneFilterType{
PlayCount: &models.IntCriterionInput{
Modifier: models.CriterionModifierEquals,
Value: getScenePlayCount(sceneIdxWithGallery),
},
},
[]int{sceneIdxWithGallery},
[]int{sceneIdxWithMovie},
false,
},
// {
// "specific play count",
// nil,
// &models.SceneFilterType{
// PlayCount: &models.IntCriterionInput{
// Modifier: models.CriterionModifierEquals,
// Value: getScenePlayCount(sceneIdxWithGallery),
// },
// },
// []int{sceneIdxWithGallery},
// []int{sceneIdxWithMovie},
// false,
// },
{
"stash id with endpoint",
nil,
@ -2767,7 +2732,11 @@ func verifyScenesOCounter(t *testing.T, oCounterCriterion models.IntCriterionInp
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
for _, scene := range scenes {
verifyInt(t, scene.OCounter, oCounterCriterion)
count, err := sqb.GetOCount(ctx, scene.ID)
if err != nil {
t.Errorf("Error getting ocounter: %v", err)
}
verifyInt(t, count, oCounterCriterion)
}
return nil
@ -4023,14 +3992,14 @@ func TestSceneQuerySorting(t *testing.T) {
"play_count",
"play_count",
models.SortDirectionEnumDesc,
sceneIDs[sceneIdx1WithPerformer],
-1,
-1,
},
{
"last_played_at",
"last_played_at",
models.SortDirectionEnumDesc,
sceneIDs[sceneIdx1WithPerformer],
-1,
-1,
},
{
@ -4551,7 +4520,7 @@ func TestSceneStore_AssignFiles(t *testing.T) {
}
}
func TestSceneStore_IncrementWatchCount(t *testing.T) {
func TestSceneStore_AddView(t *testing.T) {
tests := []struct {
name string
sceneID int
@ -4561,7 +4530,7 @@ func TestSceneStore_IncrementWatchCount(t *testing.T) {
{
"valid",
sceneIDs[sceneIdx1WithPerformer],
getScenePlayCount(sceneIdx1WithPerformer) + 1,
1, //getScenePlayCount(sceneIdx1WithPerformer) + 1,
false,
},
{
@ -4577,9 +4546,9 @@ func TestSceneStore_IncrementWatchCount(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
withRollbackTxn(func(ctx context.Context) error {
newVal, err := qb.IncrementWatchCount(ctx, tt.sceneID)
views, err := qb.AddViews(ctx, tt.sceneID, nil)
if (err != nil) != tt.wantErr {
t.Errorf("SceneStore.IncrementWatchCount() error = %v, wantErr %v", err, tt.wantErr)
t.Errorf("SceneStore.AddView() error = %v, wantErr %v", err, tt.wantErr)
}
if err != nil {
@ -4587,16 +4556,21 @@ func TestSceneStore_IncrementWatchCount(t *testing.T) {
}
assert := assert.New(t)
assert.Equal(tt.expectedCount, newVal)
assert.Equal(tt.expectedCount, len(views))
// find the scene and check the count
scene, err := qb.Find(ctx, tt.sceneID)
count, err := qb.CountViews(ctx, tt.sceneID)
if err != nil {
t.Errorf("SceneStore.Find() error = %v", err)
t.Errorf("SceneStore.CountViews() error = %v", err)
}
assert.Equal(tt.expectedCount, scene.PlayCount)
assert.True(scene.LastPlayedAt.After(time.Now().Add(-1 * time.Minute)))
lastView, err := qb.LastView(ctx, tt.sceneID)
if err != nil {
t.Errorf("SceneStore.LastView() error = %v", err)
}
assert.Equal(tt.expectedCount, count)
assert.True(lastView.After(time.Now().Add(-1 * time.Minute)))
return nil
})
@ -4604,6 +4578,10 @@ func TestSceneStore_IncrementWatchCount(t *testing.T) {
}
}
func TestSceneStore_DecrementWatchCount(t *testing.T) {
return
}
func TestSceneStore_SaveActivity(t *testing.T) {
var (
resumeTime = 111.2
@ -4702,3 +4680,77 @@ func TestSceneStore_SaveActivity(t *testing.T) {
// TODO Count
// TODO SizeCount
// TODO - this should be in history_test and generalised
// TestSceneStore_CountAllViews verifies that the total view count across all
// scenes increases by one after a single view is recorded.
func TestSceneStore_CountAllViews(t *testing.T) {
	withRollbackTxn(func(ctx context.Context) error {
		qb := db.Scene
		id := sceneIDs[sceneIdx1WithPerformer]

		// baseline: total views across all scenes
		before, err := qb.CountAllViews(ctx)
		if err != nil {
			t.Errorf("SceneStore.CountAllViews() error = %v", err)
			return nil
		}

		// record a single view for the scene
		if _, err = qb.AddViews(ctx, id, nil); err != nil {
			t.Errorf("SceneStore.AddViews() error = %v", err)
			return nil
		}

		after, err := qb.CountAllViews(ctx)
		if err != nil {
			t.Errorf("SceneStore.CountAllViews() error = %v", err)
			return nil
		}

		assert.Equal(t, before+1, after)

		return nil
	})
}
// TestSceneStore_CountUniqueViews verifies that a scene contributes only once
// to the unique view count, regardless of how many views it accrues.
func TestSceneStore_CountUniqueViews(t *testing.T) {
	withRollbackTxn(func(ctx context.Context) error {
		qb := db.Scene
		id := sceneIDs[sceneIdx1WithPerformer]

		// baseline: number of scenes with at least one view
		before, err := qb.CountUniqueViews(ctx)
		if err != nil {
			t.Errorf("SceneStore.CountUniqueViews() error = %v", err)
			return nil
		}

		// record two views for the same scene; the unique count should
		// only rise by one
		for i := 0; i < 2; i++ {
			if _, err = qb.AddViews(ctx, id, nil); err != nil {
				t.Errorf("SceneStore.AddViews() error = %v", err)
				return nil
			}
		}

		after, err := qb.CountUniqueViews(ctx)
		if err != nil {
			t.Errorf("SceneStore.CountUniqueViews() error = %v", err)
			return nil
		}

		assert.Equal(t, before+1, after)

		return nil
	})
}

View file

@ -1012,10 +1012,6 @@ func makeSceneFile(i int) *models.VideoFile {
}
}
func getScenePlayCount(index int) int {
return index % 5
}
func getScenePlayDuration(index int) float64 {
if index%5 == 0 {
return 0
@ -1032,15 +1028,6 @@ func getSceneResumeTime(index int) float64 {
return float64(index%5) * 1.2
}
func getSceneLastPlayed(index int) *time.Time {
if index%5 == 0 {
return nil
}
t := time.Date(2020, 1, index%5, 1, 2, 3, 0, time.UTC)
return &t
}
func makeScene(i int) *models.Scene {
title := getSceneTitle(i)
details := getSceneStringValue(i, "Details")
@ -1073,7 +1060,6 @@ func makeScene(i int) *models.Scene {
getSceneEmptyString(i, urlField),
}),
Rating: getIntPtr(rating),
OCounter: getOCounter(i),
Date: getObjectDate(i),
StudioID: studioID,
GalleryIDs: models.NewRelatedIDs(gids),
@ -1083,9 +1069,7 @@ func makeScene(i int) *models.Scene {
StashIDs: models.NewRelatedStashIDs([]models.StashID{
sceneStashID(i),
}),
PlayCount: getScenePlayCount(i),
PlayDuration: getScenePlayDuration(i),
LastPlayedAt: getSceneLastPlayed(i),
ResumeTime: getSceneResumeTime(i),
}
}

View file

@ -110,27 +110,6 @@ func getCountSort(primaryTable, joinTable, primaryFK, direction string) string {
return fmt.Sprintf(" ORDER BY (SELECT COUNT(*) FROM %s AS sort WHERE sort.%s = %s.id) %s", joinTable, primaryFK, primaryTable, getSortDirection(direction))
}
func getMultiSumSort(sum string, primaryTable, foreignTable1, joinTable1, foreignTable2, joinTable2, primaryFK, foreignFK1, foreignFK2, direction string) string {
return fmt.Sprintf(" ORDER BY (SELECT SUM(%s) "+
"FROM ("+
"SELECT SUM(%s) as %s from %s s "+
"LEFT JOIN %s ON %s.id = s.%s "+
"WHERE s.%s = %s.id "+
"UNION ALL "+
"SELECT SUM(%s) as %s from %s s "+
"LEFT JOIN %s ON %s.id = s.%s "+
"WHERE s.%s = %s.id "+
")) %s",
sum,
sum, sum, joinTable1,
foreignTable1, foreignTable1, foreignFK1,
primaryFK, primaryTable,
sum, sum, joinTable2,
foreignTable2, foreignTable2, foreignFK2,
primaryFK, primaryTable,
getSortDirection(direction))
}
func getStringSearchClause(columns []string, q string, not bool) sqlClause {
var likeClauses []string
var args []interface{}
@ -349,28 +328,6 @@ func getCountCriterionClause(primaryTable, joinTable, primaryFK string, criterio
return getIntCriterionWhereClause(lhs, criterion)
}
func getJoinedMultiSumCriterionClause(primaryTable, foreignTable1, joinTable1, foreignTable2, joinTable2, primaryFK string, foreignFK1 string, foreignFK2 string, sum string, criterion models.IntCriterionInput) (string, []interface{}) {
lhs := fmt.Sprintf("(SELECT SUM(%s) "+
"FROM ("+
"SELECT SUM(%s) as %s from %s s "+
"LEFT JOIN %s ON %s.id = s.%s "+
"WHERE s.%s = %s.id "+
"UNION ALL "+
"SELECT SUM(%s) as %s from %s s "+
"LEFT JOIN %s ON %s.id = s.%s "+
"WHERE s.%s = %s.id "+
"))",
sum,
sum, sum, joinTable1,
foreignTable1, foreignTable1, foreignFK1,
primaryFK, primaryTable,
sum, sum, joinTable2,
foreignTable2, foreignTable2, foreignFK2,
primaryFK, primaryTable,
)
return getIntCriterionWhereClause(lhs, criterion)
}
func coalesce(column string) string {
return fmt.Sprintf("COALESCE(%s, '')", column)
}

View file

@ -5,6 +5,7 @@ import (
"database/sql"
"errors"
"fmt"
"time"
"github.com/doug-martin/goqu/v9"
"github.com/doug-martin/goqu/v9/exp"
@ -773,6 +774,270 @@ func (t *relatedFilesTable) setPrimary(ctx context.Context, id int, fileID model
return nil
}
// viewHistoryTable provides access to a history join table that stores one
// row per recorded event (e.g. a view or O event) for an object, keyed by the
// object's id column with an associated timestamp column.
type viewHistoryTable struct {
	table
	// dateColumn is the timestamp column recording when each event occurred.
	dateColumn exp.IdentifierExpression
}
// getDates returns all recorded dates for the given object id, most recent
// first. Returns a nil slice when no dates are recorded.
func (t *viewHistoryTable) getDates(ctx context.Context, id int) ([]time.Time, error) {
	table := t.table.table
	q := dialect.Select(
		t.dateColumn,
	).From(table).Where(
		t.idColumn.Eq(id),
	).Order(t.dateColumn.Desc())
	const single = false
	var ret []time.Time
	if err := queryFunc(ctx, q, single, func(rows *sqlx.Rows) error {
		// Timestamp handles parsing the stored RFC3339 value
		var date Timestamp
		if err := rows.Scan(&date); err != nil {
			return err
		}
		ret = append(ret, date.Timestamp)
		return nil
	}); err != nil {
		return nil, err
	}
	return ret, nil
}
// getManyDates returns the recorded dates for each of the given object ids.
// The outer slice is parallel to ids; each inner slice is ordered most recent
// first, and is nil for ids with no recorded dates.
func (t *viewHistoryTable) getManyDates(ctx context.Context, ids []int) ([][]time.Time, error) {
	table := t.table.table
	q := dialect.Select(
		t.idColumn,
		t.dateColumn,
	).From(table).Where(
		t.idColumn.In(ids),
	).Order(t.dateColumn.Desc())
	// map each returned row back to its position in the input slice
	ret := make([][]time.Time, len(ids))
	idToIndex := idToIndexMap(ids)
	const single = false
	if err := queryFunc(ctx, q, single, func(rows *sqlx.Rows) error {
		var id int
		var date Timestamp
		if err := rows.Scan(&id, &date); err != nil {
			return err
		}
		idx := idToIndex[id]
		ret[idx] = append(ret[idx], date.Timestamp)
		return nil
	}); err != nil {
		return nil, err
	}
	return ret, nil
}
// getLastDate returns the most recent recorded date for the given object id,
// or nil if no dates are recorded.
func (t *viewHistoryTable) getLastDate(ctx context.Context, id int) (*time.Time, error) {
	q := dialect.Select(t.dateColumn).
		From(t.table.table).
		Where(t.idColumn.Eq(id)).
		Order(t.dateColumn.Desc()).
		Limit(1)

	var latest NullTimestamp
	if err := querySimple(ctx, q, &latest); err != nil {
		return nil, err
	}

	return latest.TimePtr(), nil
}
// getManyLastDate returns the most recent recorded date for each of the given
// object ids. The returned slice is parallel to ids; entries are nil for ids
// with no recorded dates.
func (t *viewHistoryTable) getManyLastDate(ctx context.Context, ids []int) ([]*time.Time, error) {
	table := t.table.table
	q := dialect.Select(
		t.idColumn,
		goqu.MAX(t.dateColumn),
	).From(table).Where(
		t.idColumn.In(ids),
	).GroupBy(t.idColumn)
	ret := make([]*time.Time, len(ids))
	idToIndex := idToIndexMap(ids)
	const single = false
	if err := queryFunc(ctx, q, single, func(rows *sqlx.Rows) error {
		var id int
		// MAX appears to return a string, so handle it manually
		var dateString string
		if err := rows.Scan(&id, &dateString); err != nil {
			return err
		}
		t, err := time.Parse(TimestampFormat, dateString)
		if err != nil {
			return fmt.Errorf("parsing date %v: %w", dateString, err)
		}
		idx := idToIndex[id]
		ret[idx] = &t
		return nil
	}); err != nil {
		return nil, err
	}
	return ret, nil
}
// getCount returns the number of dates recorded for the given object id.
func (t *viewHistoryTable) getCount(ctx context.Context, id int) (int, error) {
	q := dialect.Select(goqu.COUNT("*")).
		From(t.table.table).
		Where(t.idColumn.Eq(id))

	var count int
	scanCount := func(rows *sqlx.Rows) error {
		return rows.Scan(&count)
	}

	if err := queryFunc(ctx, q, true, scanCount); err != nil {
		return 0, err
	}

	return count, nil
}
// getManyCount returns the number of dates recorded for each of the given
// object ids. The returned slice is parallel to ids; ids with no recorded
// dates (and therefore no GROUP BY row) are left at zero.
func (t *viewHistoryTable) getManyCount(ctx context.Context, ids []int) ([]int, error) {
	table := t.table.table
	q := dialect.Select(
		t.idColumn,
		goqu.COUNT(t.dateColumn),
	).From(table).Where(
		t.idColumn.In(ids),
	).GroupBy(t.idColumn)
	ret := make([]int, len(ids))
	idToIndex := idToIndexMap(ids)
	const single = false
	if err := queryFunc(ctx, q, single, func(rows *sqlx.Rows) error {
		var id int
		var count int
		if err := rows.Scan(&id, &count); err != nil {
			return err
		}
		idx := idToIndex[id]
		ret[idx] = count
		return nil
	}); err != nil {
		return nil, err
	}
	return ret, nil
}
// getAllCount returns the total number of recorded dates across all objects.
func (t *viewHistoryTable) getAllCount(ctx context.Context) (int, error) {
	q := dialect.Select(goqu.COUNT("*")).From(t.table.table)

	var total int
	scanTotal := func(rows *sqlx.Rows) error {
		return rows.Scan(&total)
	}

	if err := queryFunc(ctx, q, true, scanTotal); err != nil {
		return 0, err
	}

	return total, nil
}
// getUniqueCount returns the number of distinct object ids that have at least
// one recorded date.
func (t *viewHistoryTable) getUniqueCount(ctx context.Context) (int, error) {
	table := t.table.table
	q := dialect.Select(goqu.COUNT(goqu.DISTINCT(t.idColumn))).From(table)
	const single = true
	var ret int
	if err := queryFunc(ctx, q, single, func(rows *sqlx.Rows) error {
		if err := rows.Scan(&ret); err != nil {
			return err
		}
		return nil
	}); err != nil {
		return 0, err
	}
	return ret, nil
}
// addDates inserts one row per given date for the object id, then returns the
// object's full date history. If dates is empty, a single entry at the current
// time is inserted.
func (t *viewHistoryTable) addDates(ctx context.Context, id int, dates []time.Time) ([]time.Time, error) {
	table := t.table.table
	// default to a single entry at the current time
	if len(dates) == 0 {
		dates = []time.Time{time.Now()}
	}
	for _, d := range dates {
		q := dialect.Insert(table).Cols(t.idColumn.GetCol(), t.dateColumn.GetCol()).Vals(
			// convert all dates to UTC
			goqu.Vals{id, UTCTimestamp{Timestamp{d}}},
		)
		if _, err := exec(ctx, q); err != nil {
			return nil, fmt.Errorf("inserting into %s: %w", table.GetTable(), err)
		}
	}
	return t.getDates(ctx, id)
}
// deleteDates removes one matching row per given date for the object id, then
// returns the object's remaining date history. If dates is empty, the single
// most recent entry is removed instead.
func (t *viewHistoryTable) deleteDates(ctx context.Context, id int, dates []time.Time) ([]time.Time, error) {
	table := t.table.table
	mostRecent := false
	if len(dates) == 0 {
		// placeholder entry so the loop below runs exactly once; the
		// date value itself is not used in the mostRecent branch
		mostRecent = true
		dates = []time.Time{time.Now()}
	}
	for _, date := range dates {
		// select a single rowid to delete, so duplicate dates are
		// removed one at a time
		var subquery *goqu.SelectDataset
		if mostRecent {
			// delete the most recent
			subquery = dialect.Select("rowid").From(table).Where(
				t.idColumn.Eq(id),
			).Order(t.dateColumn.Desc()).Limit(1)
		} else {
			subquery = dialect.Select("rowid").From(table).Where(
				t.idColumn.Eq(id),
				t.dateColumn.Eq(UTCTimestamp{Timestamp{date}}),
			).Limit(1)
		}
		q := dialect.Delete(table).Where(goqu.I("rowid").Eq(subquery))
		if _, err := exec(ctx, q); err != nil {
			return nil, fmt.Errorf("deleting from %s: %w", table.GetTable(), err)
		}
	}
	return t.getDates(ctx, id)
}
// deleteAllDates removes every recorded date for the given object id and
// returns the remaining count (zero on success).
func (t *viewHistoryTable) deleteAllDates(ctx context.Context, id int) (int, error) {
	tbl := t.table.table

	del := dialect.Delete(tbl).Where(t.idColumn.Eq(id))
	if _, err := exec(ctx, del); err != nil {
		return 0, fmt.Errorf("resetting dates for id %v: %w", id, err)
	}

	return t.getCount(ctx, id)
}
// sqler is implemented by query datasets that can render themselves to a SQL
// string with bound parameters.
type sqler interface {
	ToSQL() (sql string, params []interface{}, err error)
}

View file

@ -190,6 +190,22 @@ var (
},
valueColumn: scenesURLsJoinTable.Col(sceneURLColumn),
}
scenesViewTableMgr = &viewHistoryTable{
table: table{
table: goqu.T(scenesViewDatesTable),
idColumn: goqu.T(scenesViewDatesTable).Col(sceneIDColumn),
},
dateColumn: goqu.T(scenesViewDatesTable).Col(sceneViewDateColumn),
}
scenesOTableMgr = &viewHistoryTable{
table: table{
table: goqu.T(scenesODatesTable),
idColumn: goqu.T(scenesODatesTable).Col(sceneIDColumn),
},
dateColumn: goqu.T(scenesODatesTable).Col(sceneODateColumn),
}
)
var (

View file

@ -5,6 +5,8 @@ import (
"time"
)
const TimestampFormat = time.RFC3339
// Timestamp represents a time stored in RFC3339 format.
type Timestamp struct {
Timestamp time.Time
@ -18,7 +20,18 @@ func (t *Timestamp) Scan(value interface{}) error {
// Value implements the driver Valuer interface.
func (t Timestamp) Value() (driver.Value, error) {
return t.Timestamp.Format(time.RFC3339), nil
return t.Timestamp.Format(TimestampFormat), nil
}
// UTCTimestamp stores a time in UTC.
// TODO - Timestamp should use UTC by default
type UTCTimestamp struct {
Timestamp
}
// Value implements the driver Valuer interface.
func (t UTCTimestamp) Value() (driver.Value, error) {
return t.Timestamp.Timestamp.UTC().Format(TimestampFormat), nil
}
// NullTimestamp represents a nullable time stored in RFC3339 format.
@ -47,7 +60,7 @@ func (t NullTimestamp) Value() (driver.Value, error) {
return nil, nil
}
return t.Timestamp.Format(time.RFC3339), nil
return t.Timestamp.Format(TimestampFormat), nil
}
func (t NullTimestamp) TimePtr() *time.Time {

View file

@ -22,6 +22,9 @@ fragment SceneData on Scene {
play_duration
play_count
play_history
o_history
files {
...VideoFileData
}

View file

@ -34,16 +34,36 @@ mutation SceneSaveActivity(
)
}
mutation SceneIncrementPlayCount($id: ID!) {
sceneIncrementPlayCount(id: $id)
mutation SceneAddPlay($id: ID!, $times: [Timestamp!]) {
sceneAddPlay(id: $id, times: $times) {
count
history
}
}
mutation SceneIncrementO($id: ID!) {
sceneIncrementO(id: $id)
mutation SceneDeletePlay($id: ID!, $times: [Timestamp!]) {
sceneDeletePlay(id: $id, times: $times) {
count
history
}
}
mutation SceneDecrementO($id: ID!) {
sceneDecrementO(id: $id)
mutation SceneResetPlayCount($id: ID!) {
sceneResetPlayCount(id: $id)
}
mutation SceneAddO($id: ID!, $times: [Timestamp!]) {
sceneAddO(id: $id, times: $times) {
count
history
}
}
mutation SceneDeleteO($id: ID!, $times: [Timestamp!]) {
sceneDeleteO(id: $id, times: $times) {
count
history
}
}
mutation SceneResetO($id: ID!) {

View file

@ -61,6 +61,7 @@ const QueueViewer = lazyComponent(() => import("./QueueViewer"));
const SceneMarkersPanel = lazyComponent(() => import("./SceneMarkersPanel"));
const SceneFileInfoPanel = lazyComponent(() => import("./SceneFileInfoPanel"));
const SceneDetailPanel = lazyComponent(() => import("./SceneDetailPanel"));
const SceneHistoryPanel = lazyComponent(() => import("./SceneHistoryPanel"));
const SceneMoviePanel = lazyComponent(() => import("./SceneMoviePanel"));
const SceneGalleriesPanel = lazyComponent(
() => import("./SceneGalleriesPanel")
@ -158,6 +159,7 @@ const ScenePage: React.FC<IProps> = ({
Mousetrap.bind("e", () => setActiveTabKey("scene-edit-panel"));
Mousetrap.bind("k", () => setActiveTabKey("scene-markers-panel"));
Mousetrap.bind("i", () => setActiveTabKey("scene-file-info-panel"));
Mousetrap.bind("h", () => setActiveTabKey("scene-history-panel"));
Mousetrap.bind("o", () => {
onIncrementClick();
});
@ -172,6 +174,7 @@ const ScenePage: React.FC<IProps> = ({
Mousetrap.unbind("e");
Mousetrap.unbind("k");
Mousetrap.unbind("i");
Mousetrap.unbind("h");
Mousetrap.unbind("o");
Mousetrap.unbind("p n");
Mousetrap.unbind("p p");
@ -407,6 +410,11 @@ const ScenePage: React.FC<IProps> = ({
<Counter count={scene.files.length} hideZero hideOne />
</Nav.Link>
</Nav.Item>
<Nav.Item>
<Nav.Link eventKey="scene-history-panel">
<FormattedMessage id="history" />
</Nav.Link>
</Nav.Item>
<Nav.Item>
<Nav.Link eventKey="scene-edit-panel">
<FormattedMessage id="actions.edit" />
@ -488,6 +496,9 @@ const ScenePage: React.FC<IProps> = ({
onDelete={() => setIsDeleteAlertOpen(true)}
/>
</Tab.Pane>
<Tab.Pane eventKey="scene-history-panel">
<SceneHistoryPanel scene={scene} />
</Tab.Pane>
</Tab.Content>
</Tab.Container>
);

View file

@ -308,16 +308,6 @@ export const SceneFileInfoPanel: React.FC<ISceneFileInfoPanelProps> = (
{renderInteractiveSpeed()}
<URLsField id="urls" urls={props.scene.urls} truncate />
{renderStashIDs()}
<TextField
id="media_info.play_count"
value={(props.scene.play_count ?? 0).toString()}
truncate
/>
<TextField
id="media_info.play_duration"
value={TextUtils.secondsToTimestamp(props.scene.play_duration ?? 0)}
truncate
/>
</dl>
{filesPanel}

View file

@ -0,0 +1,356 @@
import {
faEllipsisV,
faPlus,
faTrash,
} from "@fortawesome/free-solid-svg-icons";
import React from "react";
import { Button, Dropdown } from "react-bootstrap";
import { FormattedMessage, useIntl } from "react-intl";
import { AlertModal } from "src/components/Shared/Alert";
import { Counter } from "src/components/Shared/Counter";
import { DateInput } from "src/components/Shared/DateInput";
import { Icon } from "src/components/Shared/Icon";
import { ModalComponent } from "src/components/Shared/Modal";
import {
useSceneDecrementO,
useSceneDecrementPlayCount,
useSceneIncrementO,
useSceneIncrementPlayCount,
useSceneResetO,
useSceneResetPlayCount,
} from "src/core/StashService";
import * as GQL from "src/core/generated-graphql";
import { TextField } from "src/utils/field";
import TextUtils from "src/utils/text";
// History renders a list of date/time entries with a remove button per entry.
// Shows the message identified by noneID when the list is empty. Entries equal
// to unknownDate render as a localised "unknown date" label.
const History: React.FC<{
  className?: string;
  history: string[];
  unknownDate?: string;
  onRemove: (date: string) => void;
  noneID: string;
}> = ({ className, history, unknownDate, noneID, onRemove }) => {
  const intl = useIntl();

  if (history.length === 0) {
    return (
      <div>
        <FormattedMessage id={noneID} />
      </div>
    );
  }

  function renderDate(date: string) {
    if (date === unknownDate) {
      return intl.formatMessage({ id: "unknown_date" });
    }
    return TextUtils.formatDateTime(intl, date);
  }

  return (
    <div className="scene-history">
      <ul className={className}>
        {history.map((playdate, index) => (
          // entries may contain duplicate dates, so key by index
          <li key={index}>
            <span>{renderDate(playdate)}</span>
            <Button
              className="remove-date-button"
              size="sm"
              variant="minimal"
              onClick={() => onRemove(playdate)}
              title={intl.formatMessage({ id: "actions.remove_date" })}
            >
              <Icon icon={faTrash} />
            </Button>
          </li>
        ))}
      </ul>
    </div>
  );
};
// HistoryMenu is the "..." operations dropdown for a history list, offering
// manual date entry and, when entries exist, a clear-all action.
const HistoryMenu: React.FC<{
  hasHistory: boolean;
  onAddDate: () => void;
  onClearDates: () => void;
}> = ({ hasHistory, onAddDate, onClearDates }) => {
  const intl = useIntl();

  const itemClassName = "bg-secondary text-white";

  return (
    <Dropdown className="history-operations-dropdown">
      <Dropdown.Toggle
        variant="secondary"
        className="minimal"
        title={intl.formatMessage({ id: "operations" })}
      >
        <Icon icon={faEllipsisV} />
      </Dropdown.Toggle>
      <Dropdown.Menu className={itemClassName}>
        <Dropdown.Item className={itemClassName} onClick={() => onAddDate()}>
          <FormattedMessage id="actions.add_manual_date" />
        </Dropdown.Item>
        {hasHistory && (
          <Dropdown.Item
            className={itemClassName}
            onClick={() => onClearDates()}
          >
            <FormattedMessage id="actions.clear_date_data" />
          </Dropdown.Item>
        )}
      </Dropdown.Menu>
    </Dropdown>
  );
};
// DatePickerModal prompts for a date/time, defaulting to now. Calls onClose
// with the chosen value on confirm, or with no argument on cancel.
const DatePickerModal: React.FC<{
  show: boolean;
  onClose: (t?: string) => void;
}> = ({ show, onClose }) => {
  const intl = useIntl();

  const [date, setDate] = React.useState<string>(
    TextUtils.dateTimeToString(new Date())
  );

  const accept = {
    onClick: () => onClose(date),
    text: intl.formatMessage({ id: "actions.confirm" }),
  };

  const cancel = {
    variant: "secondary",
    onClick: () => onClose(),
    text: intl.formatMessage({ id: "actions.cancel" }),
  };

  return (
    <ModalComponent
      show={show}
      header={<FormattedMessage id="actions.choose_date" />}
      accept={accept}
      cancel={cancel}
    >
      <div>
        <DateInput value={date} onValueChange={(d) => setDate(d)} isTime />
      </div>
    </ModalComponent>
  );
};
interface ISceneHistoryProps {
  scene: GQL.SceneDataFragment;
}

// SceneHistoryPanel shows and edits a scene's play and O history: dated lists
// with per-entry removal, manual date entry via a picker dialog, and
// clear-all operations guarded by confirmation dialogs.
export const SceneHistoryPanel: React.FC<ISceneHistoryProps> = ({ scene }) => {
  const intl = useIntl();

  // visibility flags for the confirm and date-picker dialogs
  const [dialogs, setDialogs] = React.useState({
    playHistory: false,
    oHistory: false,
    addPlay: false,
    addO: false,
  });

  function setDialogPartial(partial: Partial<typeof dialogs>) {
    setDialogs({ ...dialogs, ...partial });
  }

  const [incrementPlayCount] = useSceneIncrementPlayCount();
  const [decrementPlayCount] = useSceneDecrementPlayCount();
  const [clearPlayCount] = useSceneResetPlayCount();

  const [incrementOCount] = useSceneIncrementO(scene.id);
  const [decrementOCount] = useSceneDecrementO(scene.id);
  const [resetO] = useSceneResetO(scene.id);

  // converts a locally-entered date/time string to ISO-8601; null when the
  // input cannot be parsed
  function dateStringToISOString(time: string) {
    const date = TextUtils.stringToFuzzyDateTime(time);
    if (!date) return null;
    return date.toISOString();
  }

  // with no time given, the mutation records the current time
  function handleAddPlayDate(time?: string) {
    incrementPlayCount({
      variables: {
        id: scene.id,
        times: time ? [time] : undefined,
      },
    });
  }

  function handleDeletePlayDate(time: string) {
    decrementPlayCount({
      variables: {
        id: scene.id,
        times: time ? [time] : undefined,
      },
    });
  }

  function handleClearPlayDates() {
    setDialogPartial({ playHistory: false });
    clearPlayCount({
      variables: {
        id: scene.id,
      },
    });
  }

  function handleAddODate(time?: string) {
    incrementOCount({
      variables: {
        id: scene.id,
        times: time ? [time] : undefined,
      },
    });
  }

  function handleDeleteODate(time: string) {
    decrementOCount({
      variables: {
        id: scene.id,
        times: time ? [time] : undefined,
      },
    });
  }

  function handleClearODates() {
    setDialogPartial({ oHistory: false });
    resetO({
      variables: {
        id: scene.id,
      },
    });
  }

  function maybeRenderDialogs() {
    return (
      <>
        <AlertModal
          show={dialogs.playHistory}
          text={intl.formatMessage({
            id: "dialogs.clear_play_history_confirm",
          })}
          confirmButtonText={intl.formatMessage({ id: "actions.clear" })}
          onConfirm={() => handleClearPlayDates()}
          onCancel={() => setDialogPartial({ playHistory: false })}
        />
        <AlertModal
          show={dialogs.oHistory}
          text={intl.formatMessage({ id: "dialogs.clear_o_history_confirm" })}
          confirmButtonText={intl.formatMessage({ id: "actions.clear" })}
          onConfirm={() => handleClearODates()}
          onCancel={() => setDialogPartial({ oHistory: false })}
        />
        {/* add conditions here so that date is generated correctly */}
        {dialogs.addPlay && (
          <DatePickerModal
            show
            onClose={(t) => {
              const tt = t ? dateStringToISOString(t) : null;
              if (tt) {
                handleAddPlayDate(tt);
              }
              setDialogPartial({ addPlay: false });
            }}
          />
        )}
        {dialogs.addO && (
          <DatePickerModal
            show
            onClose={(t) => {
              const tt = t ? dateStringToISOString(t) : null;
              if (tt) {
                handleAddODate(tt);
              }
              setDialogPartial({ addO: false });
            }}
          />
        )}
      </>
    );
  }

  // filter out null entries; the GraphQL types declare the lists as nullable
  const playHistory = (scene.play_history ?? []).filter(
    (h) => h != null
  ) as string[];
  const oHistory = (scene.o_history ?? []).filter((h) => h != null) as string[];

  return (
    <div>
      {maybeRenderDialogs()}
      <div className="play-history">
        <div className="history-header">
          <h5>
            <span>
              <FormattedMessage id="play_history" />
              <Counter count={playHistory.length} hideZero />
            </span>
            <span>
              <Button
                size="sm"
                variant="minimal"
                className="add-date-button"
                title={intl.formatMessage({ id: "actions.add_play" })}
                onClick={() => handleAddPlayDate()}
              >
                <Icon icon={faPlus} />
              </Button>
              <HistoryMenu
                hasHistory={playHistory.length > 0}
                onAddDate={() => setDialogPartial({ addPlay: true })}
                onClearDates={() => setDialogPartial({ playHistory: true })}
              />
            </span>
          </h5>
        </div>
        <History
          history={playHistory ?? []}
          noneID="playdate_recorded_no"
          unknownDate={scene.created_at}
          onRemove={(t) => handleDeletePlayDate(t)}
        />
        <dl className="details-list">
          <TextField
            id="media_info.play_duration"
            value={TextUtils.secondsToTimestamp(scene.play_duration ?? 0)}
          />
        </dl>
      </div>
      <div className="o-history">
        <div className="history-header">
          <h5>
            <span>
              <FormattedMessage id="o_history" />
              <Counter count={oHistory.length} hideZero />
            </span>
            <span>
              <Button
                size="sm"
                variant="minimal"
                className="add-date-button"
                title={intl.formatMessage({ id: "actions.add_o" })}
                onClick={() => handleAddODate()}
              >
                <Icon icon={faPlus} />
              </Button>
              <HistoryMenu
                hasHistory={oHistory.length > 0}
                onAddDate={() => setDialogPartial({ addO: true })}
                onClearDates={() => setDialogPartial({ oHistory: true })}
              />
            </span>
          </h5>
        </div>
        <History
          history={oHistory}
          noneID="odate_recorded_no"
          unknownDate={scene.created_at}
          onRemove={(t) => handleDeleteODate(t)}
        />
      </div>
    </div>
  );
};

export default SceneHistoryPanel;

View file

@ -44,10 +44,16 @@ const StashIDsField: React.FC<IStashIDsField> = ({ values }) => {
return <StringListSelect value={values.map((v) => v.stash_id)} />;
};
type MergeOptions = {
values: GQL.SceneUpdateInput;
includeViewHistory: boolean;
includeOHistory: boolean;
};
interface ISceneMergeDetailsProps {
sources: GQL.SlimSceneDataFragment[];
dest: GQL.SlimSceneDataFragment;
onClose: (values?: GQL.SceneUpdateInput) => void;
onClose: (options?: MergeOptions) => void;
}
const SceneMergeDetails: React.FC<ISceneMergeDetailsProps> = ({
@ -558,13 +564,14 @@ const SceneMergeDetails: React.FC<ISceneMergeDetailsProps> = ({
);
}
function createValues(): GQL.SceneUpdateInput {
function createValues(): MergeOptions {
const all = [dest, ...sources];
// only set the cover image if it's different from the existing cover image
const coverImage = image.useNewValue ? image.getNewValue() : undefined;
return {
values: {
id: dest.id,
title: title.getNewValue(),
code: code.getNewValue(),
@ -593,6 +600,9 @@ const SceneMergeDetails: React.FC<ISceneMergeDetailsProps> = ({
organized: organized.getNewValue(),
stash_ids: stashIDs.getNewValue(),
cover_image: coverImage,
},
includeViewHistory: playCount.getNewValue() !== undefined,
includeOHistory: oCounter.getNewValue() !== undefined,
};
}
@ -679,13 +689,16 @@ export const SceneMergeModal: React.FC<ISceneMergeModalProps> = ({
setSecondStep(true);
}
async function onMerge(values: GQL.SceneUpdateInput) {
async function onMerge(options: MergeOptions) {
const { values, includeViewHistory, includeOHistory } = options;
try {
setRunning(true);
const result = await mutateSceneMerge(
destScene[0].id,
sourceScenes.map((s) => s.id),
values
values,
includeViewHistory,
includeOHistory
);
if (result.data?.sceneMerge) {
Toast.success(intl.formatMessage({ id: "toast.merged_scenes" }));

View file

@ -750,3 +750,37 @@ input[type="range"].blue-slider {
bottom: 0;
}
}
// spacing between the play-history list and the play-duration details list
.play-history dl {
margin-top: 0.5rem;
}
// shared layout for the Play History and O History sections of the
// scene History tab
.play-history,
.o-history {
// header row: section title + counter on the left, add/menu buttons
// pushed to the right edge
.history-header h5 {
align-items: center;
display: flex;
justify-content: space-between;
}
// keep the operations dropdown on the same line as the add button
.history-operations-dropdown {
display: inline-block;
}
// green "+" button for adding a play/O date
.add-date-button {
color: $success;
}
// red button for removing an individual recorded date
.remove-date-button {
color: $danger;
}
// date list: each entry shows the date on the left and its remove
// button on the right
ul {
padding-inline-start: 1rem;
li {
display: flex;
justify-content: space-between;
}
}
}

View file

@ -56,7 +56,7 @@ const _DateInput: React.FC<IProps> = (props: IProps) => {
onChange={(v) => {
props.onValueChange(v ? dateToString(v) : "");
}}
customInput={React.createElement(ShowPickerButton)}
customInput={<ShowPickerButton onClick={() => {}} />}
showMonthDropdown
showYearDropdown
scrollableMonthYearDropdown

View file

@ -570,11 +570,19 @@ export const useScenesDestroy = (input: GQL.ScenesDestroyInput) =>
});
export const useSceneIncrementO = (id: string) =>
GQL.useSceneIncrementOMutation({
GQL.useSceneAddOMutation({
variables: { id },
update(cache, result) {
const updatedOCount = result.data?.sceneIncrementO;
if (updatedOCount === undefined) return;
update(cache, result, { variables }) {
// this is not perfectly accurate, the time is set server-side
// it isn't even displayed anywhere in the UI anyway
const at = new Date().toISOString();
const mutationResult = result.data?.sceneAddO;
if (!mutationResult || !variables) return;
const { history } = mutationResult;
const { times } = variables;
const timeArray = !times ? [at] : Array.isArray(times) ? times : [times];
const scene = cache.readFragment<GQL.SlimSceneDataFragment>({
id: cache.identify({ __typename: "Scene", id }),
@ -589,7 +597,7 @@ export const useSceneIncrementO = (id: string) =>
id: cache.identify(performer),
fields: {
o_counter(value) {
return value + 1;
return value + timeArray.length;
},
},
});
@ -601,8 +609,18 @@ export const useSceneIncrementO = (id: string) =>
});
}
updateStats(cache, "total_o_count", 1);
updateO(cache, "Scene", id, updatedOCount);
updateStats(cache, "total_o_count", timeArray.length);
cache.modify({
id: cache.identify({ __typename: "Scene", id }),
fields: {
o_history() {
return history;
},
},
});
updateO(cache, "Scene", id, history.length);
evictQueries(cache, [
GQL.FindScenesDocument, // filter by o_counter
GQL.FindPerformersDocument, // filter by o_counter
@ -611,11 +629,15 @@ export const useSceneIncrementO = (id: string) =>
});
export const useSceneDecrementO = (id: string) =>
GQL.useSceneDecrementOMutation({
GQL.useSceneDeleteOMutation({
variables: { id },
update(cache, result) {
const updatedOCount = result.data?.sceneDecrementO;
if (updatedOCount === undefined) return;
update(cache, result, { variables }) {
const mutationResult = result.data?.sceneDeleteO;
if (!mutationResult || !variables) return;
const { history } = mutationResult;
const { times } = variables;
const timeArray = !times ? null : Array.isArray(times) ? times : [times];
const scene = cache.readFragment<GQL.SlimSceneDataFragment>({
id: cache.identify({ __typename: "Scene", id }),
@ -630,7 +652,7 @@ export const useSceneDecrementO = (id: string) =>
id: cache.identify(performer),
fields: {
o_counter(value) {
return value - 1;
return value - (timeArray?.length ?? 1);
},
},
});
@ -642,8 +664,18 @@ export const useSceneDecrementO = (id: string) =>
});
}
updateStats(cache, "total_o_count", -1);
updateO(cache, "Scene", id, updatedOCount);
updateStats(cache, "total_o_count", -(timeArray?.length ?? 1));
cache.modify({
id: cache.identify({ __typename: "Scene", id }),
fields: {
o_history() {
return history;
},
},
});
updateO(cache, "Scene", id, history.length);
evictQueries(cache, [
GQL.FindScenesDocument, // filter by o_counter
GQL.FindPerformersDocument, // filter by o_counter
@ -694,6 +726,16 @@ export const useSceneResetO = (id: string) =>
});
}
cache.modify({
id: cache.identify({ __typename: "Scene", id }),
fields: {
o_history() {
const ret: string[] = [];
return ret;
},
},
});
updateO(cache, "Scene", id, updatedOCount);
evictQueries(cache, [
GQL.FindScenesDocument, // filter by o_counter
@ -752,7 +794,9 @@ export const mutateSceneAssignFile = (sceneID: string, fileID: string) =>
export const mutateSceneMerge = (
destination: string,
source: string[],
values: GQL.SceneUpdateInput
values: GQL.SceneUpdateInput,
includeViewHistory: boolean,
includeOHistory: boolean
) =>
client.mutate<GQL.SceneMergeMutation>({
mutation: GQL.SceneMergeDocument,
@ -761,6 +805,8 @@ export const mutateSceneMerge = (
source,
destination,
values,
play_history: includeViewHistory,
o_history: includeOHistory,
},
},
update(cache, result) {
@ -790,7 +836,7 @@ export const useSceneSaveActivity = () =>
id: cache.identify({ __typename: "Scene", id }),
fields: {
resume_time() {
return resumeTime;
return resumeTime ?? null;
},
play_duration(value) {
return value + playDuration;
@ -809,9 +855,108 @@ export const useSceneSaveActivity = () =>
});
export const useSceneIncrementPlayCount = () =>
GQL.useSceneIncrementPlayCountMutation({
GQL.useSceneAddPlayMutation({
update(cache, result, { variables }) {
if (!result.data?.sceneIncrementPlayCount || !variables) return;
const mutationResult = result.data?.sceneAddPlay;
if (!mutationResult || !variables) return;
const { history } = mutationResult;
const { id } = variables;
let lastPlayCount = 0;
const playCount = history.length;
cache.modify({
id: cache.identify({ __typename: "Scene", id }),
fields: {
play_count(value) {
lastPlayCount = value;
return history.length;
},
last_played_at() {
// assume only one entry - or the first is the most recent
return history[0];
},
play_history() {
return history;
},
},
});
updateStats(cache, "total_play_count", playCount - lastPlayCount);
if (lastPlayCount === 0) {
updateStats(cache, "scenes_played", 1);
}
evictQueries(cache, [
GQL.FindScenesDocument, // filter by play count
]);
},
});
// Hook wrapping the sceneDeletePlay mutation. Removes the given play
// times (or the most recent play if none are given) and patches the
// Apollo cache so play_count, play_history, last_played_at and the
// aggregate stats stay consistent with the server response.
export const useSceneDecrementPlayCount = () =>
  GQL.useSceneDeletePlayMutation({
    update(cache, result, { variables }) {
      const mutationResult = result.data?.sceneDeletePlay;
      if (!mutationResult || !variables) return;

      // history is the scene's full remaining play history as returned
      // by the server after the deletion
      const { history } = mutationResult;
      const { id, times } = variables;

      // times may be absent, a single value, or an array; null means the
      // server removed the single most recent entry
      const timeArray = !times ? null : Array.isArray(times) ? times : [times];
      const nRemoved = timeArray?.length ?? 1;

      let lastPlayCount = 0;
      let lastPlayedAt: string | null = null;

      const playCount = history.length;

      cache.modify({
        id: cache.identify({ __typename: "Scene", id }),
        fields: {
          play_count(value) {
            // capture the pre-mutation count for the stats adjustments below
            lastPlayCount = value;
            return playCount;
          },
          play_history() {
            if (history.length > 0) {
              // history is ordered most-recent first
              lastPlayedAt = history[0];
            }
            return history;
          },
        },
      });

      // last_played_at is written in a second modify call so that it sees
      // the value captured from play_history above — field modifier order
      // within a single modify call is not guaranteed
      cache.modify({
        id: cache.identify({ __typename: "Scene", id }),
        fields: {
          last_played_at() {
            return lastPlayedAt;
          },
        },
      });

      if (lastPlayCount > 0) {
        // never subtract more plays than the scene actually had
        updateStats(
          cache,
          "total_play_count",
          nRemoved > lastPlayCount ? -lastPlayCount : -nRemoved
        );
      }
      // only decrement scenes_played when the scene previously had plays;
      // without the lastPlayCount > 0 guard a no-op delete on an unplayed
      // scene would incorrectly reduce the scenes_played stat
      if (lastPlayCount > 0 && lastPlayCount - nRemoved <= 0) {
        updateStats(cache, "scenes_played", -1);
      }

      evictQueries(cache, [
        GQL.FindScenesDocument, // filter by play count
      ]);
    },
  });
export const useSceneResetPlayCount = () =>
GQL.useSceneResetPlayCountMutation({
update(cache, result, { variables }) {
if (!variables) return;
let lastPlayCount = 0;
cache.modify({
@ -819,19 +964,23 @@ export const useSceneIncrementPlayCount = () =>
fields: {
play_count(value) {
lastPlayCount = value;
return value + 1;
return 0;
},
play_history() {
const ret: string[] = [];
return ret;
},
last_played_at() {
// this is not perfectly accurate, the time is set server-side
// it isn't even displayed anywhere in the UI anyway
return new Date().toISOString();
return null;
},
},
});
updateStats(cache, "total_play_count", 1);
if (lastPlayCount === 0) {
updateStats(cache, "scenes_played", 1);
if (lastPlayCount > 0) {
updateStats(cache, "total_play_count", -lastPlayCount);
}
if (lastPlayCount > 0) {
updateStats(cache, "scenes_played", -1);
}
evictQueries(cache, [

View file

@ -58,6 +58,7 @@
| `k` | Markers tab |
| `i` | File info tab |
| `e` | Edit tab |
| `h` | History tab |
| `,` | Hide/Show sidebar |
| `.` | Hide/Show scene scrubber |
| `o` | Increment O-Counter |

View file

@ -3,6 +3,9 @@
"add": "Add",
"add_directory": "Add Directory",
"add_entity": "Add {entityType}",
"add_manual_date": "Add manual date",
"add_o": "Add O",
"add_play": "Add play",
"add_to_entity": "Add to {entityType}",
"allow": "Allow",
"allow_temporarily": "Allow temporarily",
@ -13,9 +16,11 @@
"backup": "Backup",
"browse_for_image": "Browse for image…",
"cancel": "Cancel",
"choose_date": "Choose a date",
"clean": "Clean",
"clear": "Clear",
"clear_back_image": "Clear back image",
"clear_date_data": "Clear date data",
"clear_front_image": "Clear front image",
"clear_image": "Clear Image",
"close": "Close",
@ -84,6 +89,7 @@
"reload_plugins": "Reload plugins",
"reload_scrapers": "Reload scrapers",
"remove": "Remove",
"remove_date": "Remove date",
"remove_from_gallery": "Remove from Gallery",
"rename_gen_files": "Rename generated files",
"rescan": "Rescan",
@ -808,6 +814,8 @@
"details": "Details",
"developmentVersion": "Development Version",
"dialogs": {
"clear_o_history_confirm": "Are you sure you want to clear the O history?",
"clear_play_history_confirm": "Are you sure you want to clear the play history?",
"create_new_entity": "Create new {entity}",
"delete_alert": "The following {count, plural, one {{singularEntity}} other {{pluralEntity}}} will be deleted permanently:",
"delete_confirm": "Are you sure you want to delete {entityName}?",
@ -1034,6 +1042,7 @@
"height": "Height",
"height_cm": "Height (cm)",
"help": "Help",
"history": "History",
"ignore_auto_tag": "Ignore Auto Tag",
"image": "Image",
"image_count": "Image Count",
@ -1061,6 +1070,7 @@
"downloaded_from": "Downloaded From",
"hash": "Hash",
"interactive_speed": "Interactive speed",
"o_count": "O Count",
"performer_card": {
"age": "{age} {years_old}",
"age_context": "{age} {years_old} in this scene"
@ -1080,6 +1090,8 @@
"new": "New",
"none": "None",
"o_counter": "O-Counter",
"o_history": "O History",
"odate_recorded_no": "No O Date Recorded",
"operations": "Operations",
"organized": "Organised",
"orientation": "Orientation",
@ -1177,6 +1189,8 @@
"piercings": "Piercings",
"play_count": "Play Count",
"play_duration": "Play Duration",
"play_history": "Play History",
"playdate_recorded_no": "No Play Date Recorded",
"plays": "{value} plays",
"primary_file": "Primary file",
"primary_tag": "Primary Tag",
@ -1401,6 +1415,7 @@
"true": "True",
"twitter": "Twitter",
"type": "Type",
"unknown_date": "Unknown date",
"updated_at": "Updated At",
"url": "URL",
"urls": "URLs",