Merge branch 'stashapp:develop' into develop
Commit: b4cd6b471f
251 changed files with 6125 additions and 1958 deletions
@@ -57,10 +57,11 @@ Stash can pull metadata (performers, tags, descriptions, studios, and more) dire

<sub>[StashDB](http://stashdb.org) is the canonical instance of our open source metadata API, [stash-box](https://github.com/stashapp/stash-box).</sub>

# Translation

[](https://hosted.weblate.org/engage/stashapp/)

🇧🇷 🇨🇳 🇩🇰 🇳🇱 🇬🇧 🇪🇪 🇫🇮 🇫🇷 🇩🇪 🇮🇹 🇯🇵 🇰🇷 🇵🇱 🇷🇺 🇪🇸 🇸🇪 🇹🇼 🇹🇷

[](https://translate.codeberg.org/engage/stash/)

Stash is available in 25 languages (so far!) and it could be in your language too. We use Weblate to coordinate community translations. If you want to help us translate Stash into your language, you can make an account at [Stash's Weblate](https://hosted.weblate.org/projects/stashapp/stash/) to get started contributing new languages or improving existing ones. Thanks!

Stash is available in 32 languages (so far!) and it could be in your language too. We use Weblate to coordinate community translations. If you want to help us translate Stash into your language, you can make an account at [Codeberg's Weblate](https://translate.codeberg.org/projects/stash/stash/) to get started contributing new languages or improving existing ones. Thanks!

[](https://translate.codeberg.org/engage/stash/)

# Support (FAQ)
go.mod (11 changed lines)
@@ -34,6 +34,7 @@ require (
github.com/knadh/koanf v1.5.0
github.com/lucasb-eyer/go-colorful v1.2.0
github.com/mattn/go-sqlite3 v1.14.22
github.com/mitchellh/mapstructure v1.5.0
github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8
github.com/remeh/sizedwaitgroup v1.0.0

@@ -50,11 +51,11 @@ require (
github.com/xWTF/chardet v0.0.0-20230208095535-c780f2ac244e
github.com/zencoder/go-dash/v3 v3.0.2
golang.org/x/crypto v0.23.0
golang.org/x/image v0.16.0
golang.org/x/image v0.18.0
golang.org/x/net v0.25.0
golang.org/x/sys v0.20.0
golang.org/x/term v0.20.0
golang.org/x/text v0.15.0
golang.org/x/text v0.16.0
gopkg.in/guregu/null.v4 v4.0.0
gopkg.in/yaml.v2 v2.4.0
)

@@ -88,7 +89,6 @@ require (
github.com/mattn/go-isatty v0.0.19 // indirect
github.com/mitchellh/copystructure v1.2.0 // indirect
github.com/mitchellh/go-homedir v1.1.0 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/mitchellh/reflectwalk v1.0.2 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect

@@ -110,8 +110,9 @@ require (
github.com/urfave/cli/v2 v2.8.1 // indirect
github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 // indirect
go.uber.org/atomic v1.11.0 // indirect
golang.org/x/mod v0.12.0 // indirect
golang.org/x/tools v0.13.0 // indirect
golang.org/x/mod v0.17.0 // indirect
golang.org/x/sync v0.7.0 // indirect
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)
go.sum (26 changed lines)
@@ -196,9 +196,9 @@ github.com/envoyproxy/protoc-gen-validate v0.6.2/go.mod h1:2t7qjJNvHPx8IjnBOzl9E
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU=
github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk=
github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU=
github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY=

@@ -300,8 +300,8 @@ github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=

@@ -729,8 +729,8 @@ golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMk
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.16.0 h1:9kloLAKhUufZhA12l5fwnx2NZW39/we1UhBesW433jw=
golang.org/x/image v0.16.0/go.mod h1:ugSZItdV4nOxyqp56HmXwH0Ry0nBCpjnZdpDaIHdoPs=
golang.org/x/image v0.18.0 h1:jGzIakQa/ZXI1I0Fxvaa9W7yP25TqT6cHIHn+6CqvSQ=
golang.org/x/image v0.18.0/go.mod h1:4yyo5vMFQjVjUcVk4jEQcU9MGy/rulF5WvUILseCM2E=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=

@@ -758,8 +758,8 @@ golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=

@@ -840,8 +840,8 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E=
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=

@@ -952,8 +952,8 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4=
golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=

@@ -1019,8 +1019,8 @@ golang.org/x/tools v0.1.8/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.13.0 h1:Iey4qkscZuv0VvIt8E0neZjtPVQFSc870HQ448QgEmQ=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d h1:vU5i/LfpvrRCpgM/VPfJLg5KjxD3E+hfT1SH+d9zLwg=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@@ -4,6 +4,7 @@ type Query {
findSavedFilter(id: ID!): SavedFilter
findSavedFilters(mode: FilterMode): [SavedFilter!]!
findDefaultFilter(mode: FilterMode!): SavedFilter
  @deprecated(reason: "default filter now stored in UI config")

"Find a scene by ID or Checksum"
findScene(id: ID, checksum: String): Scene

@@ -345,6 +346,7 @@ type Mutation {
saveFilter(input: SaveFilterInput!): SavedFilter!
destroySavedFilter(input: DestroyFilterInput!): Boolean!
setDefaultFilter(input: SetDefaultFilterInput!): Boolean!
  @deprecated(reason: "now uses UI config")

"Change general configuration options"
configureGeneral(input: ConfigGeneralInput!): ConfigGeneralResult!
@@ -334,6 +334,10 @@ input MovieFilterType {
url: StringCriterionInput
"Filter to only include movies where performer appears in a scene"
performers: MultiCriterionInput
"Filter to only include movies with these tags"
tags: HierarchicalMultiCriterionInput
"Filter by tag count"
tag_count: IntCriterionInput
"Filter by date"
date: DateCriterionInput
"Filter by creation time"

@@ -358,6 +362,8 @@ input StudioFilterType {
parents: MultiCriterionInput
"Filter by StashID"
stash_id_endpoint: StashIDCriterionInput
"Filter to only include studios with these tags"
tags: HierarchicalMultiCriterionInput
"Filter to only include studios missing this property"
is_missing: String
# rating expressed as 1-100

@@ -370,6 +376,8 @@ input StudioFilterType {
image_count: IntCriterionInput
"Filter by gallery count"
gallery_count: IntCriterionInput
"Filter by tag count"
tag_count: IntCriterionInput
"Filter by url"
url: StringCriterionInput
"Filter by studio aliases"

@@ -494,6 +502,12 @@ input TagFilterType {
"Filter by number of performers with this tag"
performer_count: IntCriterionInput

"Filter by number of studios with this tag"
studio_count: IntCriterionInput

"Filter by number of movies with this tag"
movie_count: IntCriterionInput

"Filter by number of markers with this tag"
marker_count: IntCriterionInput
@@ -12,6 +12,7 @@ type Movie {
synopsis: String
url: String @deprecated(reason: "Use urls")
urls: [String!]!
tags: [Tag!]!
created_at: Time!
updated_at: Time!

@@ -34,6 +35,7 @@ input MovieCreateInput {
synopsis: String
url: String @deprecated(reason: "Use urls")
urls: [String!]
tag_ids: [ID!]
"This should be a URL or a base64 encoded data URL"
front_image: String
"This should be a URL or a base64 encoded data URL"

@@ -53,6 +55,7 @@ input MovieUpdateInput {
synopsis: String
url: String @deprecated(reason: "Use urls")
urls: [String!]
tag_ids: [ID!]
"This should be a URL or a base64 encoded data URL"
front_image: String
"This should be a URL or a base64 encoded data URL"

@@ -67,6 +70,7 @@ input BulkMovieUpdateInput {
studio_id: ID
director: String
urls: BulkUpdateStrings
tag_ids: BulkUpdateIds
}

input MovieDestroyInput {
@@ -16,10 +16,11 @@ type Performer {
id: ID!
name: String!
disambiguation: String
url: String
url: String @deprecated(reason: "Use urls")
urls: [String!]
gender: GenderEnum
twitter: String
instagram: String
twitter: String @deprecated(reason: "Use urls")
instagram: String @deprecated(reason: "Use urls")
birthdate: String
ethnicity: String
country: String

@@ -60,7 +61,8 @@ type Performer {
input PerformerCreateInput {
name: String!
disambiguation: String
url: String
url: String @deprecated(reason: "Use urls")
urls: [String!]
gender: GenderEnum
birthdate: String
ethnicity: String

@@ -75,8 +77,8 @@ input PerformerCreateInput {
tattoos: String
piercings: String
alias_list: [String!]
twitter: String
instagram: String
twitter: String @deprecated(reason: "Use urls")
instagram: String @deprecated(reason: "Use urls")
favorite: Boolean
tag_ids: [ID!]
"This should be a URL or a base64 encoded data URL"

@@ -95,7 +97,8 @@ input PerformerUpdateInput {
id: ID!
name: String
disambiguation: String
url: String
url: String @deprecated(reason: "Use urls")
urls: [String!]
gender: GenderEnum
birthdate: String
ethnicity: String

@@ -110,8 +113,8 @@ input PerformerUpdateInput {
tattoos: String
piercings: String
alias_list: [String!]
twitter: String
instagram: String
twitter: String @deprecated(reason: "Use urls")
instagram: String @deprecated(reason: "Use urls")
favorite: Boolean
tag_ids: [ID!]
"This should be a URL or a base64 encoded data URL"

@@ -135,7 +138,8 @@ input BulkPerformerUpdateInput {
clientMutationId: String
ids: [ID!]
disambiguation: String
url: String
url: String @deprecated(reason: "Use urls")
urls: BulkUpdateStrings
gender: GenderEnum
birthdate: String
ethnicity: String

@@ -150,8 +154,8 @@ input BulkPerformerUpdateInput {
tattoos: String
piercings: String
alias_list: BulkUpdateStrings
twitter: String
instagram: String
twitter: String @deprecated(reason: "Use urls")
instagram: String @deprecated(reason: "Use urls")
favorite: Boolean
tag_ids: BulkUpdateIds
# rating expressed as 1-100
@@ -11,6 +11,7 @@ type ScrapedMovie {
urls: [String!]
synopsis: String
studio: ScrapedStudio
tags: [ScrapedTag!]

"This should be a base64 encoded data URL"
front_image: String

@@ -28,4 +29,5 @@ input ScrapedMovieInput {
url: String @deprecated(reason: "use urls")
urls: [String!]
synopsis: String
# not including tags for the input
}
@@ -5,9 +5,10 @@ type ScrapedPerformer {
name: String
disambiguation: String
gender: String
url: String
twitter: String
instagram: String
url: String @deprecated(reason: "use urls")
urls: [String!]
twitter: String @deprecated(reason: "use urls")
instagram: String @deprecated(reason: "use urls")
birthdate: String
ethnicity: String
country: String

@@ -40,9 +41,10 @@ input ScrapedPerformerInput {
name: String
disambiguation: String
gender: String
url: String
twitter: String
instagram: String
url: String @deprecated(reason: "use urls")
urls: [String!]
twitter: String @deprecated(reason: "use urls")
instagram: String @deprecated(reason: "use urls")
birthdate: String
ethnicity: String
country: String
@@ -5,6 +5,7 @@ type Studio {
parent_studio: Studio
child_studios: [Studio!]!
aliases: [String!]!
tags: [Tag!]!
ignore_auto_tag: Boolean!

image_path: String # Resolver

@@ -35,6 +36,7 @@ input StudioCreateInput {
favorite: Boolean
details: String
aliases: [String!]
tag_ids: [ID!]
ignore_auto_tag: Boolean
}

@@ -51,6 +53,7 @@ input StudioUpdateInput {
favorite: Boolean
details: String
aliases: [String!]
tag_ids: [ID!]
ignore_auto_tag: Boolean
}
@@ -13,6 +13,8 @@ type Tag {
image_count(depth: Int): Int! # Resolver
gallery_count(depth: Int): Int! # Resolver
performer_count(depth: Int): Int! # Resolver
studio_count(depth: Int): Int! # Resolver
movie_count(depth: Int): Int! # Resolver
parents: [Tag!]!
children: [Tag!]!
@@ -57,6 +57,20 @@ func (r *movieResolver) Studio(ctx context.Context, obj *models.Movie) (ret *mod
return loaders.From(ctx).StudioByID.Load(*obj.StudioID)
}

func (r movieResolver) Tags(ctx context.Context, obj *models.Movie) (ret []*models.Tag, err error) {
if !obj.TagIDs.Loaded() {
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
return obj.LoadTagIDs(ctx, r.repository.Movie)
}); err != nil {
return nil, err
}
}

var errs []error
ret, errs = loaders.From(ctx).TagByID.LoadAll(obj.TagIDs.List())
return ret, firstError(errs)
}

func (r *movieResolver) FrontImagePath(ctx context.Context, obj *models.Movie) (*string, error) {
var hasImage bool
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
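The new Tags resolver above (and the studio equivalent further down) follows the dataloader pattern used throughout these resolvers: lazily load the ID list inside a read transaction, batch-fetch the records with LoadAll, then collapse the per-key error slice into a single error. A minimal sketch of that last step, assuming `firstError` simply returns the first non-nil entry (the real helper lives elsewhere in the package and may differ):

```go
package main

import (
	"errors"
	"fmt"
)

// firstError collapses a per-key error slice (as returned by a dataloader's
// LoadAll) into a single error by returning the first non-nil entry.
func firstError(errs []error) error {
	for _, err := range errs {
		if err != nil {
			return err
		}
	}
	return nil
}

func main() {
	errs := []error{nil, errors.New("tag 3 not found"), nil}
	fmt.Println(firstError(errs)) // tag 3 not found
}
```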
@@ -24,6 +24,79 @@ func (r *performerResolver) AliasList(ctx context.Context, obj *models.Performer
return obj.Aliases.List(), nil
}

func (r *performerResolver) URL(ctx context.Context, obj *models.Performer) (*string, error) {
if !obj.URLs.Loaded() {
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
return obj.LoadURLs(ctx, r.repository.Performer)
}); err != nil {
return nil, err
}
}

urls := obj.URLs.List()
if len(urls) == 0 {
return nil, nil
}

return &urls[0], nil
}

func (r *performerResolver) Twitter(ctx context.Context, obj *models.Performer) (*string, error) {
if !obj.URLs.Loaded() {
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
return obj.LoadURLs(ctx, r.repository.Performer)
}); err != nil {
return nil, err
}
}

urls := obj.URLs.List()

// find the first twitter url
for _, url := range urls {
if performer.IsTwitterURL(url) {
u := url
return &u, nil
}
}

return nil, nil
}

func (r *performerResolver) Instagram(ctx context.Context, obj *models.Performer) (*string, error) {
if !obj.URLs.Loaded() {
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
return obj.LoadURLs(ctx, r.repository.Performer)
}); err != nil {
return nil, err
}
}

urls := obj.URLs.List()

// find the first instagram url
for _, url := range urls {
if performer.IsInstagramURL(url) {
u := url
return &u, nil
}
}

return nil, nil
}

func (r *performerResolver) Urls(ctx context.Context, obj *models.Performer) ([]string, error) {
if !obj.URLs.Loaded() {
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
return obj.LoadURLs(ctx, r.repository.Performer)
}); err != nil {
return nil, err
}
}

return obj.URLs.List(), nil
}

func (r *performerResolver) Height(ctx context.Context, obj *models.Performer) (*string, error) {
if obj.Height != nil {
ret := strconv.Itoa(*obj.Height)
@@ -40,6 +40,20 @@ func (r *studioResolver) Aliases(ctx context.Context, obj *models.Studio) ([]str
return obj.Aliases.List(), nil
}

func (r *studioResolver) Tags(ctx context.Context, obj *models.Studio) (ret []*models.Tag, err error) {
if !obj.TagIDs.Loaded() {
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
return obj.LoadTagIDs(ctx, r.repository.Studio)
}); err != nil {
return nil, err
}
}

var errs []error
ret, errs = loaders.From(ctx).TagByID.LoadAll(obj.TagIDs.List())
return ret, firstError(errs)
}

func (r *studioResolver) SceneCount(ctx context.Context, obj *models.Studio, depth *int) (ret int, err error) {
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
ret, err = scene.CountByStudioID(ctx, r.repository.Scene, obj.ID, depth)
@@ -8,8 +8,10 @@ import (
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/movie"
"github.com/stashapp/stash/pkg/performer"
"github.com/stashapp/stash/pkg/scene"
"github.com/stashapp/stash/pkg/studio"
)

func (r *tagResolver) Parents(ctx context.Context, obj *models.Tag) (ret []*models.Tag, err error) {

@@ -107,6 +109,28 @@ func (r *tagResolver) PerformerCount(ctx context.Context, obj *models.Tag, depth
return ret, nil
}

func (r *tagResolver) StudioCount(ctx context.Context, obj *models.Tag, depth *int) (ret int, err error) {
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
ret, err = studio.CountByTagID(ctx, r.repository.Studio, obj.ID, depth)
return err
}); err != nil {
return 0, err
}

return ret, nil
}

func (r *tagResolver) MovieCount(ctx context.Context, obj *models.Tag, depth *int) (ret int, err error) {
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
ret, err = movie.CountByTagID(ctx, r.repository.Movie, obj.ID, depth)
return err
}); err != nil {
return 0, err
}

return ret, nil
}

func (r *tagResolver) ImagePath(ctx context.Context, obj *models.Tag) (*string, error) {
var hasImage bool
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
@@ -50,6 +50,11 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input MovieCreateInp
return nil, fmt.Errorf("converting studio id: %w", err)
}

newMovie.TagIDs, err = translator.relatedIds(input.TagIds)
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
}

if input.Urls != nil {
newMovie.URLs = models.NewRelatedStrings(input.Urls)
} else if input.URL != nil {

@@ -140,6 +145,11 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input MovieUpdateInp
return nil, fmt.Errorf("converting studio id: %w", err)
}

updatedMovie.TagIDs, err = translator.updateIds(input.TagIds, "tag_ids")
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
}

updatedMovie.URLs = translator.optionalURLs(input.Urls, input.URL)

var frontimageData []byte

@@ -211,6 +221,12 @@ func (r *mutationResolver) BulkMovieUpdate(ctx context.Context, input BulkMovieU
if err != nil {
return nil, fmt.Errorf("converting studio id: %w", err)
}

updatedMovie.TagIDs, err = translator.updateIdsBulk(input.TagIds, "tag_ids")
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
}

updatedMovie.URLs = translator.optionalURLsBulk(input.Urls, nil)

ret := []*models.Movie{}
@@ -12,6 +12,11 @@ import (
"github.com/stashapp/stash/pkg/utils"
)

const (
twitterURL = "https://twitter.com"
instagramURL = "https://instagram.com"
)

// used to refetch performer after hooks run
func (r *mutationResolver) getPerformer(ctx context.Context, id int) (ret *models.Performer, err error) {
if err := r.withTxn(ctx, func(ctx context.Context) error {

@@ -35,7 +40,6 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
newPerformer.Name = input.Name
newPerformer.Disambiguation = translator.string(input.Disambiguation)
newPerformer.Aliases = models.NewRelatedStrings(input.AliasList)
newPerformer.URL = translator.string(input.URL)
newPerformer.Gender = input.Gender
newPerformer.Ethnicity = translator.string(input.Ethnicity)
newPerformer.Country = translator.string(input.Country)

@@ -47,8 +51,6 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
newPerformer.CareerLength = translator.string(input.CareerLength)
newPerformer.Tattoos = translator.string(input.Tattoos)
newPerformer.Piercings = translator.string(input.Piercings)
newPerformer.Twitter = translator.string(input.Twitter)
newPerformer.Instagram = translator.string(input.Instagram)
newPerformer.Favorite = translator.bool(input.Favorite)
newPerformer.Rating = input.Rating100
newPerformer.Details = translator.string(input.Details)

@@ -58,6 +60,21 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
newPerformer.IgnoreAutoTag = translator.bool(input.IgnoreAutoTag)
newPerformer.StashIDs = models.NewRelatedStashIDs(input.StashIds)

newPerformer.URLs = models.NewRelatedStrings([]string{})
if input.URL != nil {
newPerformer.URLs.Add(*input.URL)
}
if input.Twitter != nil {
newPerformer.URLs.Add(utils.URLFromHandle(*input.Twitter, twitterURL))
}
if input.Instagram != nil {
newPerformer.URLs.Add(utils.URLFromHandle(*input.Instagram, instagramURL))
}

if input.Urls != nil {
newPerformer.URLs.Add(input.Urls...)
}

var err error

newPerformer.Birthdate, err = translator.datePtr(input.Birthdate)

@@ -112,6 +129,96 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
return r.getPerformer(ctx, newPerformer.ID)
}

func (r *mutationResolver) validateNoLegacyURLs(translator changesetTranslator) error {
// ensure url/twitter/instagram are not included in the input
if translator.hasField("url") {
return fmt.Errorf("url field must not be included if urls is included")
}
if translator.hasField("twitter") {
return fmt.Errorf("twitter field must not be included if urls is included")
}
if translator.hasField("instagram") {
return fmt.Errorf("instagram field must not be included if urls is included")
}

return nil
}

func (r *mutationResolver) handleLegacyURLs(ctx context.Context, performerID int, legacyURL, legacyTwitter, legacyInstagram models.OptionalString, updatedPerformer *models.PerformerPartial) error {
qb := r.repository.Performer

// we need to be careful with URL/Twitter/Instagram
// treat URL as replacing the first non-Twitter/Instagram URL in the list
// twitter should replace any existing twitter URL
// instagram should replace any existing instagram URL
p, err := qb.Find(ctx, performerID)
if err != nil {
return err
}

if err := p.LoadURLs(ctx, qb); err != nil {
return fmt.Errorf("loading performer URLs: %w", err)
}

existingURLs := p.URLs.List()

// performer partial URLs should be empty
if legacyURL.Set {
replaced := false
for i, url := range existingURLs {
if !performer.IsTwitterURL(url) && !performer.IsInstagramURL(url) {
existingURLs[i] = legacyURL.Value
replaced = true
break
}
}

if !replaced {
existingURLs = append(existingURLs, legacyURL.Value)
}
}

if legacyTwitter.Set {
value := utils.URLFromHandle(legacyTwitter.Value, twitterURL)
found := false
// find and replace the first twitter URL
for i, url := range existingURLs {
if performer.IsTwitterURL(url) {
existingURLs[i] = value
found = true
break
}
}

if !found {
existingURLs = append(existingURLs, value)
}
}
if legacyInstagram.Set {
found := false
value := utils.URLFromHandle(legacyInstagram.Value, instagramURL)
// find and replace the first instagram URL
for i, url := range existingURLs {
if performer.IsInstagramURL(url) {
existingURLs[i] = value
found = true
break
}
}

if !found {
existingURLs = append(existingURLs, value)
}
}

updatedPerformer.URLs = &models.UpdateStrings{
Values: existingURLs,
Mode: models.RelationshipUpdateModeSet,
}

return nil
}

func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.PerformerUpdateInput) (*models.Performer, error) {
performerID, err := strconv.Atoi(input.ID)
if err != nil {

@@ -127,7 +234,6 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per

updatedPerformer.Name = translator.optionalString(input.Name, "name")
updatedPerformer.Disambiguation = translator.optionalString(input.Disambiguation, "disambiguation")
updatedPerformer.URL = translator.optionalString(input.URL, "url")
updatedPerformer.Gender = translator.optionalString((*string)(input.Gender), "gender")
updatedPerformer.Ethnicity = translator.optionalString(input.Ethnicity, "ethnicity")
updatedPerformer.Country = translator.optionalString(input.Country, "country")

@@ -139,8 +245,6 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
updatedPerformer.CareerLength = translator.optionalString(input.CareerLength, "career_length")
updatedPerformer.Tattoos = translator.optionalString(input.Tattoos, "tattoos")
updatedPerformer.Piercings = translator.optionalString(input.Piercings, "piercings")
updatedPerformer.Twitter = translator.optionalString(input.Twitter, "twitter")
updatedPerformer.Instagram = translator.optionalString(input.Instagram, "instagram")
updatedPerformer.Favorite = translator.optionalBool(input.Favorite, "favorite")
updatedPerformer.Rating = translator.optionalInt(input.Rating100, "rating100")
updatedPerformer.Details = translator.optionalString(input.Details, "details")

@@ -149,6 +253,19 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
updatedPerformer.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag")
updatedPerformer.StashIDs = translator.updateStashIDs(input.StashIds, "stash_ids")

if translator.hasField("urls") {
// ensure url/twitter/instagram are not included in the input
if err := r.validateNoLegacyURLs(translator); err != nil {
return nil, err
}

updatedPerformer.URLs = translator.updateStrings(input.Urls, "urls")
}

legacyURL := translator.optionalString(input.URL, "url")
legacyTwitter := translator.optionalString(input.Twitter, "twitter")
legacyInstagram := translator.optionalString(input.Instagram, "instagram")

updatedPerformer.Birthdate, err = translator.optionalDate(input.Birthdate, "birthdate")
if err != nil {
return nil, fmt.Errorf("converting birthdate: %w", err)

@@ -186,6 +303,12 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Performer

if legacyURL.Set || legacyTwitter.Set || legacyInstagram.Set {
if err := r.handleLegacyURLs(ctx, performerID, legacyURL, legacyTwitter, legacyInstagram, &updatedPerformer); err != nil {
return err
}
}

if err := performer.ValidateUpdate(ctx, performerID, updatedPerformer, qb); err != nil {
return err
}

@@ -225,7 +348,7 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe
updatedPerformer := models.NewPerformerPartial()

updatedPerformer.Disambiguation = translator.optionalString(input.Disambiguation, "disambiguation")
updatedPerformer.URL = translator.optionalString(input.URL, "url")

updatedPerformer.Gender = translator.optionalString((*string)(input.Gender), "gender")
updatedPerformer.Ethnicity = translator.optionalString(input.Ethnicity, "ethnicity")
updatedPerformer.Country = translator.optionalString(input.Country, "country")

@@ -237,8 +360,7 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe
updatedPerformer.CareerLength = translator.optionalString(input.CareerLength, "career_length")
updatedPerformer.Tattoos = translator.optionalString(input.Tattoos, "tattoos")
updatedPerformer.Piercings = translator.optionalString(input.Piercings, "piercings")
updatedPerformer.Twitter = translator.optionalString(input.Twitter, "twitter")
updatedPerformer.Instagram = translator.optionalString(input.Instagram, "instagram")

updatedPerformer.Favorite = translator.optionalBool(input.Favorite, "favorite")
updatedPerformer.Rating = translator.optionalInt(input.Rating100, "rating100")
updatedPerformer.Details = translator.optionalString(input.Details, "details")

@@ -246,6 +368,19 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe
updatedPerformer.Weight = translator.optionalInt(input.Weight, "weight")
updatedPerformer.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag")

if translator.hasField("urls") {
// ensure url/twitter/instagram are not included in the input
if err := r.validateNoLegacyURLs(translator); err != nil {
return nil, err
}

updatedPerformer.URLs = translator.updateStringsBulk(input.Urls, "urls")
}

legacyURL := translator.optionalString(input.URL, "url")
legacyTwitter := translator.optionalString(input.Twitter, "twitter")
legacyInstagram := translator.optionalString(input.Instagram, "instagram")

updatedPerformer.Birthdate, err = translator.optionalDate(input.Birthdate, "birthdate")
if err != nil {
return nil, fmt.Errorf("converting birthdate: %w", err)

@@ -277,6 +412,12 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe
qb := r.repository.Performer

for _, performerID := range performerIDs {
if legacyURL.Set || legacyTwitter.Set || legacyInstagram.Set {
if err := r.handleLegacyURLs(ctx, performerID, legacyURL, legacyTwitter, legacyInstagram, &updatedPerformer); err != nil {
return err
}
}

if err := performer.ValidateUpdate(ctx, performerID, updatedPerformer, qb); err != nil {
return err
}
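The comments inside handleLegacyURLs describe the merge rule for the deprecated fields: a legacy twitter or instagram value replaces the first matching URL in the list, or is appended if none exists. A minimal, self-contained sketch of that rule under simplified assumptions (the `isTwitterURL` and `urlFromHandle` helpers below are stand-ins for the real `performer.IsTwitterURL` and `utils.URLFromHandle`, whose exact behaviour may differ):

```go
package main

import (
	"fmt"
	"strings"
)

// isTwitterURL is a simplified stand-in for performer.IsTwitterURL.
func isTwitterURL(u string) bool {
	return strings.Contains(u, "twitter.com")
}

// urlFromHandle is a simplified stand-in for utils.URLFromHandle: it turns a
// bare handle into a full URL and leaves values that already look like URLs alone.
func urlFromHandle(handle, site string) string {
	if strings.HasPrefix(handle, "http://") || strings.HasPrefix(handle, "https://") {
		return handle
	}
	return site + "/" + handle
}

// mergeLegacyTwitter mirrors the handleLegacyURLs rule for the twitter field:
// replace the first twitter URL in the list, or append one if there is none.
func mergeLegacyTwitter(urls []string, handle string) []string {
	value := urlFromHandle(handle, "https://twitter.com")
	for i, u := range urls {
		if isTwitterURL(u) {
			urls[i] = value
			return urls
		}
	}
	return append(urls, value)
}

func main() {
	existing := []string{"https://example.com/performer", "https://twitter.com/oldhandle"}
	fmt.Println(mergeLegacyTwitter(existing, "newhandle"))
	// [https://example.com/performer https://twitter.com/newhandle]
}
```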
@@ -7,7 +7,10 @@ import (
"strconv"
"strings"

"github.com/mitchellh/mapstructure"
"github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)

func (r *mutationResolver) SaveFilter(ctx context.Context, input SaveFilterInput) (ret *models.SavedFilter, err error) {

@@ -67,30 +70,48 @@ func (r *mutationResolver) DestroySavedFilter(ctx context.Context, input Destroy
}

func (r *mutationResolver) SetDefaultFilter(ctx context.Context, input SetDefaultFilterInput) (bool, error) {
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.SavedFilter
// deprecated - write to the config in the meantime
config := config.GetInstance()

if input.FindFilter == nil && input.ObjectFilter == nil && input.UIOptions == nil {
// clearing
def, err := qb.FindDefault(ctx, input.Mode)
if err != nil {
return err
}
uiConfig := config.GetUIConfiguration()
if uiConfig == nil {
uiConfig = make(map[string]interface{})
}

if def != nil {
return qb.Destroy(ctx, def.ID)
}
m := utils.NestedMap(uiConfig)

return nil
if input.FindFilter == nil && input.ObjectFilter == nil && input.UIOptions == nil {
// clearing
m.Delete("defaultFilters." + strings.ToLower(input.Mode.String()))
config.SetUIConfiguration(m)

if err := config.Write(); err != nil {
return false, err
}

return qb.SetDefault(ctx, &models.SavedFilter{
Mode: input.Mode,
FindFilter: input.FindFilter,
ObjectFilter: input.ObjectFilter,
UIOptions: input.UIOptions,
})
}); err != nil {
return true, nil
}

subMap := make(map[string]interface{})
d, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
TagName: "json",
WeaklyTypedInput: true,
Result: &subMap,
})

if err != nil {
return false, err
}

if err := d.Decode(input); err != nil {
return false, err
}

m.Set("defaultFilters."+strings.ToLower(input.Mode.String()), subMap)

config.SetUIConfiguration(m)

if err := config.Write(); err != nil {
return false, err
}
@@ -48,6 +48,11 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
return nil, fmt.Errorf("converting parent id: %w", err)
}

newStudio.TagIDs, err = translator.relatedIds(input.TagIds)
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
}

// Process the base 64 encoded image string
var imageData []byte
if input.Image != nil {

@@ -114,6 +119,11 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
return nil, fmt.Errorf("converting parent id: %w", err)
}

updatedStudio.TagIDs, err = translator.updateIds(input.TagIds, "tag_ids")
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
}

// Process the base 64 encoded image string
var imageData []byte
imageIncluded := translator.hasField("image")
@@ -3,8 +3,12 @@ package api
import (
"context"
"strconv"
"strings"

"github.com/mitchellh/mapstructure"
"github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)

func (r *queryResolver) FindSavedFilter(ctx context.Context, id string) (ret *models.SavedFilter, err error) {

@@ -37,11 +41,35 @@ func (r *queryResolver) FindSavedFilters(ctx context.Context, mode *models.Filte
}

func (r *queryResolver) FindDefaultFilter(ctx context.Context, mode models.FilterMode) (ret *models.SavedFilter, err error) {
if err := r.withReadTxn(ctx, func(ctx context.Context) error {
ret, err = r.repository.SavedFilter.FindDefault(ctx, mode)
return err
}); err != nil {
// deprecated - read from the config in the meantime
config := config.GetInstance()

uiConfig := config.GetUIConfiguration()
if uiConfig == nil {
return nil, nil
}

m := utils.NestedMap(uiConfig)
filterRaw, _ := m.Get("defaultFilters." + strings.ToLower(mode.String()))

if filterRaw == nil {
return nil, nil
}

ret = &models.SavedFilter{}
d, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
TagName: "json",
WeaklyTypedInput: true,
Result: ret,
})

if err != nil {
return nil, err
}
return ret, err

if err := d.Decode(filterRaw); err != nil {
return nil, err
}

return ret, nil
}
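Both the SetDefaultFilter and FindDefaultFilter resolvers now round-trip the default filter through the UI configuration map under a `defaultFilters.<mode>` key, using mapstructure with `TagName: "json"` to convert between the map and the struct. A self-contained sketch of that decoding step, assuming an illustrative config shape and a cut-down stand-in for `models.SavedFilter` (field names here are placeholders, not the real model):

```go
package main

import (
	"fmt"

	"github.com/mitchellh/mapstructure"
)

// savedFilter is a cut-down stand-in for models.SavedFilter, used only to show
// how a "defaultFilters" entry in the UI config decodes back into a struct.
type savedFilter struct {
	Mode       string                 `json:"mode"`
	FindFilter map[string]interface{} `json:"find_filter"`
}

func main() {
	// Assumed shape for illustration: uiConfig["defaultFilters"]["scenes"] holds
	// the default filter previously stored in the saved-filters table.
	uiConfig := map[string]interface{}{
		"defaultFilters": map[string]interface{}{
			"scenes": map[string]interface{}{
				"mode":        "SCENES",
				"find_filter": map[string]interface{}{"per_page": 40, "sort": "date"},
			},
		},
	}

	raw := uiConfig["defaultFilters"].(map[string]interface{})["scenes"]

	var out savedFilter
	d, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
		TagName:          "json", // match the struct's json tags, as the resolver does
		WeaklyTypedInput: true,
		Result:           &out,
	})
	if err != nil {
		panic(err)
	}
	if err := d.Decode(raw); err != nil {
		panic(err)
	}

	fmt.Printf("%+v\n", out)
}
```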
@@ -144,6 +144,23 @@ func filterPerformerTags(p []*models.ScrapedPerformer) {
}
}

// filterMovieTags removes tags matching excluded tag patterns from the provided scraped movies
func filterMovieTags(p []*models.ScrapedMovie) {
excludeRegexps := compileRegexps(manager.GetInstance().Config.GetScraperExcludeTagPatterns())

var ignoredTags []string

for _, s := range p {
var ignored []string
s.Tags, ignored = filterTags(excludeRegexps, s.Tags)
ignoredTags = sliceutil.AppendUniques(ignoredTags, ignored)
}

if len(ignoredTags) > 0 {
logger.Debugf("Scraping ignored tags: %s", strings.Join(ignoredTags, ", "))
}
}

func (r *queryResolver) ScrapeSceneURL(ctx context.Context, url string) (*scraper.ScrapedScene, error) {
content, err := r.scraperCache().ScrapeURL(ctx, url, scraper.ScrapeContentTypeScene)
if err != nil {

@@ -186,7 +203,14 @@ func (r *queryResolver) ScrapeMovieURL(ctx context.Context, url string) (*models
return nil, err
}

return marshalScrapedMovie(content)
ret, err := marshalScrapedMovie(content)
if err != nil {
return nil, err
}

filterMovieTags([]*models.ScrapedMovie{ret})

return ret, nil
}

func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source scraper.Source, input ScrapeSingleSceneInput) ([]*scraper.ScrapedScene, error) {
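filterMovieTags applies the configured scraper exclude-tag patterns to scraped movies, splitting each tag list into kept and ignored entries. A simplified, self-contained sketch of that split on plain tag names (the real `filterTags` works on `*models.ScrapedTag` values and its matching rules, such as case sensitivity, may differ):

```go
package main

import (
	"fmt"
	"regexp"
)

// filterTagNames is a simplified stand-in for the filterTags helper above:
// it partitions tag names into kept and ignored based on exclude patterns.
func filterTagNames(excludes []*regexp.Regexp, tags []string) (kept, ignored []string) {
	for _, t := range tags {
		excluded := false
		for _, re := range excludes {
			if re.MatchString(t) {
				excluded = true
				break
			}
		}
		if excluded {
			ignored = append(ignored, t)
		} else {
			kept = append(kept, t)
		}
	}
	return kept, ignored
}

func main() {
	excludes := []*regexp.Regexp{regexp.MustCompile(`(?i)^watermark`)}
	kept, ignored := filterTagNames(excludes, []string{"Watermarked", "Interview"})
	fmt.Println(kept, ignored) // [Interview] [Watermarked]
}
```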
@@ -46,17 +46,17 @@ func createMissingStudio(ctx context.Context, endpoint string, w models.StudioRe
return nil, err
}

studioPartial := s.Parent.ToPartial(s.Parent.StoredID, endpoint, nil, existingStashIDs)
studioPartial := s.Parent.ToPartial(*s.Parent.StoredID, endpoint, nil, existingStashIDs)
parentImage, err := s.Parent.GetImage(ctx, nil)
if err != nil {
return nil, err
}

if err := studio.ValidateModify(ctx, *studioPartial, w); err != nil {
if err := studio.ValidateModify(ctx, studioPartial, w); err != nil {
return nil, err
}

_, err = w.UpdatePartial(ctx, *studioPartial)
_, err = w.UpdatePartial(ctx, studioPartial)
if err != nil {
return nil, err
}
@@ -23,19 +23,27 @@ type MigrateJob struct {
Database *sqlite.Database
}

type databaseSchemaInfo struct {
CurrentSchemaVersion uint
RequiredSchemaVersion uint
StepsRequired uint
}

func (s *MigrateJob) Execute(ctx context.Context, progress *job.Progress) error {
required, err := s.required()
schemaInfo, err := s.required()
if err != nil {
return err
}

if required == 0 {
if schemaInfo.StepsRequired == 0 {
logger.Infof("database is already at the latest schema version")
return nil
}

logger.Infof("Migrating database from %d to %d", schemaInfo.CurrentSchemaVersion, schemaInfo.RequiredSchemaVersion)

// set the number of tasks = required steps + optimise
progress.SetTotal(int(required + 1))
progress.SetTotal(int(schemaInfo.StepsRequired + 1))

database := s.Database

@@ -79,28 +87,31 @@ func (s *MigrateJob) Execute(ctx context.Context, progress *job.Progress) error
}
}

logger.Infof("Database migration complete")

return nil
}

func (s *MigrateJob) required() (uint, error) {
func (s *MigrateJob) required() (ret databaseSchemaInfo, err error) {
database := s.Database

m, err := sqlite.NewMigrator(database)
if err != nil {
return 0, err
return
}

defer m.Close()

currentSchemaVersion := m.CurrentSchemaVersion()
targetSchemaVersion := m.RequiredSchemaVersion()
ret.CurrentSchemaVersion = m.CurrentSchemaVersion()
ret.RequiredSchemaVersion = m.RequiredSchemaVersion()

if targetSchemaVersion < currentSchemaVersion {
if ret.RequiredSchemaVersion < ret.CurrentSchemaVersion {
// shouldn't happen
return 0, nil
return
}

return targetSchemaVersion - currentSchemaVersion, nil
ret.StepsRequired = ret.RequiredSchemaVersion - ret.CurrentSchemaVersion
return
}

func (s *MigrateJob) runMigrations(ctx context.Context, progress *job.Progress) error {
@@ -982,6 +982,7 @@ func (t *ExportTask) ExportStudios(ctx context.Context, workers int) {
func (t *ExportTask) exportStudio(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Studio) {
defer wg.Done()

r := t.repository
studioReader := t.repository.Studio

for s := range jobChan {

@@ -992,6 +993,18 @@ func (t *ExportTask) exportStudio(ctx context.Context, wg *sync.WaitGroup, jobCh
continue
}

tags, err := r.Tag.FindByStudioID(ctx, s.ID)
if err != nil {
logger.Errorf("[studios] <%s> error getting studio tags: %s", s.Name, err.Error())
continue
}

newStudioJSON.Tags = tag.GetNames(tags)

if t.includeDependencies {
t.tags.IDs = sliceutil.AppendUniques(t.tags.IDs, tag.GetIDs(tags))
}

fn := newStudioJSON.Filename()

if err := t.json.saveStudio(fn, newStudioJSON); err != nil {

@@ -1107,6 +1120,7 @@ func (t *ExportTask) exportMovie(ctx context.Context, wg *sync.WaitGroup, jobCha
r := t.repository
movieReader := r.Movie
studioReader := r.Studio
tagReader := r.Tag

for m := range jobChan {
if err := m.LoadURLs(ctx, r.Movie); err != nil {

@@ -1121,6 +1135,14 @@ func (t *ExportTask) exportMovie(ctx context.Context, wg *sync.WaitGroup, jobCha
continue
}

tags, err := tagReader.FindByMovieID(ctx, m.ID)
if err != nil {
logger.Errorf("[movies] <%s> error getting image tag names: %v", m.Name, err)
continue
}

newMovieJSON.Tags = tag.GetNames(tags)

if t.includeDependencies {
if m.StudioID != nil {
t.studios.IDs = sliceutil.AppendUnique(t.studios.IDs, *m.StudioID)
@@ -292,8 +292,11 @@ func (t *ImportTask) ImportStudios(ctx context.Context) {
}

func (t *ImportTask) importStudio(ctx context.Context, studioJSON *jsonschema.Studio, pendingParent map[string][]*jsonschema.Studio) error {
r := t.repository

importer := &studio.Importer{
ReaderWriter: t.repository.Studio,
TagWriter: r.Tag,
Input: *studioJSON,
MissingRefBehaviour: t.MissingRefBehaviour,
}

@@ -351,6 +354,7 @@ func (t *ImportTask) ImportMovies(ctx context.Context) {
movieImporter := &movie.Importer{
ReaderWriter: r.Movie,
StudioWriter: r.Studio,
TagWriter: r.Tag,
Input: *movieJSON,
MissingRefBehaviour: t.MissingRefBehaviour,
}
@@ -311,13 +311,13 @@ func (t *StashBoxBatchTagTask) processMatchedStudio(ctx context.Context, s *mode
return err
}

partial := s.ToPartial(s.StoredID, t.box.Endpoint, excluded, existingStashIDs)
partial := s.ToPartial(*s.StoredID, t.box.Endpoint, excluded, existingStashIDs)

if err := studio.ValidateModify(ctx, *partial, qb); err != nil {
if err := studio.ValidateModify(ctx, partial, qb); err != nil {
return err
}

if _, err := qb.UpdatePartial(ctx, *partial); err != nil {
if _, err := qb.UpdatePartial(ctx, partial); err != nil {
return err
}

@@ -435,13 +435,13 @@ func (t *StashBoxBatchTagTask) processParentStudio(ctx context.Context, parent *
return err
}

partial := parent.ToPartial(parent.StoredID, t.box.Endpoint, excluded, existingStashIDs)
partial := parent.ToPartial(*parent.StoredID, t.box.Endpoint, excluded, existingStashIDs)

if err := studio.ValidateModify(ctx, *partial, qb); err != nil {
if err := studio.ValidateModify(ctx, partial, qb); err != nil {
return err
}

if _, err := qb.UpdatePartial(ctx, *partial); err != nil {
if _, err := qb.UpdatePartial(ctx, partial); err != nil {
return err
}
@@ -20,7 +20,7 @@ var validForHevc = []Container{Mp4}

var validAudioForMkv = []ProbeAudioCodec{Aac, Mp3, Vorbis, Opus}
var validAudioForWebm = []ProbeAudioCodec{Vorbis, Opus}
var validAudioForMp4 = []ProbeAudioCodec{Aac, Mp3}
var validAudioForMp4 = []ProbeAudioCodec{Aac, Mp3, Opus}

var (
// ErrUnsupportedVideoCodecForBrowser is returned when the video codec is not supported for browser streaming.
@ -4,7 +4,9 @@ import (
|
|||
"bytes"
|
||||
"context"
|
||||
"fmt"
|
||||
"math"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
|
|
@ -25,7 +27,7 @@ var (
|
|||
VideoCodecVVPX VideoCodec = "vp8_vaapi"
|
||||
)
|
||||
|
||||
const minHeight int = 256
|
||||
const minHeight int = 480
|
||||
|
||||
// Tests all (given) hardware codec's
|
||||
func (f *FFMpeg) InitHWSupport(ctx context.Context) {
|
||||
|
|
@ -38,17 +40,19 @@ func (f *FFMpeg) InitHWSupport(ctx context.Context) {
|
|||
VideoCodecR264,
|
||||
VideoCodecIVP9,
|
||||
VideoCodecVVP9,
|
||||
VideoCodecM264,
|
||||
} {
|
||||
var args Args
|
||||
args = append(args, "-hide_banner")
|
||||
args = args.LogLevel(LogLevelWarning)
|
||||
args = f.hwDeviceInit(args, codec, false)
|
||||
args = args.Format("lavfi")
|
||||
args = args.Input(fmt.Sprintf("color=c=red:s=%dx%d", 1280, 720))
|
||||
vFile := &models.VideoFile{Width: 1280, Height: 720}
|
||||
args = args.Input(fmt.Sprintf("color=c=red:s=%dx%d", vFile.Width, vFile.Height))
|
||||
args = args.Duration(0.1)
|
||||
|
||||
// Test scaling
|
||||
videoFilter := f.hwMaxResFilter(codec, 1280, 720, minHeight, false)
|
||||
videoFilter := f.hwMaxResFilter(codec, vFile, minHeight, false)
|
||||
args = append(args, CodecInit(codec)...)
|
||||
args = args.VideoFilter(videoFilter)
|
||||
|
||||
|
|
@ -93,9 +97,9 @@ func (f *FFMpeg) hwCanFullHWTranscode(ctx context.Context, codec VideoCodec, vf
|
|||
args = args.XError()
|
||||
args = f.hwDeviceInit(args, codec, true)
|
||||
args = args.Input(vf.Path)
|
||||
args = args.Duration(0.1)
|
||||
args = args.Duration(1)
|
||||
|
||||
videoFilter := f.hwMaxResFilter(codec, vf.Width, vf.Height, reqHeight, true)
|
||||
videoFilter := f.hwMaxResFilter(codec, vf, reqHeight, true)
|
||||
args = append(args, CodecInit(codec)...)
|
||||
args = args.VideoFilter(videoFilter)
|
||||
|
||||
|
|
@ -128,12 +132,12 @@ func (f *FFMpeg) hwDeviceInit(args Args, toCodec VideoCodec, fullhw bool) Args {
|
|||
args = append(args, "-hwaccel_device")
|
||||
args = append(args, "0")
|
||||
if fullhw {
|
||||
args = append(args, "-threads")
|
||||
args = append(args, "1")
|
||||
args = append(args, "-hwaccel")
|
||||
args = append(args, "cuda")
|
||||
args = append(args, "-hwaccel_output_format")
|
||||
args = append(args, "cuda")
|
||||
args = append(args, "-extra_hw_frames")
|
||||
args = append(args, "5")
|
||||
}
|
||||
case VideoCodecV264,
|
||||
VideoCodecVVP9:
|
||||
|
|
@ -158,6 +162,16 @@ func (f *FFMpeg) hwDeviceInit(args Args, toCodec VideoCodec, fullhw bool) Args {
|
|||
args = append(args, "-filter_hw_device")
|
||||
args = append(args, "hw")
|
||||
}
|
||||
case VideoCodecM264:
|
||||
if fullhw {
|
||||
args = append(args, "-hwaccel")
|
||||
args = append(args, "videotoolbox")
|
||||
args = append(args, "-hwaccel_output_format")
|
||||
args = append(args, "videotoolbox_vld")
|
||||
} else {
|
||||
args = append(args, "-init_hw_device")
|
||||
args = append(args, "videotoolbox=vt")
|
||||
}
|
||||
}
|
||||
|
||||
return args
|
||||
|
|
@ -175,7 +189,7 @@ func (f *FFMpeg) hwFilterInit(toCodec VideoCodec, fullhw bool) VideoFilter {
|
|||
}
|
||||
case VideoCodecN264:
|
||||
if !fullhw {
|
||||
videoFilter = videoFilter.Append("format=nv12")
|
||||
videoFilter = videoFilter.Append("format=yuv420p")
|
||||
videoFilter = videoFilter.Append("hwupload_cuda")
|
||||
}
|
||||
case VideoCodecI264,
|
||||
|
|
@ -184,80 +198,146 @@ func (f *FFMpeg) hwFilterInit(toCodec VideoCodec, fullhw bool) VideoFilter {
|
|||
videoFilter = videoFilter.Append("hwupload=extra_hw_frames=64")
|
||||
videoFilter = videoFilter.Append("format=qsv")
|
||||
}
|
||||
case VideoCodecM264:
|
||||
if !fullhw {
|
||||
videoFilter = videoFilter.Append("format=nv12")
|
||||
videoFilter = videoFilter.Append("hwupload")
|
||||
}
|
||||
}
|
||||
|
||||
return videoFilter
|
||||
}
|
||||
|
||||
var scaler_re = regexp.MustCompile(`scale=(?P<value>[-\d]+:[-\d]+)`)
|
||||
var scaler_re = regexp.MustCompile(`scale=(?P<value>([-\d]+):([-\d]+))`)
|
||||
|
||||
func templateReplaceScale(input string, template string, match []int, minusonehack bool) string {
|
||||
func templateReplaceScale(input string, template string, match []int, vf *models.VideoFile, minusonehack bool) string {
|
||||
result := []byte{}
|
||||
|
||||
res := string(scaler_re.ExpandString(result, template, input, match))
|
||||
|
||||
// BUG: [scale_qsv]: Size values less than -1 are not acceptable.
|
||||
// Fix: Replace all instances of -2 with -1 in a scale operation
|
||||
if minusonehack {
|
||||
res = strings.ReplaceAll(res, "-2", "-1")
|
||||
// Parse width and height
|
||||
w, err := strconv.Atoi(input[match[4]:match[5]])
|
||||
if err != nil {
|
||||
logger.Error("failed to parse width")
|
||||
return input
|
||||
}
|
||||
h, err := strconv.Atoi(input[match[6]:match[7]])
|
||||
if err != nil {
|
||||
logger.Error("failed to parse height")
|
||||
return input
|
||||
}
|
||||
|
||||
// Calculate ratio
|
||||
ratio := float64(vf.Width) / float64(vf.Height)
|
||||
if w < 0 {
|
||||
w = int(math.Round(float64(h) * ratio))
|
||||
} else if h < 0 {
|
||||
h = int(math.Round(float64(w) / ratio))
|
||||
}
|
||||
|
||||
// Fix not divisible by 2 errors
|
||||
if w%2 != 0 {
|
||||
w++
|
||||
}
|
||||
if h%2 != 0 {
|
||||
h++
|
||||
}
|
||||
|
||||
template = strings.ReplaceAll(template, "$value", fmt.Sprintf("%d:%d", w, h))
|
||||
}
|
||||
|
||||
res := string(scaler_re.ExpandString(result, template, input, match))
|
||||
|
||||
matchStart := match[0]
|
||||
matchEnd := match[1]
|
||||
|
||||
return input[0:matchStart] + res + input[matchEnd:]
|
||||
}
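The replacement logic above is easier to follow in isolation. Below is a minimal, self-contained sketch (illustrative names, not part of the patch) of the same idea: a negative placeholder in a scale expression is resolved against the source aspect ratio and then bumped to an even value, since hardware scalers such as scale_qsv and scale_vt reject sizes below -1 and dimensions not divisible by 2.

```go
// Illustrative sketch of the placeholder resolution performed above.
package main

import (
	"fmt"
	"math"
)

// resolveScale replaces a negative width or height with an explicit value
// derived from the source aspect ratio, then rounds odd results up to even.
func resolveScale(srcW, srcH, w, h int) (int, int) {
	ratio := float64(srcW) / float64(srcH)
	if w < 0 {
		w = int(math.Round(float64(h) * ratio))
	} else if h < 0 {
		h = int(math.Round(float64(w) / ratio))
	}
	if w%2 != 0 {
		w++
	}
	if h%2 != 0 {
		h++
	}
	return w, h
}

func main() {
	w, h := resolveScale(1920, 1080, -2, 480)
	fmt.Printf("scale_qsv=%d:%d\n", w, h) // scale_qsv=854:480
}
```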
|
||||
|
||||
// Replace video filter scaling with hardware scaling for full hardware transcoding
|
||||
func (f *FFMpeg) hwCodecFilter(args VideoFilter, codec VideoCodec, fullhw bool) VideoFilter {
|
||||
// Replace video filter scaling with hardware scaling for full hardware transcoding (also fixes the format)
|
||||
func (f *FFMpeg) hwCodecFilter(args VideoFilter, codec VideoCodec, vf *models.VideoFile, fullhw bool) VideoFilter {
|
||||
sargs := string(args)
|
||||
|
||||
match := scaler_re.FindStringSubmatchIndex(sargs)
|
||||
if match == nil {
|
||||
return args
|
||||
return f.hwApplyFullHWFilter(args, codec, fullhw)
|
||||
}
|
||||
|
||||
return f.hwApplyScaleTemplate(sargs, codec, match, vf, fullhw)
|
||||
}
|
||||
|
||||
// Apply format switching if applicable
|
||||
func (f *FFMpeg) hwApplyFullHWFilter(args VideoFilter, codec VideoCodec, fullhw bool) VideoFilter {
|
||||
switch codec {
|
||||
case VideoCodecN264:
|
||||
template := "scale_cuda=$value"
|
||||
// With 10-bit inputs you might get an error like "10 bit encode not supported"
|
||||
if fullhw && f.version.major >= 5 {
|
||||
template += ":format=nv12"
|
||||
if fullhw && f.version.Gteq(FFMpegVersion{major: 5}) { // Added in FFMpeg 5
|
||||
args = args.Append("scale_cuda=format=yuv420p")
|
||||
}
|
||||
case VideoCodecV264, VideoCodecVVP9:
|
||||
if fullhw && f.version.Gteq(FFMpegVersion{major: 3, minor: 1}) { // Added in FFMpeg 3.1
|
||||
args = args.Append("scale_vaapi=format=nv12")
|
||||
}
|
||||
case VideoCodecI264, VideoCodecIVP9:
|
||||
if fullhw && f.version.Gteq(FFMpegVersion{major: 3, minor: 3}) { // Added in FFMpeg 3.3
|
||||
args = args.Append("scale_qsv=format=nv12")
|
||||
}
|
||||
args = VideoFilter(templateReplaceScale(sargs, template, match, false))
|
||||
case VideoCodecV264,
|
||||
VideoCodecVVP9:
|
||||
template := "scale_vaapi=$value"
|
||||
args = VideoFilter(templateReplaceScale(sargs, template, match, false))
|
||||
case VideoCodecI264,
|
||||
VideoCodecIVP9:
|
||||
template := "scale_qsv=$value"
|
||||
args = VideoFilter(templateReplaceScale(sargs, template, match, true))
|
||||
}
|
||||
|
||||
return args
|
||||
}
|
||||
|
||||
// Switch scaler
|
||||
func (f *FFMpeg) hwApplyScaleTemplate(sargs string, codec VideoCodec, match []int, vf *models.VideoFile, fullhw bool) VideoFilter {
|
||||
var template string
|
||||
|
||||
switch codec {
|
||||
case VideoCodecN264:
|
||||
template = "scale_cuda=$value"
|
||||
if fullhw && f.version.Gteq(FFMpegVersion{major: 5}) { // Added in FFMpeg 5
|
||||
template += ":format=yuv420p"
|
||||
}
|
||||
case VideoCodecV264, VideoCodecVVP9:
|
||||
template = "scale_vaapi=$value"
|
||||
if fullhw && f.version.Gteq(FFMpegVersion{major: 3, minor: 1}) { // Added in FFMpeg 3.1
|
||||
template += ":format=nv12"
|
||||
}
|
||||
case VideoCodecI264, VideoCodecIVP9:
|
||||
template = "scale_qsv=$value"
|
||||
if fullhw && f.version.Gteq(FFMpegVersion{major: 3, minor: 3}) { // Added in FFMpeg 3.3
|
||||
template += ":format=nv12"
|
||||
}
|
||||
case VideoCodecM264:
|
||||
template = "scale_vt=$value"
|
||||
default:
|
||||
return VideoFilter(sargs)
|
||||
}
|
||||
|
||||
// BUG: [scale_qsv]: Size values less than -1 are not acceptable.
|
||||
isIntel := codec == VideoCodecI264 || codec == VideoCodecIVP9
|
||||
// BUG: scale_vt doesn't call ff_scale_adjust_dimensions, so it can't accept negative size values
|
||||
isApple := codec == VideoCodecM264
|
||||
return VideoFilter(templateReplaceScale(sargs, template, match, vf, isIntel || isApple))
|
||||
}
|
||||
|
||||
// Returns the max resolution for a given codec, or a default
|
||||
func (f *FFMpeg) hwCodecMaxRes(codec VideoCodec, dW int, dH int) (int, int) {
|
||||
func (f *FFMpeg) hwCodecMaxRes(codec VideoCodec) (int, int) {
|
||||
switch codec {
|
||||
case VideoCodecN264,
|
||||
VideoCodecI264:
|
||||
return 4096, 4096
|
||||
}
|
||||
|
||||
return dW, dH
|
||||
return 0, 0
|
||||
}
|
||||
|
||||
// Return a maxres filter
|
||||
func (f *FFMpeg) hwMaxResFilter(toCodec VideoCodec, width int, height int, reqHeight int, fullhw bool) VideoFilter {
|
||||
if width == 0 || height == 0 {
|
||||
func (f *FFMpeg) hwMaxResFilter(toCodec VideoCodec, vf *models.VideoFile, reqHeight int, fullhw bool) VideoFilter {
|
||||
if vf.Width == 0 || vf.Height == 0 {
|
||||
return ""
|
||||
}
|
||||
videoFilter := f.hwFilterInit(toCodec, fullhw)
|
||||
maxWidth, maxHeight := f.hwCodecMaxRes(toCodec, width, height)
|
||||
videoFilter = videoFilter.ScaleMaxLM(width, height, reqHeight, maxWidth, maxHeight)
|
||||
return f.hwCodecFilter(videoFilter, toCodec, fullhw)
|
||||
maxWidth, maxHeight := f.hwCodecMaxRes(toCodec)
|
||||
videoFilter = videoFilter.ScaleMaxLM(vf.Width, vf.Height, reqHeight, maxWidth, maxHeight)
|
||||
return f.hwCodecFilter(videoFilter, toCodec, vf, fullhw)
|
||||
}
|
||||
|
||||
// Return if a hardware accelerated for HLS is available
|
||||
|
|
@ -267,7 +347,8 @@ func (f *FFMpeg) hwCodecHLSCompatible() *VideoCodec {
|
|||
case VideoCodecN264,
|
||||
VideoCodecI264,
|
||||
VideoCodecV264,
|
||||
VideoCodecR264:
|
||||
VideoCodecR264,
|
||||
VideoCodecM264: // Note that the Apple encoder is slow to start up, so HLS quality suffers at the beginning of a stream
|
||||
return &element
|
||||
}
|
||||
}
|
||||
|
|
@ -279,7 +360,8 @@ func (f *FFMpeg) hwCodecMP4Compatible() *VideoCodec {
|
|||
for _, element := range f.hwCodecSupport {
|
||||
switch element {
|
||||
case VideoCodecN264,
|
||||
VideoCodecI264:
|
||||
VideoCodecI264,
|
||||
VideoCodecM264:
|
||||
return &element
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -195,6 +195,20 @@ type FFMpegVersion struct {
|
|||
patch int
|
||||
}
|
||||
|
||||
// Gteq returns true if the version is greater than or equal to the other version.
|
||||
func (v FFMpegVersion) Gteq(other FFMpegVersion) bool {
|
||||
if v.major > other.major {
|
||||
return true
|
||||
}
|
||||
if v.major == other.major && v.minor > other.minor {
|
||||
return true
|
||||
}
|
||||
if v.major == other.major && v.minor == other.minor && v.patch >= other.patch {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// FFMpeg provides an interface to ffmpeg.
|
||||
type FFMpeg struct {
|
||||
ffmpeg string
|
||||
|
|
|
75 pkg/ffmpeg/ffmpeg_test.go Normal file
@ -0,0 +1,75 @@
|
|||
// Package ffmpeg provides a wrapper around the ffmpeg and ffprobe executables.
|
||||
package ffmpeg
|
||||
|
||||
import "testing"
|
||||
|
||||
func TestFFMpegVersion_GreaterThan(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
this FFMpegVersion
|
||||
other FFMpegVersion
|
||||
want bool
|
||||
}{
|
||||
{
|
||||
"major greater, minor equal, patch equal",
|
||||
FFMpegVersion{2, 0, 0},
|
||||
FFMpegVersion{1, 0, 0},
|
||||
true,
|
||||
},
|
||||
{
|
||||
"major greater, minor less, patch less",
|
||||
FFMpegVersion{2, 1, 1},
|
||||
FFMpegVersion{1, 0, 0},
|
||||
true,
|
||||
},
|
||||
{
|
||||
"major equal, minor greater, patch equal",
|
||||
FFMpegVersion{1, 1, 0},
|
||||
FFMpegVersion{1, 0, 0},
|
||||
true,
|
||||
},
|
||||
{
|
||||
"major equal, minor equal, patch greater",
|
||||
FFMpegVersion{1, 0, 1},
|
||||
FFMpegVersion{1, 0, 0},
|
||||
true,
|
||||
},
|
||||
{
|
||||
"major equal, minor equal, patch equal",
|
||||
FFMpegVersion{1, 0, 0},
|
||||
FFMpegVersion{1, 0, 0},
|
||||
true,
|
||||
},
|
||||
{
|
||||
"major less, minor equal, patch equal",
|
||||
FFMpegVersion{1, 0, 0},
|
||||
FFMpegVersion{2, 0, 0},
|
||||
false,
|
||||
},
|
||||
{
|
||||
"major equal, minor less, patch equal",
|
||||
FFMpegVersion{1, 0, 0},
|
||||
FFMpegVersion{1, 1, 0},
|
||||
false,
|
||||
},
|
||||
{
|
||||
"major equal, minor equal, patch less",
|
||||
FFMpegVersion{1, 0, 0},
|
||||
FFMpegVersion{1, 0, 1},
|
||||
false,
|
||||
},
|
||||
{
|
||||
"major less, minor less, patch less",
|
||||
FFMpegVersion{1, 0, 0},
|
||||
FFMpegVersion{2, 1, 1},
|
||||
false,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if got := tt.this.Gteq(tt.other); got != tt.want {
|
||||
t.Errorf("FFMpegVersion.GreaterThan() = %v, want %v", got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
@ -59,33 +59,28 @@ func (f VideoFilter) ScaleMax(inputWidth, inputHeight, maxSize int) VideoFilter
|
|||
return f.ScaleDimensions(maxSize, -2)
|
||||
}
|
||||
|
||||
// ScaleMaxLM returns a VideoFilter scaling to maxSize with respect to a max size.
|
||||
// ScaleMaxLM scales an image to fit within specified maximum dimensions while maintaining its aspect ratio.
|
||||
func (f VideoFilter) ScaleMaxLM(width int, height int, reqHeight int, maxWidth int, maxHeight int) VideoFilter {
|
||||
// calculate the aspect ratio of the current resolution
|
||||
aspectRatio := width / height
|
||||
if maxWidth == 0 || maxHeight == 0 {
|
||||
return f.ScaleMax(width, height, reqHeight)
|
||||
}
|
||||
|
||||
// find the max height
|
||||
aspectRatio := float64(width) / float64(height)
|
||||
desiredHeight := reqHeight
|
||||
if desiredHeight == 0 {
|
||||
desiredHeight = height
|
||||
}
|
||||
desiredWidth := int(float64(desiredHeight) * aspectRatio)
|
||||
|
||||
// calculate the desired width based on the desired height and the aspect ratio
|
||||
desiredWidth := int(desiredHeight * aspectRatio)
|
||||
|
||||
// check which dimension to scale based on the maximum resolution
|
||||
if desiredHeight > maxHeight || desiredWidth > maxWidth {
|
||||
if desiredHeight-maxHeight > desiredWidth-maxWidth {
|
||||
// scale the height down to the maximum height
|
||||
return f.ScaleDimensions(-2, maxHeight)
|
||||
} else {
|
||||
// scale the width down to the maximum width
|
||||
return f.ScaleDimensions(maxWidth, -2)
|
||||
}
|
||||
if desiredHeight <= maxHeight && desiredWidth <= maxWidth {
|
||||
return f.ScaleMax(width, height, reqHeight)
|
||||
}
|
||||
|
||||
// the current resolution can be scaled to the desired height without exceeding the maximum resolution
|
||||
return f.ScaleMax(width, height, reqHeight)
|
||||
if float64(desiredHeight-maxHeight) > float64(desiredWidth-maxWidth) {
|
||||
return f.ScaleDimensions(-2, maxHeight)
|
||||
} else {
|
||||
return f.ScaleDimensions(maxWidth, -2)
|
||||
}
|
||||
}
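As a quick sanity check of the capping logic above, here is a standalone sketch of the branch selection for a source that exceeds a codec's maximum resolution (pickCap is illustrative, not a function in the patch):

```go
// Illustrative reproduction of the branch selection in ScaleMaxLM.
package main

import "fmt"

func pickCap(width, height, reqHeight, maxWidth, maxHeight int) string {
	ratio := float64(width) / float64(height)
	desiredHeight := reqHeight
	if desiredHeight == 0 {
		desiredHeight = height
	}
	desiredWidth := int(float64(desiredHeight) * ratio)

	if desiredHeight <= maxHeight && desiredWidth <= maxWidth {
		return "within limits: fall back to ScaleMax"
	}
	if float64(desiredHeight-maxHeight) > float64(desiredWidth-maxWidth) {
		return fmt.Sprintf("scale=-2:%d", maxHeight) // height overflows more
	}
	return fmt.Sprintf("scale=%d:-2", maxWidth) // width overflows more
}

func main() {
	// An 8K source against the 4096x4096 NVENC/QSV limit overflows far more in width.
	fmt.Println(pickCap(7680, 4320, 0, 4096, 4096)) // scale=4096:-2
}
```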
|
||||
|
||||
// Fps returns a VideoFilter setting the frames per second.
|
||||
|
|
|
|||
|
|
@ -342,7 +342,7 @@ func (s *runningStream) makeStreamArgs(sm *StreamManager, segment int) Args {
|
|||
|
||||
videoOnly := ProbeAudioCodec(s.vf.AudioCodec) == MissingUnsupported
|
||||
|
||||
videoFilter := sm.encoder.hwMaxResFilter(codec, s.vf.Width, s.vf.Height, s.maxTranscodeSize, fullhw)
|
||||
videoFilter := sm.encoder.hwMaxResFilter(codec, s.vf, s.maxTranscodeSize, fullhw)
|
||||
|
||||
args = append(args, s.streamType.Args(codec, segment, videoFilter, videoOnly, s.outputDir)...)
|
||||
|
||||
|
|
|
|||
|
|
@ -60,7 +60,7 @@ func CodecInit(codec VideoCodec) (args Args) {
|
|||
)
|
||||
case VideoCodecM264:
|
||||
args = append(args,
|
||||
"-prio_speed", "1",
|
||||
"-realtime", "1",
|
||||
)
|
||||
case VideoCodecO264:
|
||||
args = append(args,
|
||||
|
|
@ -198,7 +198,7 @@ func (o TranscodeOptions) makeStreamArgs(sm *StreamManager) Args {
|
|||
|
||||
videoOnly := ProbeAudioCodec(o.VideoFile.AudioCodec) == MissingUnsupported
|
||||
|
||||
videoFilter := sm.encoder.hwMaxResFilter(codec, o.VideoFile.Width, o.VideoFile.Height, maxTranscodeSize, fullhw)
|
||||
videoFilter := sm.encoder.hwMaxResFilter(codec, o.VideoFile, maxTranscodeSize, fullhw)
|
||||
|
||||
args = append(args, o.StreamType.Args(codec, videoFilter, videoOnly)...)
|
||||
|
||||
|
|
|
|||
|
|
@ -23,6 +23,7 @@ type Movie struct {
|
|||
BackImage string `json:"back_image,omitempty"`
|
||||
URLs []string `json:"urls,omitempty"`
|
||||
Studio string `json:"studio,omitempty"`
|
||||
Tags []string `json:"tags,omitempty"`
|
||||
CreatedAt json.JSONTime `json:"created_at,omitempty"`
|
||||
UpdatedAt json.JSONTime `json:"updated_at,omitempty"`
|
||||
|
||||
|
|
|
|||
|
|
@ -34,16 +34,14 @@ func (s *StringOrStringList) UnmarshalJSON(data []byte) error {
|
|||
}
|
||||
|
||||
type Performer struct {
|
||||
Name string `json:"name,omitempty"`
|
||||
Disambiguation string `json:"disambiguation,omitempty"`
|
||||
Gender string `json:"gender,omitempty"`
|
||||
URL string `json:"url,omitempty"`
|
||||
Twitter string `json:"twitter,omitempty"`
|
||||
Instagram string `json:"instagram,omitempty"`
|
||||
Birthdate string `json:"birthdate,omitempty"`
|
||||
Ethnicity string `json:"ethnicity,omitempty"`
|
||||
Country string `json:"country,omitempty"`
|
||||
EyeColor string `json:"eye_color,omitempty"`
|
||||
Name string `json:"name,omitempty"`
|
||||
Disambiguation string `json:"disambiguation,omitempty"`
|
||||
Gender string `json:"gender,omitempty"`
|
||||
URLs []string `json:"urls,omitempty"`
|
||||
Birthdate string `json:"birthdate,omitempty"`
|
||||
Ethnicity string `json:"ethnicity,omitempty"`
|
||||
Country string `json:"country,omitempty"`
|
||||
EyeColor string `json:"eye_color,omitempty"`
|
||||
// this should be int, but keeping string for backwards compatibility
|
||||
Height string `json:"height,omitempty"`
|
||||
Measurements string `json:"measurements,omitempty"`
|
||||
|
|
@ -66,6 +64,11 @@ type Performer struct {
|
|||
Weight int `json:"weight,omitempty"`
|
||||
StashIDs []models.StashID `json:"stash_ids,omitempty"`
|
||||
IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"`
|
||||
|
||||
// deprecated - for import only
|
||||
URL string `json:"url,omitempty"`
|
||||
Twitter string `json:"twitter,omitempty"`
|
||||
Instagram string `json:"instagram,omitempty"`
|
||||
}
|
||||
|
||||
func (s Performer) Filename() string {
|
||||
|
|
|
|||
|
|
@ -22,6 +22,7 @@ type Studio struct {
|
|||
Details string `json:"details,omitempty"`
|
||||
Aliases []string `json:"aliases,omitempty"`
|
||||
StashIDs []models.StashID `json:"stash_ids,omitempty"`
|
||||
Tags []string `json:"tags,omitempty"`
|
||||
IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"`
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -312,6 +312,29 @@ func (_m *MovieReaderWriter) GetFrontImage(ctx context.Context, movieID int) ([]
|
|||
return r0, r1
|
||||
}
|
||||
|
||||
// GetTagIDs provides a mock function with given fields: ctx, relatedID
|
||||
func (_m *MovieReaderWriter) GetTagIDs(ctx context.Context, relatedID int) ([]int, error) {
|
||||
ret := _m.Called(ctx, relatedID)
|
||||
|
||||
var r0 []int
|
||||
if rf, ok := ret.Get(0).(func(context.Context, int) []int); ok {
|
||||
r0 = rf(ctx, relatedID)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).([]int)
|
||||
}
|
||||
}
|
||||
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
|
||||
r1 = rf(ctx, relatedID)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// GetURLs provides a mock function with given fields: ctx, relatedID
|
||||
func (_m *MovieReaderWriter) GetURLs(ctx context.Context, relatedID int) ([]string, error) {
|
||||
ret := _m.Called(ctx, relatedID)
|
||||
|
|
|
|||
|
|
@ -383,6 +383,29 @@ func (_m *PerformerReaderWriter) GetTagIDs(ctx context.Context, relatedID int) (
|
|||
return r0, r1
|
||||
}
|
||||
|
||||
// GetURLs provides a mock function with given fields: ctx, relatedID
|
||||
func (_m *PerformerReaderWriter) GetURLs(ctx context.Context, relatedID int) ([]string, error) {
|
||||
ret := _m.Called(ctx, relatedID)
|
||||
|
||||
var r0 []string
|
||||
if rf, ok := ret.Get(0).(func(context.Context, int) []string); ok {
|
||||
r0 = rf(ctx, relatedID)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).([]string)
|
||||
}
|
||||
}
|
||||
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
|
||||
r1 = rf(ctx, relatedID)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// HasImage provides a mock function with given fields: ctx, performerID
|
||||
func (_m *PerformerReaderWriter) HasImage(ctx context.Context, performerID int) (bool, error) {
|
||||
ret := _m.Called(ctx, performerID)
|
||||
|
|
|
|||
|
|
@ -58,6 +58,27 @@ func (_m *StudioReaderWriter) Count(ctx context.Context) (int, error) {
|
|||
return r0, r1
|
||||
}
|
||||
|
||||
// CountByTagID provides a mock function with given fields: ctx, tagID
|
||||
func (_m *StudioReaderWriter) CountByTagID(ctx context.Context, tagID int) (int, error) {
|
||||
ret := _m.Called(ctx, tagID)
|
||||
|
||||
var r0 int
|
||||
if rf, ok := ret.Get(0).(func(context.Context, int) int); ok {
|
||||
r0 = rf(ctx, tagID)
|
||||
} else {
|
||||
r0 = ret.Get(0).(int)
|
||||
}
|
||||
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
|
||||
r1 = rf(ctx, tagID)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// Create provides a mock function with given fields: ctx, newStudio
|
||||
func (_m *StudioReaderWriter) Create(ctx context.Context, newStudio *models.Studio) error {
|
||||
ret := _m.Called(ctx, newStudio)
|
||||
|
|
@ -316,6 +337,29 @@ func (_m *StudioReaderWriter) GetStashIDs(ctx context.Context, relatedID int) ([
|
|||
return r0, r1
|
||||
}
|
||||
|
||||
// GetTagIDs provides a mock function with given fields: ctx, relatedID
|
||||
func (_m *StudioReaderWriter) GetTagIDs(ctx context.Context, relatedID int) ([]int, error) {
|
||||
ret := _m.Called(ctx, relatedID)
|
||||
|
||||
var r0 []int
|
||||
if rf, ok := ret.Get(0).(func(context.Context, int) []int); ok {
|
||||
r0 = rf(ctx, relatedID)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).([]int)
|
||||
}
|
||||
}
|
||||
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
|
||||
r1 = rf(ctx, relatedID)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// HasImage provides a mock function with given fields: ctx, studioID
|
||||
func (_m *StudioReaderWriter) HasImage(ctx context.Context, studioID int) (bool, error) {
|
||||
ret := _m.Called(ctx, studioID)
|
||||
|
|
@ -367,6 +411,27 @@ func (_m *StudioReaderWriter) Query(ctx context.Context, studioFilter *models.St
|
|||
return r0, r1, r2
|
||||
}
|
||||
|
||||
// QueryCount provides a mock function with given fields: ctx, studioFilter, findFilter
|
||||
func (_m *StudioReaderWriter) QueryCount(ctx context.Context, studioFilter *models.StudioFilterType, findFilter *models.FindFilterType) (int, error) {
|
||||
ret := _m.Called(ctx, studioFilter, findFilter)
|
||||
|
||||
var r0 int
|
||||
if rf, ok := ret.Get(0).(func(context.Context, *models.StudioFilterType, *models.FindFilterType) int); ok {
|
||||
r0 = rf(ctx, studioFilter, findFilter)
|
||||
} else {
|
||||
r0 = ret.Get(0).(int)
|
||||
}
|
||||
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(1).(func(context.Context, *models.StudioFilterType, *models.FindFilterType) error); ok {
|
||||
r1 = rf(ctx, studioFilter, findFilter)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// QueryForAutoTag provides a mock function with given fields: ctx, words
|
||||
func (_m *StudioReaderWriter) QueryForAutoTag(ctx context.Context, words []string) ([]*models.Studio, error) {
|
||||
ret := _m.Called(ctx, words)
|
||||
|
|
|
|||
|
|
@ -266,6 +266,29 @@ func (_m *TagReaderWriter) FindByImageID(ctx context.Context, imageID int) ([]*m
|
|||
return r0, r1
|
||||
}
|
||||
|
||||
// FindByMovieID provides a mock function with given fields: ctx, movieID
|
||||
func (_m *TagReaderWriter) FindByMovieID(ctx context.Context, movieID int) ([]*models.Tag, error) {
|
||||
ret := _m.Called(ctx, movieID)
|
||||
|
||||
var r0 []*models.Tag
|
||||
if rf, ok := ret.Get(0).(func(context.Context, int) []*models.Tag); ok {
|
||||
r0 = rf(ctx, movieID)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).([]*models.Tag)
|
||||
}
|
||||
}
|
||||
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
|
||||
r1 = rf(ctx, movieID)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// FindByName provides a mock function with given fields: ctx, name, nocase
|
||||
func (_m *TagReaderWriter) FindByName(ctx context.Context, name string, nocase bool) (*models.Tag, error) {
|
||||
ret := _m.Called(ctx, name, nocase)
|
||||
|
|
@ -404,6 +427,29 @@ func (_m *TagReaderWriter) FindBySceneMarkerID(ctx context.Context, sceneMarkerI
|
|||
return r0, r1
|
||||
}
|
||||
|
||||
// FindByStudioID provides a mock function with given fields: ctx, studioID
|
||||
func (_m *TagReaderWriter) FindByStudioID(ctx context.Context, studioID int) ([]*models.Tag, error) {
|
||||
ret := _m.Called(ctx, studioID)
|
||||
|
||||
var r0 []*models.Tag
|
||||
if rf, ok := ret.Get(0).(func(context.Context, int) []*models.Tag); ok {
|
||||
r0 = rf(ctx, studioID)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).([]*models.Tag)
|
||||
}
|
||||
}
|
||||
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(1).(func(context.Context, int) error); ok {
|
||||
r1 = rf(ctx, studioID)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// FindMany provides a mock function with given fields: ctx, ids
|
||||
func (_m *TagReaderWriter) FindMany(ctx context.Context, ids []int) ([]*models.Tag, error) {
|
||||
ret := _m.Called(ctx, ids)
|
||||
|
|
|
|||
|
|
@ -19,7 +19,8 @@ type Movie struct {
|
|||
CreatedAt time.Time `json:"created_at"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
|
||||
URLs RelatedStrings `json:"urls"`
|
||||
URLs RelatedStrings `json:"urls"`
|
||||
TagIDs RelatedIDs `json:"tag_ids"`
|
||||
}
|
||||
|
||||
func NewMovie() Movie {
|
||||
|
|
@ -30,9 +31,15 @@ func NewMovie() Movie {
|
|||
}
|
||||
}
|
||||
|
||||
func (g *Movie) LoadURLs(ctx context.Context, l URLLoader) error {
|
||||
return g.URLs.load(func() ([]string, error) {
|
||||
return l.GetURLs(ctx, g.ID)
|
||||
func (m *Movie) LoadURLs(ctx context.Context, l URLLoader) error {
|
||||
return m.URLs.load(func() ([]string, error) {
|
||||
return l.GetURLs(ctx, m.ID)
|
||||
})
|
||||
}
|
||||
|
||||
func (m *Movie) LoadTagIDs(ctx context.Context, l TagIDLoader) error {
|
||||
return m.TagIDs.load(func() ([]int, error) {
|
||||
return l.GetTagIDs(ctx, m.ID)
|
||||
})
|
||||
}
|
||||
|
||||
|
|
@ -47,6 +54,7 @@ type MoviePartial struct {
|
|||
Director OptionalString
|
||||
Synopsis OptionalString
|
||||
URLs *UpdateStrings
|
||||
TagIDs *UpdateIDs
|
||||
CreatedAt OptionalTime
|
||||
UpdatedAt OptionalTime
|
||||
}
|
||||
|
|
|
|||
|
|
@ -10,9 +10,6 @@ type Performer struct {
|
|||
Name string `json:"name"`
|
||||
Disambiguation string `json:"disambiguation"`
|
||||
Gender *GenderEnum `json:"gender"`
|
||||
URL string `json:"url"`
|
||||
Twitter string `json:"twitter"`
|
||||
Instagram string `json:"instagram"`
|
||||
Birthdate *Date `json:"birthdate"`
|
||||
Ethnicity string `json:"ethnicity"`
|
||||
Country string `json:"country"`
|
||||
|
|
@ -37,6 +34,7 @@ type Performer struct {
|
|||
IgnoreAutoTag bool `json:"ignore_auto_tag"`
|
||||
|
||||
Aliases RelatedStrings `json:"aliases"`
|
||||
URLs RelatedStrings `json:"urls"`
|
||||
TagIDs RelatedIDs `json:"tag_ids"`
|
||||
StashIDs RelatedStashIDs `json:"stash_ids"`
|
||||
}
|
||||
|
|
@ -55,9 +53,7 @@ type PerformerPartial struct {
|
|||
Name OptionalString
|
||||
Disambiguation OptionalString
|
||||
Gender OptionalString
|
||||
URL OptionalString
|
||||
Twitter OptionalString
|
||||
Instagram OptionalString
|
||||
URLs *UpdateStrings
|
||||
Birthdate OptionalDate
|
||||
Ethnicity OptionalString
|
||||
Country OptionalString
|
||||
|
|
@ -99,6 +95,12 @@ func (s *Performer) LoadAliases(ctx context.Context, l AliasLoader) error {
|
|||
})
|
||||
}
|
||||
|
||||
func (s *Performer) LoadURLs(ctx context.Context, l URLLoader) error {
|
||||
return s.URLs.load(func() ([]string, error) {
|
||||
return l.GetURLs(ctx, s.ID)
|
||||
})
|
||||
}
|
||||
|
||||
func (s *Performer) LoadTagIDs(ctx context.Context, l TagIDLoader) error {
|
||||
return s.TagIDs.load(func() ([]int, error) {
|
||||
return l.GetTagIDs(ctx, s.ID)
|
||||
|
|
|
|||
|
|
@ -62,9 +62,9 @@ func (s *ScrapedStudio) GetImage(ctx context.Context, excluded map[string]bool)
|
|||
return nil, nil
|
||||
}
|
||||
|
||||
func (s *ScrapedStudio) ToPartial(id *string, endpoint string, excluded map[string]bool, existingStashIDs []StashID) *StudioPartial {
|
||||
func (s *ScrapedStudio) ToPartial(id string, endpoint string, excluded map[string]bool, existingStashIDs []StashID) StudioPartial {
|
||||
ret := NewStudioPartial()
|
||||
ret.ID, _ = strconv.Atoi(*id)
|
||||
ret.ID, _ = strconv.Atoi(id)
|
||||
|
||||
if s.Name != "" && !excluded["name"] {
|
||||
ret.Name = NewOptionalString(s.Name)
|
||||
|
|
@ -82,8 +82,6 @@ func (s *ScrapedStudio) ToPartial(id *string, endpoint string, excluded map[stri
|
|||
ret.ParentID = NewOptionalInt(parentID)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
ret.ParentID = NewOptionalIntPtr(nil)
|
||||
}
|
||||
|
||||
if s.RemoteSiteID != nil && endpoint != "" {
|
||||
|
|
@ -97,7 +95,7 @@ func (s *ScrapedStudio) ToPartial(id *string, endpoint string, excluded map[stri
|
|||
})
|
||||
}
|
||||
|
||||
return &ret
|
||||
return ret
|
||||
}
|
||||
|
||||
// A performer from a scraping operation...
|
||||
|
|
@ -107,9 +105,10 @@ type ScrapedPerformer struct {
|
|||
Name *string `json:"name"`
|
||||
Disambiguation *string `json:"disambiguation"`
|
||||
Gender *string `json:"gender"`
|
||||
URL *string `json:"url"`
|
||||
Twitter *string `json:"twitter"`
|
||||
Instagram *string `json:"instagram"`
|
||||
URLs []string `json:"urls"`
|
||||
URL *string `json:"url"` // deprecated
|
||||
Twitter *string `json:"twitter"` // deprecated
|
||||
Instagram *string `json:"instagram"` // deprecated
|
||||
Birthdate *string `json:"birthdate"`
|
||||
Ethnicity *string `json:"ethnicity"`
|
||||
Country *string `json:"country"`
|
||||
|
|
@ -191,9 +190,7 @@ func (p *ScrapedPerformer) ToPerformer(endpoint string, excluded map[string]bool
|
|||
ret.Weight = &w
|
||||
}
|
||||
}
|
||||
if p.Instagram != nil && !excluded["instagram"] {
|
||||
ret.Instagram = *p.Instagram
|
||||
}
|
||||
|
||||
if p.Measurements != nil && !excluded["measurements"] {
|
||||
ret.Measurements = *p.Measurements
|
||||
}
|
||||
|
|
@ -221,11 +218,27 @@ func (p *ScrapedPerformer) ToPerformer(endpoint string, excluded map[string]bool
|
|||
ret.Circumcised = &v
|
||||
}
|
||||
}
|
||||
if p.Twitter != nil && !excluded["twitter"] {
|
||||
ret.Twitter = *p.Twitter
|
||||
}
|
||||
if p.URL != nil && !excluded["url"] {
|
||||
ret.URL = *p.URL
|
||||
|
||||
// if URLs are provided, only use those
|
||||
if len(p.URLs) > 0 {
|
||||
if !excluded["urls"] {
|
||||
ret.URLs = NewRelatedStrings(p.URLs)
|
||||
}
|
||||
} else {
|
||||
urls := []string{}
|
||||
if p.URL != nil && !excluded["url"] {
|
||||
urls = append(urls, *p.URL)
|
||||
}
|
||||
if p.Twitter != nil && !excluded["twitter"] {
|
||||
urls = append(urls, *p.Twitter)
|
||||
}
|
||||
if p.Instagram != nil && !excluded["instagram"] {
|
||||
urls = append(urls, *p.Instagram)
|
||||
}
|
||||
|
||||
if len(urls) > 0 {
|
||||
ret.URLs = NewRelatedStrings(urls)
|
||||
}
|
||||
}
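The same legacy-field fallback appears again in ToPartial further below. A hedged sketch of the shared idea (legacyURLs is illustrative, not a helper in the patch): when a scraper only supplies the deprecated url/twitter/instagram fields, they are folded, in that order, into the new multi-URL list.

```go
// Illustrative only: folding deprecated single-value fields into a URL list.
func legacyURLs(url, twitter, instagram *string, excluded map[string]bool) []string {
	var urls []string
	if url != nil && !excluded["url"] {
		urls = append(urls, *url)
	}
	if twitter != nil && !excluded["twitter"] {
		urls = append(urls, *twitter)
	}
	if instagram != nil && !excluded["instagram"] {
		urls = append(urls, *instagram)
	}
	return urls
}
```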
|
||||
|
||||
if p.RemoteSiteID != nil && endpoint != "" {
|
||||
|
|
@ -309,9 +322,6 @@ func (p *ScrapedPerformer) ToPartial(endpoint string, excluded map[string]bool,
|
|||
ret.Weight = NewOptionalInt(w)
|
||||
}
|
||||
}
|
||||
if p.Instagram != nil && !excluded["instagram"] {
|
||||
ret.Instagram = NewOptionalString(*p.Instagram)
|
||||
}
|
||||
if p.Measurements != nil && !excluded["measurements"] {
|
||||
ret.Measurements = NewOptionalString(*p.Measurements)
|
||||
}
|
||||
|
|
@ -330,11 +340,33 @@ func (p *ScrapedPerformer) ToPartial(endpoint string, excluded map[string]bool,
|
|||
if p.Tattoos != nil && !excluded["tattoos"] {
|
||||
ret.Tattoos = NewOptionalString(*p.Tattoos)
|
||||
}
|
||||
if p.Twitter != nil && !excluded["twitter"] {
|
||||
ret.Twitter = NewOptionalString(*p.Twitter)
|
||||
}
|
||||
if p.URL != nil && !excluded["url"] {
|
||||
ret.URL = NewOptionalString(*p.URL)
|
||||
|
||||
// if URLs are provided, only use those
|
||||
if len(p.URLs) > 0 {
|
||||
if !excluded["urls"] {
|
||||
ret.URLs = &UpdateStrings{
|
||||
Values: p.URLs,
|
||||
Mode: RelationshipUpdateModeSet,
|
||||
}
|
||||
}
|
||||
} else {
|
||||
urls := []string{}
|
||||
if p.URL != nil && !excluded["url"] {
|
||||
urls = append(urls, *p.URL)
|
||||
}
|
||||
if p.Twitter != nil && !excluded["twitter"] {
|
||||
urls = append(urls, *p.Twitter)
|
||||
}
|
||||
if p.Instagram != nil && !excluded["instagram"] {
|
||||
urls = append(urls, *p.Instagram)
|
||||
}
|
||||
|
||||
if len(urls) > 0 {
|
||||
ret.URLs = &UpdateStrings{
|
||||
Values: urls,
|
||||
Mode: RelationshipUpdateModeSet,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if p.RemoteSiteID != nil && endpoint != "" {
|
||||
|
|
@ -371,6 +403,7 @@ type ScrapedMovie struct {
|
|||
URLs []string `json:"urls"`
|
||||
Synopsis *string `json:"synopsis"`
|
||||
Studio *ScrapedStudio `json:"studio"`
|
||||
Tags []*ScrapedTag `json:"tags"`
|
||||
// This should be a base64 encoded data URL
|
||||
FrontImage *string `json:"front_image"`
|
||||
// This should be a base64 encoded data URL
|
||||
|
|
|
|||
|
|
@ -161,9 +161,9 @@ func Test_scrapedToPerformerInput(t *testing.T) {
|
|||
Tattoos: nextVal(),
|
||||
Piercings: nextVal(),
|
||||
Aliases: nextVal(),
|
||||
URL: nextVal(),
|
||||
Twitter: nextVal(),
|
||||
Instagram: nextVal(),
|
||||
URL: nextVal(),
|
||||
Details: nextVal(),
|
||||
RemoteSiteID: &remoteSiteID,
|
||||
},
|
||||
|
|
@ -186,9 +186,7 @@ func Test_scrapedToPerformerInput(t *testing.T) {
|
|||
Tattoos: *nextVal(),
|
||||
Piercings: *nextVal(),
|
||||
Aliases: NewRelatedStrings([]string{*nextVal()}),
|
||||
Twitter: *nextVal(),
|
||||
Instagram: *nextVal(),
|
||||
URL: *nextVal(),
|
||||
URLs: NewRelatedStrings([]string{*nextVal(), *nextVal(), *nextVal()}),
|
||||
Details: *nextVal(),
|
||||
StashIDs: NewRelatedStashIDs([]StashID{
|
||||
{
|
||||
|
|
@ -249,3 +247,123 @@ func Test_scrapedToPerformerInput(t *testing.T) {
|
|||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestScrapedStudio_ToPartial(t *testing.T) {
|
||||
var (
|
||||
id = 1000
|
||||
idStr = strconv.Itoa(id)
|
||||
storedID = "storedID"
|
||||
parentStoredID = 2000
|
||||
parentStoredIDStr = strconv.Itoa(parentStoredID)
|
||||
name = "name"
|
||||
url = "url"
|
||||
remoteSiteID = "remoteSiteID"
|
||||
endpoint = "endpoint"
|
||||
image = "image"
|
||||
images = []string{image}
|
||||
|
||||
existingEndpoint = "existingEndpoint"
|
||||
existingStashID = StashID{"existingStashID", existingEndpoint}
|
||||
existingStashIDs = []StashID{existingStashID}
|
||||
)
|
||||
|
||||
fullStudio := ScrapedStudio{
|
||||
StoredID: &storedID,
|
||||
Name: name,
|
||||
URL: &url,
|
||||
Parent: &ScrapedStudio{
|
||||
StoredID: &parentStoredIDStr,
|
||||
},
|
||||
Image: &image,
|
||||
Images: images,
|
||||
RemoteSiteID: &remoteSiteID,
|
||||
}
|
||||
|
||||
type args struct {
|
||||
id string
|
||||
endpoint string
|
||||
excluded map[string]bool
|
||||
existingStashIDs []StashID
|
||||
}
|
||||
|
||||
stdArgs := args{
|
||||
id: idStr,
|
||||
endpoint: endpoint,
|
||||
excluded: map[string]bool{},
|
||||
existingStashIDs: existingStashIDs,
|
||||
}
|
||||
|
||||
excludeAll := map[string]bool{
|
||||
"name": true,
|
||||
"url": true,
|
||||
"parent": true,
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
o ScrapedStudio
|
||||
args args
|
||||
want StudioPartial
|
||||
}{
|
||||
{
|
||||
"full no exclusions",
|
||||
fullStudio,
|
||||
stdArgs,
|
||||
StudioPartial{
|
||||
ID: id,
|
||||
Name: NewOptionalString(name),
|
||||
URL: NewOptionalString(url),
|
||||
ParentID: NewOptionalInt(parentStoredID),
|
||||
StashIDs: &UpdateStashIDs{
|
||||
StashIDs: append(existingStashIDs, StashID{
|
||||
Endpoint: endpoint,
|
||||
StashID: remoteSiteID,
|
||||
}),
|
||||
Mode: RelationshipUpdateModeSet,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
"exclude all",
|
||||
fullStudio,
|
||||
args{
|
||||
id: idStr,
|
||||
excluded: excludeAll,
|
||||
},
|
||||
StudioPartial{
|
||||
ID: id,
|
||||
},
|
||||
},
|
||||
{
|
||||
"overwrite stash id",
|
||||
fullStudio,
|
||||
args{
|
||||
id: idStr,
|
||||
excluded: excludeAll,
|
||||
endpoint: existingEndpoint,
|
||||
existingStashIDs: existingStashIDs,
|
||||
},
|
||||
StudioPartial{
|
||||
ID: id,
|
||||
StashIDs: &UpdateStashIDs{
|
||||
StashIDs: []StashID{{
|
||||
Endpoint: existingEndpoint,
|
||||
StashID: remoteSiteID,
|
||||
}},
|
||||
Mode: RelationshipUpdateModeSet,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
s := tt.o
|
||||
got := s.ToPartial(tt.args.id, tt.args.endpoint, tt.args.excluded, tt.args.existingStashIDs)
|
||||
|
||||
// unset updatedAt - we don't need to compare it
|
||||
got.UpdatedAt = OptionalTime{}
|
||||
|
||||
assert.Equal(t, tt.want, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -19,6 +19,7 @@ type Studio struct {
|
|||
IgnoreAutoTag bool `json:"ignore_auto_tag"`
|
||||
|
||||
Aliases RelatedStrings `json:"aliases"`
|
||||
TagIDs RelatedIDs `json:"tag_ids"`
|
||||
StashIDs RelatedStashIDs `json:"stash_ids"`
|
||||
}
|
||||
|
||||
|
|
@ -45,6 +46,7 @@ type StudioPartial struct {
|
|||
IgnoreAutoTag OptionalBool
|
||||
|
||||
Aliases *UpdateStrings
|
||||
TagIDs *UpdateIDs
|
||||
StashIDs *UpdateStashIDs
|
||||
}
|
||||
|
||||
|
|
@ -61,6 +63,12 @@ func (s *Studio) LoadAliases(ctx context.Context, l AliasLoader) error {
|
|||
})
|
||||
}
|
||||
|
||||
func (s *Studio) LoadTagIDs(ctx context.Context, l TagIDLoader) error {
|
||||
return s.TagIDs.load(func() ([]int, error) {
|
||||
return l.GetTagIDs(ctx, s.ID)
|
||||
})
|
||||
}
|
||||
|
||||
func (s *Studio) LoadStashIDs(ctx context.Context, l StashIDLoader) error {
|
||||
return s.StashIDs.load(func() ([]StashID, error) {
|
||||
return l.GetStashIDs(ctx, s.ID)
|
||||
|
|
@ -72,6 +80,10 @@ func (s *Studio) LoadRelationships(ctx context.Context, l PerformerReader) error
|
|||
return err
|
||||
}
|
||||
|
||||
if err := s.LoadTagIDs(ctx, l); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := s.LoadStashIDs(ctx, l); err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
|
|||
|
|
@ -17,6 +17,10 @@ type MovieFilterType struct {
|
|||
URL *StringCriterionInput `json:"url"`
|
||||
// Filter to only include movies where performer appears in a scene
|
||||
Performers *MultiCriterionInput `json:"performers"`
|
||||
// Filter to only include movies with these tags
|
||||
Tags *HierarchicalMultiCriterionInput `json:"tags"`
|
||||
// Filter by tag count
|
||||
TagCount *IntCriterionInput `json:"tag_count"`
|
||||
// Filter by date
|
||||
Date *DateCriterionInput `json:"date"`
|
||||
// Filter by related scenes that meet this criteria
|
||||
|
|
|
|||
|
|
@ -203,7 +203,8 @@ type PerformerFilterType struct {
|
|||
type PerformerCreateInput struct {
|
||||
Name string `json:"name"`
|
||||
Disambiguation *string `json:"disambiguation"`
|
||||
URL *string `json:"url"`
|
||||
URL *string `json:"url"` // deprecated
|
||||
Urls []string `json:"urls"`
|
||||
Gender *GenderEnum `json:"gender"`
|
||||
Birthdate *string `json:"birthdate"`
|
||||
Ethnicity *string `json:"ethnicity"`
|
||||
|
|
@ -220,8 +221,8 @@ type PerformerCreateInput struct {
|
|||
Piercings *string `json:"piercings"`
|
||||
Aliases *string `json:"aliases"`
|
||||
AliasList []string `json:"alias_list"`
|
||||
Twitter *string `json:"twitter"`
|
||||
Instagram *string `json:"instagram"`
|
||||
Twitter *string `json:"twitter"` // deprecated
|
||||
Instagram *string `json:"instagram"` // deprecated
|
||||
Favorite *bool `json:"favorite"`
|
||||
TagIds []string `json:"tag_ids"`
|
||||
// This should be a URL or a base64 encoded data URL
|
||||
|
|
@ -239,7 +240,8 @@ type PerformerUpdateInput struct {
|
|||
ID string `json:"id"`
|
||||
Name *string `json:"name"`
|
||||
Disambiguation *string `json:"disambiguation"`
|
||||
URL *string `json:"url"`
|
||||
URL *string `json:"url"` // deprecated
|
||||
Urls []string `json:"urls"`
|
||||
Gender *GenderEnum `json:"gender"`
|
||||
Birthdate *string `json:"birthdate"`
|
||||
Ethnicity *string `json:"ethnicity"`
|
||||
|
|
@ -256,8 +258,8 @@ type PerformerUpdateInput struct {
|
|||
Piercings *string `json:"piercings"`
|
||||
Aliases *string `json:"aliases"`
|
||||
AliasList []string `json:"alias_list"`
|
||||
Twitter *string `json:"twitter"`
|
||||
Instagram *string `json:"instagram"`
|
||||
Twitter *string `json:"twitter"` // deprecated
|
||||
Instagram *string `json:"instagram"` // deprecated
|
||||
Favorite *bool `json:"favorite"`
|
||||
TagIds []string `json:"tag_ids"`
|
||||
// This should be a URL or a base64 encoded data URL
|
||||
|
|
|
|||
|
|
@ -65,6 +65,7 @@ type MovieReader interface {
|
|||
MovieQueryer
|
||||
MovieCounter
|
||||
URLLoader
|
||||
TagIDLoader
|
||||
|
||||
All(ctx context.Context) ([]*Movie, error)
|
||||
GetFrontImage(ctx context.Context, movieID int) ([]byte, error)
|
||||
|
|
|
|||
|
|
@ -78,6 +78,7 @@ type PerformerReader interface {
|
|||
AliasLoader
|
||||
StashIDLoader
|
||||
TagIDLoader
|
||||
URLLoader
|
||||
|
||||
All(ctx context.Context) ([]*Performer, error)
|
||||
GetImage(ctx context.Context, performerID int) ([]byte, error)
|
||||
|
|
|
|||
|
|
@ -22,6 +22,7 @@ type StudioFinder interface {
|
|||
// StudioQueryer provides methods to query studios.
|
||||
type StudioQueryer interface {
|
||||
Query(ctx context.Context, studioFilter *StudioFilterType, findFilter *FindFilterType) ([]*Studio, int, error)
|
||||
QueryCount(ctx context.Context, studioFilter *StudioFilterType, findFilter *FindFilterType) (int, error)
|
||||
}
|
||||
|
||||
type StudioAutoTagQueryer interface {
|
||||
|
|
@ -36,6 +37,7 @@ type StudioAutoTagQueryer interface {
|
|||
// StudioCounter provides methods to count studios.
|
||||
type StudioCounter interface {
|
||||
Count(ctx context.Context) (int, error)
|
||||
CountByTagID(ctx context.Context, tagID int) (int, error)
|
||||
}
|
||||
|
||||
// StudioCreator provides methods to create studios.
|
||||
|
|
@ -74,6 +76,7 @@ type StudioReader interface {
|
|||
|
||||
AliasLoader
|
||||
StashIDLoader
|
||||
TagIDLoader
|
||||
|
||||
All(ctx context.Context) ([]*Studio, error)
|
||||
GetImage(ctx context.Context, studioID int) ([]byte, error)
|
||||
|
|
|
|||
|
|
@ -20,7 +20,9 @@ type TagFinder interface {
|
|||
FindByImageID(ctx context.Context, imageID int) ([]*Tag, error)
|
||||
FindByGalleryID(ctx context.Context, galleryID int) ([]*Tag, error)
|
||||
FindByPerformerID(ctx context.Context, performerID int) ([]*Tag, error)
|
||||
FindByMovieID(ctx context.Context, movieID int) ([]*Tag, error)
|
||||
FindBySceneMarkerID(ctx context.Context, sceneMarkerID int) ([]*Tag, error)
|
||||
FindByStudioID(ctx context.Context, studioID int) ([]*Tag, error)
|
||||
FindByName(ctx context.Context, name string, nocase bool) (*Tag, error)
|
||||
FindByNames(ctx context.Context, names []string, nocase bool) ([]*Tag, error)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -7,13 +7,11 @@ type SavedFilterReader interface {
|
|||
Find(ctx context.Context, id int) (*SavedFilter, error)
|
||||
FindMany(ctx context.Context, ids []int, ignoreNotFound bool) ([]*SavedFilter, error)
|
||||
FindByMode(ctx context.Context, mode FilterMode) ([]*SavedFilter, error)
|
||||
FindDefault(ctx context.Context, mode FilterMode) (*SavedFilter, error)
|
||||
}
|
||||
|
||||
type SavedFilterWriter interface {
|
||||
Create(ctx context.Context, obj *SavedFilter) error
|
||||
Update(ctx context.Context, obj *SavedFilter) error
|
||||
SetDefault(ctx context.Context, obj *SavedFilter) error
|
||||
Destroy(ctx context.Context, id int) error
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -14,6 +14,10 @@ type StudioFilterType struct {
|
|||
IsMissing *string `json:"is_missing"`
|
||||
// Filter by rating expressed as 1-100
|
||||
Rating100 *IntCriterionInput `json:"rating100"`
|
||||
// Filter to only include studios with these tags
|
||||
Tags *HierarchicalMultiCriterionInput `json:"tags"`
|
||||
// Filter by tag count
|
||||
TagCount *IntCriterionInput `json:"tag_count"`
|
||||
// Filter by favorite
|
||||
Favorite *bool `json:"favorite"`
|
||||
// Filter by scene count
|
||||
|
|
@ -53,6 +57,7 @@ type StudioCreateInput struct {
|
|||
Favorite *bool `json:"favorite"`
|
||||
Details *string `json:"details"`
|
||||
Aliases []string `json:"aliases"`
|
||||
TagIds []string `json:"tag_ids"`
|
||||
IgnoreAutoTag *bool `json:"ignore_auto_tag"`
|
||||
}
|
||||
|
||||
|
|
@ -68,5 +73,6 @@ type StudioUpdateInput struct {
|
|||
Favorite *bool `json:"favorite"`
|
||||
Details *string `json:"details"`
|
||||
Aliases []string `json:"aliases"`
|
||||
TagIds []string `json:"tag_ids"`
|
||||
IgnoreAutoTag *bool `json:"ignore_auto_tag"`
|
||||
}
|
||||
|
|
|
|||
|
|
@ -20,6 +20,10 @@ type TagFilterType struct {
|
|||
GalleryCount *IntCriterionInput `json:"gallery_count"`
|
||||
// Filter by number of performers with this tag
|
||||
PerformerCount *IntCriterionInput `json:"performer_count"`
|
||||
// Filter by number of studios with this tag
|
||||
StudioCount *IntCriterionInput `json:"studio_count"`
|
||||
// Filter by number of movies with this tag
|
||||
MovieCount *IntCriterionInput `json:"movie_count"`
|
||||
// Filter by number of markers with this tag
|
||||
MarkerCount *IntCriterionInput `json:"marker_count"`
|
||||
// Filter by parent tags
|
||||
|
|
|
|||
|
|
@ -3,9 +3,11 @@ package movie
|
|||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/models/jsonschema"
|
||||
"github.com/stashapp/stash/pkg/sliceutil"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
|
|
@ -17,6 +19,7 @@ type ImporterReaderWriter interface {
|
|||
type Importer struct {
|
||||
ReaderWriter ImporterReaderWriter
|
||||
StudioWriter models.StudioFinderCreator
|
||||
TagWriter models.TagFinderCreator
|
||||
Input jsonschema.Movie
|
||||
MissingRefBehaviour models.ImportMissingRefEnum
|
||||
|
||||
|
|
@ -32,6 +35,10 @@ func (i *Importer) PreImport(ctx context.Context) error {
|
|||
return err
|
||||
}
|
||||
|
||||
if err := i.populateTags(ctx); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var err error
|
||||
if len(i.Input.FrontImage) > 0 {
|
||||
i.frontImageData, err = utils.ProcessBase64Image(i.Input.FrontImage)
|
||||
|
|
@ -49,6 +56,74 @@ func (i *Importer) PreImport(ctx context.Context) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func (i *Importer) populateTags(ctx context.Context) error {
|
||||
if len(i.Input.Tags) > 0 {
|
||||
|
||||
tags, err := importTags(ctx, i.TagWriter, i.Input.Tags, i.MissingRefBehaviour)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, p := range tags {
|
||||
i.movie.TagIDs.Add(p.ID)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func importTags(ctx context.Context, tagWriter models.TagFinderCreator, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) {
|
||||
tags, err := tagWriter.FindByNames(ctx, names, false)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var pluckedNames []string
|
||||
for _, tag := range tags {
|
||||
pluckedNames = append(pluckedNames, tag.Name)
|
||||
}
|
||||
|
||||
missingTags := sliceutil.Filter(names, func(name string) bool {
|
||||
return !sliceutil.Contains(pluckedNames, name)
|
||||
})
|
||||
|
||||
if len(missingTags) > 0 {
|
||||
if missingRefBehaviour == models.ImportMissingRefEnumFail {
|
||||
return nil, fmt.Errorf("tags [%s] not found", strings.Join(missingTags, ", "))
|
||||
}
|
||||
|
||||
if missingRefBehaviour == models.ImportMissingRefEnumCreate {
|
||||
createdTags, err := createTags(ctx, tagWriter, missingTags)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error creating tags: %v", err)
|
||||
}
|
||||
|
||||
tags = append(tags, createdTags...)
|
||||
}
|
||||
|
||||
// ignore if MissingRefBehaviour set to Ignore
|
||||
}
|
||||
|
||||
return tags, nil
|
||||
}
|
||||
|
||||
func createTags(ctx context.Context, tagWriter models.TagFinderCreator, names []string) ([]*models.Tag, error) {
|
||||
var ret []*models.Tag
|
||||
for _, name := range names {
|
||||
newTag := models.NewTag()
|
||||
newTag.Name = name
|
||||
|
||||
err := tagWriter.Create(ctx, &newTag)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ret = append(ret, &newTag)
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func (i *Importer) movieJSONToMovie(movieJSON jsonschema.Movie) models.Movie {
|
||||
newMovie := models.Movie{
|
||||
Name: movieJSON.Name,
|
||||
|
|
@ -57,6 +132,8 @@ func (i *Importer) movieJSONToMovie(movieJSON jsonschema.Movie) models.Movie {
|
|||
Synopsis: movieJSON.Synopsis,
|
||||
CreatedAt: movieJSON.CreatedAt.GetTime(),
|
||||
UpdatedAt: movieJSON.UpdatedAt.GetTime(),
|
||||
|
||||
TagIDs: models.NewRelatedIDs([]int{}),
|
||||
}
|
||||
|
||||
if len(movieJSON.URLs) > 0 {
|
||||
|
|
|
|||
|
|
@ -26,6 +26,13 @@ const (
|
|||
missingStudioName = "existingStudioName"
|
||||
|
||||
errImageID = 3
|
||||
|
||||
existingTagID = 105
|
||||
errTagsID = 106
|
||||
|
||||
existingTagName = "existingTagName"
|
||||
existingTagErr = "existingTagErr"
|
||||
missingTagName = "missingTagName"
|
||||
)
|
||||
|
||||
var testCtx = context.Background()
|
||||
|
|
@ -157,6 +164,97 @@ func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) {
|
|||
db.AssertExpectations(t)
|
||||
}
|
||||
|
||||
func TestImporterPreImportWithTag(t *testing.T) {
|
||||
db := mocks.NewDatabase()
|
||||
|
||||
i := Importer{
|
||||
ReaderWriter: db.Movie,
|
||||
TagWriter: db.Tag,
|
||||
MissingRefBehaviour: models.ImportMissingRefEnumFail,
|
||||
Input: jsonschema.Movie{
|
||||
Tags: []string{
|
||||
existingTagName,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
db.Tag.On("FindByNames", testCtx, []string{existingTagName}, false).Return([]*models.Tag{
|
||||
{
|
||||
ID: existingTagID,
|
||||
Name: existingTagName,
|
||||
},
|
||||
}, nil).Once()
|
||||
db.Tag.On("FindByNames", testCtx, []string{existingTagErr}, false).Return(nil, errors.New("FindByNames error")).Once()
|
||||
|
||||
err := i.PreImport(testCtx)
|
||||
assert.Nil(t, err)
|
||||
assert.Equal(t, existingTagID, i.movie.TagIDs.List()[0])
|
||||
|
||||
i.Input.Tags = []string{existingTagErr}
|
||||
err = i.PreImport(testCtx)
|
||||
assert.NotNil(t, err)
|
||||
|
||||
db.AssertExpectations(t)
|
||||
}
|
||||
|
||||
func TestImporterPreImportWithMissingTag(t *testing.T) {
|
||||
db := mocks.NewDatabase()
|
||||
|
||||
i := Importer{
|
||||
ReaderWriter: db.Movie,
|
||||
TagWriter: db.Tag,
|
||||
Input: jsonschema.Movie{
|
||||
Tags: []string{
|
||||
missingTagName,
|
||||
},
|
||||
},
|
||||
MissingRefBehaviour: models.ImportMissingRefEnumFail,
|
||||
}
|
||||
|
||||
db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3)
|
||||
db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Run(func(args mock.Arguments) {
|
||||
t := args.Get(1).(*models.Tag)
|
||||
t.ID = existingTagID
|
||||
}).Return(nil)
|
||||
|
||||
err := i.PreImport(testCtx)
|
||||
assert.NotNil(t, err)
|
||||
|
||||
i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore
|
||||
err = i.PreImport(testCtx)
|
||||
assert.Nil(t, err)
|
||||
|
||||
i.MissingRefBehaviour = models.ImportMissingRefEnumCreate
|
||||
err = i.PreImport(testCtx)
|
||||
assert.Nil(t, err)
|
||||
assert.Equal(t, existingTagID, i.movie.TagIDs.List()[0])
|
||||
|
||||
db.AssertExpectations(t)
|
||||
}
|
||||
|
||||
func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) {
|
||||
db := mocks.NewDatabase()
|
||||
|
||||
i := Importer{
|
||||
ReaderWriter: db.Movie,
|
||||
TagWriter: db.Tag,
|
||||
Input: jsonschema.Movie{
|
||||
Tags: []string{
|
||||
missingTagName,
|
||||
},
|
||||
},
|
||||
MissingRefBehaviour: models.ImportMissingRefEnumCreate,
|
||||
}
|
||||
|
||||
db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once()
|
||||
db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Return(errors.New("Create error"))
|
||||
|
||||
err := i.PreImport(testCtx)
|
||||
assert.NotNil(t, err)
|
||||
|
||||
db.AssertExpectations(t)
|
||||
}
|
||||
|
||||
func TestImporterPostImport(t *testing.T) {
|
||||
db := mocks.NewDatabase()
|
||||
|
||||
|
|
|
|||
|
|
@ -18,3 +18,15 @@ func CountByStudioID(ctx context.Context, r models.MovieQueryer, id int, depth *
|
|||
|
||||
return r.QueryCount(ctx, filter, nil)
|
||||
}
|
||||
|
||||
func CountByTagID(ctx context.Context, r models.MovieQueryer, id int, depth *int) (int, error) {
|
||||
filter := &models.MovieFilterType{
|
||||
Tags: &models.HierarchicalMultiCriterionInput{
|
||||
Value: []string{strconv.Itoa(id)},
|
||||
Modifier: models.CriterionModifierIncludes,
|
||||
Depth: depth,
|
||||
},
|
||||
}
|
||||
|
||||
return r.QueryCount(ctx, filter, nil)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -16,6 +16,7 @@ type ImageAliasStashIDGetter interface {
|
|||
GetImage(ctx context.Context, performerID int) ([]byte, error)
|
||||
models.AliasLoader
|
||||
models.StashIDLoader
|
||||
models.URLLoader
|
||||
}
|
||||
|
||||
// ToJSON converts a Performer object into its JSON equivalent.
|
||||
|
|
@ -23,7 +24,6 @@ func ToJSON(ctx context.Context, reader ImageAliasStashIDGetter, performer *mode
|
|||
newPerformerJSON := jsonschema.Performer{
|
||||
Name: performer.Name,
|
||||
Disambiguation: performer.Disambiguation,
|
||||
URL: performer.URL,
|
||||
Ethnicity: performer.Ethnicity,
|
||||
Country: performer.Country,
|
||||
EyeColor: performer.EyeColor,
|
||||
|
|
@ -32,8 +32,6 @@ func ToJSON(ctx context.Context, reader ImageAliasStashIDGetter, performer *mode
|
|||
CareerLength: performer.CareerLength,
|
||||
Tattoos: performer.Tattoos,
|
||||
Piercings: performer.Piercings,
|
||||
Twitter: performer.Twitter,
|
||||
Instagram: performer.Instagram,
|
||||
Favorite: performer.Favorite,
|
||||
Details: performer.Details,
|
||||
HairColor: performer.HairColor,
|
||||
|
|
@ -78,6 +76,11 @@ func ToJSON(ctx context.Context, reader ImageAliasStashIDGetter, performer *mode
|
|||
|
||||
newPerformerJSON.Aliases = performer.Aliases.List()
|
||||
|
||||
if err := performer.LoadURLs(ctx, reader); err != nil {
|
||||
return nil, fmt.Errorf("loading performer urls: %w", err)
|
||||
}
|
||||
newPerformerJSON.URLs = performer.URLs.List()
|
||||
|
||||
if err := performer.LoadStashIDs(ctx, reader); err != nil {
|
||||
return nil, fmt.Errorf("loading performer stash ids: %w", err)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -77,7 +77,7 @@ func createFullPerformer(id int, name string) *models.Performer {
|
|||
ID: id,
|
||||
Name: name,
|
||||
Disambiguation: disambiguation,
|
||||
URL: url,
|
||||
URLs: models.NewRelatedStrings([]string{url, twitter, instagram}),
|
||||
Aliases: models.NewRelatedStrings(aliases),
|
||||
Birthdate: &birthDate,
|
||||
CareerLength: careerLength,
|
||||
|
|
@ -90,11 +90,9 @@ func createFullPerformer(id int, name string) *models.Performer {
|
|||
Favorite: true,
|
||||
Gender: &genderEnum,
|
||||
Height: &height,
|
||||
Instagram: instagram,
|
||||
Measurements: measurements,
|
||||
Piercings: piercings,
|
||||
Tattoos: tattoos,
|
||||
Twitter: twitter,
|
||||
CreatedAt: createTime,
|
||||
UpdatedAt: updateTime,
|
||||
Rating: &rating,
|
||||
|
|
@ -114,6 +112,7 @@ func createEmptyPerformer(id int) models.Performer {
|
|||
CreatedAt: createTime,
|
||||
UpdatedAt: updateTime,
|
||||
Aliases: models.NewRelatedStrings([]string{}),
|
||||
URLs: models.NewRelatedStrings([]string{}),
|
||||
TagIDs: models.NewRelatedIDs([]int{}),
|
||||
StashIDs: models.NewRelatedStashIDs([]models.StashID{}),
|
||||
}
|
||||
|
|
@ -123,7 +122,7 @@ func createFullJSONPerformer(name string, image string) *jsonschema.Performer {
|
|||
return &jsonschema.Performer{
|
||||
Name: name,
|
||||
Disambiguation: disambiguation,
|
||||
URL: url,
|
||||
URLs: []string{url, twitter, instagram},
|
||||
Aliases: aliases,
|
||||
Birthdate: birthDate.String(),
|
||||
CareerLength: careerLength,
|
||||
|
|
@ -136,11 +135,9 @@ func createFullJSONPerformer(name string, image string) *jsonschema.Performer {
|
|||
Favorite: true,
|
||||
Gender: gender,
|
||||
Height: strconv.Itoa(height),
|
||||
Instagram: instagram,
|
||||
Measurements: measurements,
|
||||
Piercings: piercings,
|
||||
Tattoos: tattoos,
|
||||
Twitter: twitter,
|
||||
CreatedAt: json.JSONTime{
|
||||
Time: createTime,
|
||||
},
|
||||
|
|
@ -161,6 +158,7 @@ func createFullJSONPerformer(name string, image string) *jsonschema.Performer {
|
|||
func createEmptyJSONPerformer() *jsonschema.Performer {
|
||||
return &jsonschema.Performer{
|
||||
Aliases: []string{},
|
||||
URLs: []string{},
|
||||
StashIDs: []models.StashID{},
|
||||
CreatedAt: json.JSONTime{
|
||||
Time: createTime,
|
||||
|
|
|
|||
|
|
@ -188,7 +188,6 @@ func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Perform
|
|||
newPerformer := models.Performer{
|
||||
Name: performerJSON.Name,
|
||||
Disambiguation: performerJSON.Disambiguation,
|
||||
URL: performerJSON.URL,
|
||||
Ethnicity: performerJSON.Ethnicity,
|
||||
Country: performerJSON.Country,
|
||||
EyeColor: performerJSON.EyeColor,
|
||||
|
|
@ -198,8 +197,6 @@ func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Perform
|
|||
Tattoos: performerJSON.Tattoos,
|
||||
Piercings: performerJSON.Piercings,
|
||||
Aliases: models.NewRelatedStrings(performerJSON.Aliases),
|
||||
Twitter: performerJSON.Twitter,
|
||||
Instagram: performerJSON.Instagram,
|
||||
Details: performerJSON.Details,
|
||||
HairColor: performerJSON.HairColor,
|
||||
Favorite: performerJSON.Favorite,
|
||||
|
|
@ -211,6 +208,25 @@ func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Perform
|
|||
StashIDs: models.NewRelatedStashIDs(performerJSON.StashIDs),
|
||||
}
|
||||
|
||||
if len(performerJSON.URLs) > 0 {
|
||||
newPerformer.URLs = models.NewRelatedStrings(performerJSON.URLs)
|
||||
} else {
|
||||
urls := []string{}
|
||||
if performerJSON.URL != "" {
|
||||
urls = append(urls, performerJSON.URL)
|
||||
}
|
||||
if performerJSON.Twitter != "" {
|
||||
urls = append(urls, performerJSON.Twitter)
|
||||
}
|
||||
if performerJSON.Instagram != "" {
|
||||
urls = append(urls, performerJSON.Instagram)
|
||||
}
|
||||
|
||||
if len(urls) > 0 {
|
||||
newPerformer.URLs = models.NewRelatedStrings(urls)
|
||||
}
|
||||
}
|
||||
|
||||
if performerJSON.Gender != "" {
|
||||
v := models.GenderEnum(performerJSON.Gender)
|
||||
newPerformer.Gender = &v
|
||||
|
|
|
18 pkg/performer/url.go Normal file
@ -0,0 +1,18 @@
|
|||
package performer
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
)
|
||||
|
||||
var (
|
||||
twitterURLRE = regexp.MustCompile(`^https?:\/\/(?:www\.)?twitter\.com\/`)
|
||||
instagramURLRE = regexp.MustCompile(`^https?:\/\/(?:www\.)?instagram\.com\/`)
|
||||
)
|
||||
|
||||
func IsTwitterURL(url string) bool {
|
||||
return twitterURLRE.MatchString(url)
|
||||
}
|
||||
|
||||
func IsInstagramURL(url string) bool {
|
||||
return instagramURLRE.MatchString(url)
|
||||
}
|
||||
|
|
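The new `pkg/performer/url.go` helpers above centralise the twitter/instagram URL matching. A minimal usage sketch, assuming only the two exported matchers shown in the diff (the `splitURLs` helper and the sample URLs are illustrative, not part of the change):

```go
package main

import (
	"fmt"

	"github.com/stashapp/stash/pkg/performer"
)

// splitURLs buckets a performer's URL list using the new matchers.
func splitURLs(urls []string) (twitter, instagram, other []string) {
	for _, u := range urls {
		switch {
		case performer.IsTwitterURL(u):
			twitter = append(twitter, u)
		case performer.IsInstagramURL(u):
			instagram = append(instagram, u)
		default:
			other = append(other, u)
		}
	}
	return
}

func main() {
	tw, ig, rest := splitURLs([]string{
		"https://twitter.com/example",
		"https://www.instagram.com/example",
		"https://example.com/about",
	})
	fmt.Println(tw, ig, rest)
}
```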
@ -102,11 +102,15 @@ func validateName(ctx context.Context, name string, disambig string, existingID
|
|||
},
|
||||
}
|
||||
|
||||
modifier := models.CriterionModifierIsNull
|
||||
|
||||
if disambig != "" {
|
||||
performerFilter.Disambiguation = &models.StringCriterionInput{
|
||||
Value: disambig,
|
||||
Modifier: models.CriterionModifierEquals,
|
||||
}
|
||||
modifier = models.CriterionModifierEquals
|
||||
}
|
||||
|
||||
performerFilter.Disambiguation = &models.StringCriterionInput{
|
||||
Value: disambig,
|
||||
Modifier: modifier,
|
||||
}
|
||||
|
||||
if existingID == nil {
|
||||
|
|
|
|||
|
|
@ -15,6 +15,9 @@ func nameFilter(n string) *models.PerformerFilterType {
|
|||
Value: n,
|
||||
Modifier: models.CriterionModifierEquals,
|
||||
},
|
||||
Disambiguation: &models.StringCriterionInput{
|
||||
Modifier: models.CriterionModifierIsNull,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -41,13 +44,6 @@ func TestValidateName(t *testing.T) {
|
|||
newName = "new name"
|
||||
newDisambig = "new disambiguation"
|
||||
)
|
||||
// existing1 := models.Performer{
|
||||
// Name: name1,
|
||||
// }
|
||||
// existing2 := models.Performer{
|
||||
// Name: name2,
|
||||
// Disambiguation: disambig,
|
||||
// }
|
||||
|
||||
pp := 1
|
||||
findFilter := &models.FindFilterType{
|
||||
|
|
|
|||
|
|
@ -81,15 +81,33 @@ func (s *jsonScraper) scrapeByURL(ctx context.Context, url string, ty ScrapeCont
|
|||
}
|
||||
|
||||
q := s.getJsonQuery(doc)
|
||||
// if these just return the return values from scraper.scrape* functions then
|
||||
// it ends up returning ScrapedContent(nil) rather than nil
|
||||
switch ty {
|
||||
case ScrapeContentTypePerformer:
|
||||
return scraper.scrapePerformer(ctx, q)
|
||||
ret, err := scraper.scrapePerformer(ctx, q)
|
||||
if err != nil || ret == nil {
|
||||
return nil, err
|
||||
}
|
||||
return ret, nil
|
||||
case ScrapeContentTypeScene:
|
||||
return scraper.scrapeScene(ctx, q)
|
||||
ret, err := scraper.scrapeScene(ctx, q)
|
||||
if err != nil || ret == nil {
|
||||
return nil, err
|
||||
}
|
||||
return ret, nil
|
||||
case ScrapeContentTypeGallery:
|
||||
return scraper.scrapeGallery(ctx, q)
|
||||
ret, err := scraper.scrapeGallery(ctx, q)
|
||||
if err != nil || ret == nil {
|
||||
return nil, err
|
||||
}
|
||||
return ret, nil
|
||||
case ScrapeContentTypeMovie:
|
||||
return scraper.scrapeMovie(ctx, q)
|
||||
ret, err := scraper.scrapeMovie(ctx, q)
|
||||
if err != nil || ret == nil {
|
||||
return nil, err
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
return nil, ErrNotSupported
|
||||
|
|
|
|||
|
|
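The comment about returning `ScrapedContent(nil)` refers to Go's typed-nil pitfall: a nil concrete pointer stored in an interface produces an interface value that is not `== nil`, which is why each case now checks `ret == nil` before returning. A standalone illustration of the language behaviour (the types here are placeholders, not the scraper types):

```go
package main

import "fmt"

type content interface{}

type performerResult struct{}

// scrape returns a typed nil pointer.
func scrape() *performerResult { return nil }

// wrapped returns that typed nil through an interface,
// which is what the scrapeByURL switch used to do implicitly.
func wrapped() content { return scrape() }

func main() {
	fmt.Println(scrape() == nil)  // true
	fmt.Println(wrapped() == nil) // false: the interface holds (*performerResult)(nil)
}
```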
@ -289,11 +289,13 @@ type mappedMovieScraperConfig struct {
|
|||
mappedConfig
|
||||
|
||||
Studio mappedConfig `yaml:"Studio"`
|
||||
Tags mappedConfig `yaml:"Tags"`
|
||||
}
|
||||
type _mappedMovieScraperConfig mappedMovieScraperConfig
|
||||
|
||||
const (
|
||||
mappedScraperConfigMovieStudio = "Studio"
|
||||
mappedScraperConfigMovieTags = "Tags"
|
||||
)
|
||||
|
||||
func (s *mappedMovieScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
|
||||
|
|
@ -308,9 +310,11 @@ func (s *mappedMovieScraperConfig) UnmarshalYAML(unmarshal func(interface{}) err
|
|||
thisMap := make(map[string]interface{})
|
||||
|
||||
thisMap[mappedScraperConfigMovieStudio] = parentMap[mappedScraperConfigMovieStudio]
|
||||
|
||||
delete(parentMap, mappedScraperConfigMovieStudio)
|
||||
|
||||
thisMap[mappedScraperConfigMovieTags] = parentMap[mappedScraperConfigMovieTags]
|
||||
delete(parentMap, mappedScraperConfigMovieTags)
|
||||
|
||||
// re-unmarshal the sub-fields
|
||||
yml, err := yaml.Marshal(thisMap)
|
||||
if err != nil {
|
||||
|
|
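The `_mappedMovieScraperConfig` alias above is the usual Go trick for custom YAML unmarshalling: decode into an alias type that has no `UnmarshalYAML` method to avoid infinite recursion, then handle the special fields (here `Studio` and `Tags`) separately. A generic sketch of the pattern under gopkg.in/yaml.v2, independent of the scraper types:

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

type config struct {
	Name  string `yaml:"Name"`
	Extra string `yaml:"Extra"`
}

// _config has the same fields but no UnmarshalYAML method, so decoding
// into it does not recurse back into config.UnmarshalYAML.
type _config config

func (c *config) UnmarshalYAML(unmarshal func(interface{}) error) error {
	var tmp _config
	if err := unmarshal(&tmp); err != nil {
		return err
	}
	*c = config(tmp)
	// special-case fields would be re-unmarshalled here, as the
	// movie scraper config does for Studio and Tags
	return nil
}

func main() {
	var c config
	if err := yaml.Unmarshal([]byte("Name: n\nExtra: e\n"), &c); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", c)
}
```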
@ -1099,6 +1103,7 @@ func (s mappedScraper) scrapeMovie(ctx context.Context, q mappedQuery) (*models.
|
|||
movieMap := movieScraperConfig.mappedConfig
|
||||
|
||||
movieStudioMap := movieScraperConfig.Studio
|
||||
movieTagsMap := movieScraperConfig.Tags
|
||||
|
||||
results := movieMap.process(ctx, q, s.Common)
|
||||
|
||||
|
|
@ -1113,7 +1118,19 @@ func (s mappedScraper) scrapeMovie(ctx context.Context, q mappedQuery) (*models.
|
|||
}
|
||||
}
|
||||
|
||||
if len(results) == 0 && ret.Studio == nil {
|
||||
// now apply the tags
|
||||
if movieTagsMap != nil {
|
||||
logger.Debug(`Processing movie tags:`)
|
||||
tagResults := movieTagsMap.process(ctx, q, s.Common)
|
||||
|
||||
for _, p := range tagResults {
|
||||
tag := &models.ScrapedTag{}
|
||||
p.apply(tag)
|
||||
ret.Tags = append(ret.Tags, tag)
|
||||
}
|
||||
}
|
||||
|
||||
if len(results) == 0 && ret.Studio == nil && len(ret.Tags) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -2,29 +2,30 @@ package scraper
|
|||
|
||||
type ScrapedPerformerInput struct {
|
||||
// Set if performer matched
|
||||
StoredID *string `json:"stored_id"`
|
||||
Name *string `json:"name"`
|
||||
Disambiguation *string `json:"disambiguation"`
|
||||
Gender *string `json:"gender"`
|
||||
URL *string `json:"url"`
|
||||
Twitter *string `json:"twitter"`
|
||||
Instagram *string `json:"instagram"`
|
||||
Birthdate *string `json:"birthdate"`
|
||||
Ethnicity *string `json:"ethnicity"`
|
||||
Country *string `json:"country"`
|
||||
EyeColor *string `json:"eye_color"`
|
||||
Height *string `json:"height"`
|
||||
Measurements *string `json:"measurements"`
|
||||
FakeTits *string `json:"fake_tits"`
|
||||
PenisLength *string `json:"penis_length"`
|
||||
Circumcised *string `json:"circumcised"`
|
||||
CareerLength *string `json:"career_length"`
|
||||
Tattoos *string `json:"tattoos"`
|
||||
Piercings *string `json:"piercings"`
|
||||
Aliases *string `json:"aliases"`
|
||||
Details *string `json:"details"`
|
||||
DeathDate *string `json:"death_date"`
|
||||
HairColor *string `json:"hair_color"`
|
||||
Weight *string `json:"weight"`
|
||||
RemoteSiteID *string `json:"remote_site_id"`
|
||||
StoredID *string `json:"stored_id"`
|
||||
Name *string `json:"name"`
|
||||
Disambiguation *string `json:"disambiguation"`
|
||||
Gender *string `json:"gender"`
|
||||
URLs []string `json:"urls"`
|
||||
URL *string `json:"url"` // deprecated
|
||||
Twitter *string `json:"twitter"` // deprecated
|
||||
Instagram *string `json:"instagram"` // deprecated
|
||||
Birthdate *string `json:"birthdate"`
|
||||
Ethnicity *string `json:"ethnicity"`
|
||||
Country *string `json:"country"`
|
||||
EyeColor *string `json:"eye_color"`
|
||||
Height *string `json:"height"`
|
||||
Measurements *string `json:"measurements"`
|
||||
FakeTits *string `json:"fake_tits"`
|
||||
PenisLength *string `json:"penis_length"`
|
||||
Circumcised *string `json:"circumcised"`
|
||||
CareerLength *string `json:"career_length"`
|
||||
Tattoos *string `json:"tattoos"`
|
||||
Piercings *string `json:"piercings"`
|
||||
Aliases *string `json:"aliases"`
|
||||
Details *string `json:"details"`
|
||||
DeathDate *string `json:"death_date"`
|
||||
HairColor *string `json:"hair_color"`
|
||||
Weight *string `json:"weight"`
|
||||
RemoteSiteID *string `json:"remote_site_id"`
|
||||
}
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ import (
|
|||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/match"
|
||||
"github.com/stashapp/stash/pkg/models"
|
||||
"github.com/stashapp/stash/pkg/utils"
|
||||
)
|
||||
|
||||
// postScrape handles post-processing of scraped content. If the content
|
||||
|
|
@ -67,17 +68,53 @@ func (c Cache) postScrapePerformer(ctx context.Context, p models.ScrapedPerforme
|
|||
|
||||
p.Country = resolveCountryName(p.Country)
|
||||
|
||||
// populate URL/URLs
|
||||
// if URLs are provided, only use those
|
||||
if len(p.URLs) > 0 {
|
||||
p.URL = &p.URLs[0]
|
||||
} else {
|
||||
urls := []string{}
|
||||
if p.URL != nil {
|
||||
urls = append(urls, *p.URL)
|
||||
}
|
||||
if p.Twitter != nil && *p.Twitter != "" {
|
||||
// handle twitter profile names
|
||||
u := utils.URLFromHandle(*p.Twitter, "https://twitter.com")
|
||||
urls = append(urls, u)
|
||||
}
|
||||
if p.Instagram != nil && *p.Instagram != "" {
|
||||
// handle instagram profile names
|
||||
u := utils.URLFromHandle(*p.Instagram, "https://instagram.com")
|
||||
urls = append(urls, u)
|
||||
}
|
||||
|
||||
if len(urls) > 0 {
|
||||
p.URLs = urls
|
||||
}
|
||||
}
|
||||
|
||||
return p, nil
|
||||
}
|
||||
|
||||
func (c Cache) postScrapeMovie(ctx context.Context, m models.ScrapedMovie) (ScrapedContent, error) {
|
||||
if m.Studio != nil {
|
||||
r := c.repository
|
||||
if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
|
||||
return match.ScrapedStudio(ctx, r.StudioFinder, m.Studio, nil)
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
r := c.repository
|
||||
if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
|
||||
tqb := r.TagFinder
|
||||
tags, err := postProcessTags(ctx, tqb, m.Tags)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
m.Tags = tags
|
||||
|
||||
if m.Studio != nil {
|
||||
if err := match.ScrapedStudio(ctx, r.StudioFinder, m.Studio, nil); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// post-process - set the image if applicable
|
||||
|
|
|
|||
|
|
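`utils.URLFromHandle` above turns bare twitter/instagram handles into full profile URLs. Its exact behaviour isn't shown in this diff; assuming it leaves values that already look like URLs untouched and otherwise joins the handle onto the site base, a hypothetical stand-in would look like:

```go
package main

import (
	"fmt"
	"strings"
)

// urlFromHandle is a hypothetical stand-in for utils.URLFromHandle:
// full URLs pass through unchanged, bare handles are joined onto baseURL.
func urlFromHandle(handle, baseURL string) string {
	handle = strings.TrimSpace(handle)
	if strings.HasPrefix(handle, "http://") || strings.HasPrefix(handle, "https://") {
		return handle
	}
	return strings.TrimSuffix(baseURL, "/") + "/" + strings.TrimPrefix(handle, "@")
}

func main() {
	fmt.Println(urlFromHandle("example", "https://twitter.com"))                      // https://twitter.com/example
	fmt.Println(urlFromHandle("https://twitter.com/example", "https://twitter.com")) // unchanged
}
```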
@ -163,6 +163,12 @@ func (i *Input) populateURL() {
|
|||
if i.Scene != nil && i.Scene.URL == nil && len(i.Scene.URLs) > 0 {
|
||||
i.Scene.URL = &i.Scene.URLs[0]
|
||||
}
|
||||
if i.Gallery != nil && i.Gallery.URL == nil && len(i.Gallery.URLs) > 0 {
|
||||
i.Gallery.URL = &i.Gallery.URLs[0]
|
||||
}
|
||||
if i.Performer != nil && i.Performer.URL == nil && len(i.Performer.URLs) > 0 {
|
||||
i.Performer.URL = &i.Performer.URLs[0]
|
||||
}
|
||||
}
|
||||
|
||||
// simple type definitions that can help customize
|
||||
|
|
|
|||
|
|
@ -9,7 +9,6 @@ import (
|
|||
"io"
|
||||
"mime/multipart"
|
||||
"net/http"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
|
|
@ -41,6 +40,7 @@ type PerformerReader interface {
|
|||
match.PerformerFinder
|
||||
models.AliasLoader
|
||||
models.StashIDLoader
|
||||
models.URLLoader
|
||||
FindBySceneID(ctx context.Context, sceneID int) ([]*models.Performer, error)
|
||||
GetImage(ctx context.Context, performerID int) ([]byte, error)
|
||||
}
|
||||
|
|
@ -685,6 +685,10 @@ func performerFragmentToScrapedPerformer(p graphql.PerformerFragment) *models.Sc
|
|||
sp.Aliases = &alias
|
||||
}
|
||||
|
||||
for _, u := range p.Urls {
|
||||
sp.URLs = append(sp.URLs, u.URL)
|
||||
}
|
||||
|
||||
return sp
|
||||
}
|
||||
|
||||
|
|
@ -1128,6 +1132,10 @@ func (c Client) SubmitPerformerDraft(ctx context.Context, performer *models.Perf
|
|||
return nil, err
|
||||
}
|
||||
|
||||
if err := performer.LoadURLs(ctx, pqb); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := performer.LoadStashIDs(ctx, pqb); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
@ -1195,28 +1203,8 @@ func (c Client) SubmitPerformerDraft(ctx context.Context, performer *models.Perf
|
|||
}
|
||||
}
|
||||
|
||||
var urls []string
|
||||
if len(strings.TrimSpace(performer.Twitter)) > 0 {
|
||||
reg := regexp.MustCompile(`https?:\/\/(?:www\.)?twitter\.com`)
|
||||
if reg.MatchString(performer.Twitter) {
|
||||
urls = append(urls, strings.TrimSpace(performer.Twitter))
|
||||
} else {
|
||||
urls = append(urls, "https://twitter.com/"+strings.TrimSpace(performer.Twitter))
|
||||
}
|
||||
}
|
||||
if len(strings.TrimSpace(performer.Instagram)) > 0 {
|
||||
reg := regexp.MustCompile(`https?:\/\/(?:www\.)?instagram\.com`)
|
||||
if reg.MatchString(performer.Instagram) {
|
||||
urls = append(urls, strings.TrimSpace(performer.Instagram))
|
||||
} else {
|
||||
urls = append(urls, "https://instagram.com/"+strings.TrimSpace(performer.Instagram))
|
||||
}
|
||||
}
|
||||
if len(strings.TrimSpace(performer.URL)) > 0 {
|
||||
urls = append(urls, strings.TrimSpace(performer.URL))
|
||||
}
|
||||
if len(urls) > 0 {
|
||||
draft.Urls = urls
|
||||
if len(performer.URLs.List()) > 0 {
|
||||
draft.Urls = performer.URLs.List()
|
||||
}
|
||||
|
||||
stashIDs, err := pqb.GetStashIDs(ctx, performer.ID)
|
||||
|
|
|
|||
|
|
@ -62,15 +62,33 @@ func (s *xpathScraper) scrapeByURL(ctx context.Context, url string, ty ScrapeCon
|
|||
}
|
||||
|
||||
q := s.getXPathQuery(doc)
|
||||
// if these just return the return values from scraper.scrape* functions then
|
||||
// it ends up returning ScrapedContent(nil) rather than nil
|
||||
switch ty {
|
||||
case ScrapeContentTypePerformer:
|
||||
return scraper.scrapePerformer(ctx, q)
|
||||
ret, err := scraper.scrapePerformer(ctx, q)
|
||||
if err != nil || ret == nil {
|
||||
return nil, err
|
||||
}
|
||||
return ret, nil
|
||||
case ScrapeContentTypeScene:
|
||||
return scraper.scrapeScene(ctx, q)
|
||||
ret, err := scraper.scrapeScene(ctx, q)
|
||||
if err != nil || ret == nil {
|
||||
return nil, err
|
||||
}
|
||||
return ret, nil
|
||||
case ScrapeContentTypeGallery:
|
||||
return scraper.scrapeGallery(ctx, q)
|
||||
ret, err := scraper.scrapeGallery(ctx, q)
|
||||
if err != nil || ret == nil {
|
||||
return nil, err
|
||||
}
|
||||
return ret, nil
|
||||
case ScrapeContentTypeMovie:
|
||||
return scraper.scrapeMovie(ctx, q)
|
||||
ret, err := scraper.scrapeMovie(ctx, q)
|
||||
if err != nil || ret == nil {
|
||||
return nil, err
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
return nil, ErrNotSupported
|
||||
|
|
|
|||
|
|
@ -495,9 +495,6 @@ func (db *Anonymiser) anonymisePerformers(ctx context.Context) error {
|
|||
table.Col(idColumn),
|
||||
table.Col("name"),
|
||||
table.Col("details"),
|
||||
table.Col("url"),
|
||||
table.Col("twitter"),
|
||||
table.Col("instagram"),
|
||||
table.Col("tattoos"),
|
||||
table.Col("piercings"),
|
||||
).Where(table.Col(idColumn).Gt(lastID)).Limit(1000)
|
||||
|
|
@ -510,9 +507,6 @@ func (db *Anonymiser) anonymisePerformers(ctx context.Context) error {
|
|||
id int
|
||||
name sql.NullString
|
||||
details sql.NullString
|
||||
url sql.NullString
|
||||
twitter sql.NullString
|
||||
instagram sql.NullString
|
||||
tattoos sql.NullString
|
||||
piercings sql.NullString
|
||||
)
|
||||
|
|
@ -521,9 +515,6 @@ func (db *Anonymiser) anonymisePerformers(ctx context.Context) error {
|
|||
&id,
|
||||
&name,
|
||||
&details,
|
||||
&url,
|
||||
&twitter,
|
||||
&instagram,
|
||||
&tattoos,
|
||||
&piercings,
|
||||
); err != nil {
|
||||
|
|
@ -533,9 +524,6 @@ func (db *Anonymiser) anonymisePerformers(ctx context.Context) error {
|
|||
set := goqu.Record{}
|
||||
db.obfuscateNullString(set, "name", name)
|
||||
db.obfuscateNullString(set, "details", details)
|
||||
db.obfuscateNullString(set, "url", url)
|
||||
db.obfuscateNullString(set, "twitter", twitter)
|
||||
db.obfuscateNullString(set, "instagram", instagram)
|
||||
db.obfuscateNullString(set, "tattoos", tattoos)
|
||||
db.obfuscateNullString(set, "piercings", piercings)
|
||||
|
||||
|
|
@ -566,6 +554,10 @@ func (db *Anonymiser) anonymisePerformers(ctx context.Context) error {
|
|||
return err
|
||||
}
|
||||
|
||||
if err := db.anonymiseURLs(ctx, goqu.T(performerURLsTable), "performer_id"); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -30,7 +30,7 @@ const (
|
|||
dbConnTimeout = 30
|
||||
)
|
||||
|
||||
var appSchemaVersion uint = 59
|
||||
var appSchemaVersion uint = 64
|
||||
|
||||
//go:embed migrations/*.sql
|
||||
var migrationsBox embed.FS
|
||||
|
|
|
|||
2
pkg/sqlite/migrations/60_default_filter_move.up.sql
Normal file
2
pkg/sqlite/migrations/60_default_filter_move.up.sql
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
-- no schema changes
|
||||
-- default filters will be removed in post-migration
|
||||
176
pkg/sqlite/migrations/60_postmigrate.go
Normal file
176
pkg/sqlite/migrations/60_postmigrate.go
Normal file
|
|
@ -0,0 +1,176 @@
|
|||
package migrations
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
"github.com/stashapp/stash/internal/manager/config"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/sqlite"
|
||||
)
|
||||
|
||||
type schema60Migrator struct {
|
||||
migrator
|
||||
}
|
||||
|
||||
func post60(ctx context.Context, db *sqlx.DB) error {
|
||||
logger.Info("Running post-migration for schema version 60")
|
||||
|
||||
m := schema60Migrator{
|
||||
migrator: migrator{
|
||||
db: db,
|
||||
},
|
||||
}
|
||||
|
||||
return m.migrate(ctx)
|
||||
}
|
||||
|
||||
func (m *schema60Migrator) decodeJSON(s string, v interface{}) {
|
||||
if s == "" {
|
||||
return
|
||||
}
|
||||
|
||||
if err := json.Unmarshal([]byte(s), v); err != nil {
|
||||
logger.Errorf("error decoding json %q: %v", s, err)
|
||||
}
|
||||
}
|
||||
|
||||
type schema60DefaultFilters map[string]interface{}
|
||||
|
||||
func (m *schema60Migrator) migrate(ctx context.Context) error {
|
||||
|
||||
// save default filters into the UI config
|
||||
if err := m.withTxn(ctx, func(tx *sqlx.Tx) error {
|
||||
query := "SELECT id, mode, find_filter, object_filter, ui_options FROM `saved_filters` WHERE `name` = ''"
|
||||
|
||||
rows, err := m.db.Query(query)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
defaultFilters := make(schema60DefaultFilters)
|
||||
|
||||
for rows.Next() {
|
||||
var (
|
||||
id int
|
||||
mode string
|
||||
findFilterStr string
|
||||
objectFilterStr string
|
||||
uiOptionsStr string
|
||||
)
|
||||
|
||||
if err := rows.Scan(&id, &mode, &findFilterStr, &objectFilterStr, &uiOptionsStr); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// convert the filters to the correct format
|
||||
findFilter := make(map[string]interface{})
|
||||
objectFilter := make(map[string]interface{})
|
||||
uiOptions := make(map[string]interface{})
|
||||
|
||||
m.decodeJSON(findFilterStr, &findFilter)
|
||||
m.decodeJSON(objectFilterStr, &objectFilter)
|
||||
m.decodeJSON(uiOptionsStr, &uiOptions)
|
||||
|
||||
o := map[string]interface{}{
|
||||
"mode": mode,
|
||||
"find_filter": findFilter,
|
||||
"object_filter": objectFilter,
|
||||
"ui_options": uiOptions,
|
||||
}
|
||||
|
||||
defaultFilters[strings.ToLower(mode)] = o
|
||||
}
|
||||
|
||||
if err := rows.Err(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := m.saveDefaultFilters(defaultFilters); err != nil {
|
||||
return fmt.Errorf("saving default filters: %w", err)
|
||||
}
|
||||
|
||||
// remove the default filters from the database
|
||||
query = "DELETE FROM `saved_filters` WHERE `name` = ''"
|
||||
if _, err := m.db.Exec(query); err != nil {
|
||||
return fmt.Errorf("deleting default filters: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *schema60Migrator) saveDefaultFilters(defaultFilters schema60DefaultFilters) error {
|
||||
if len(defaultFilters) == 0 {
|
||||
logger.Debugf("no default filters to save")
|
||||
return nil
|
||||
}
|
||||
|
||||
// save the default filters into the UI config
|
||||
config := config.GetInstance()
|
||||
|
||||
orgPath := config.GetConfigFile()
|
||||
|
||||
if orgPath == "" {
|
||||
// no config file to migrate (usually in a test or new system)
|
||||
logger.Debugf("no config file to migrate")
|
||||
return nil
|
||||
}
|
||||
|
||||
uiConfig := config.GetUIConfiguration()
|
||||
if uiConfig == nil {
|
||||
uiConfig = make(map[string]interface{})
|
||||
}
|
||||
|
||||
// if the defaultFilters key already exists, don't overwrite them
|
||||
if _, found := uiConfig["defaultFilters"]; found {
|
||||
logger.Warn("defaultFilters already exists in the UI config, skipping migration")
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := m.backupConfig(orgPath); err != nil {
|
||||
return fmt.Errorf("backing up config: %w", err)
|
||||
}
|
||||
|
||||
uiConfig["defaultFilters"] = map[string]interface{}(defaultFilters)
|
||||
config.SetUIConfiguration(uiConfig)
|
||||
|
||||
if err := config.Write(); err != nil {
|
||||
return fmt.Errorf("failed to write config: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *schema60Migrator) backupConfig(orgPath string) error {
|
||||
c := config.GetInstance()
|
||||
|
||||
// save a backup of the original config file
|
||||
backupPath := fmt.Sprintf("%s.59.%s", orgPath, time.Now().Format("20060102_150405"))
|
||||
|
||||
data, err := c.Marshal()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal backup config: %w", err)
|
||||
}
|
||||
|
||||
logger.Infof("Backing up config to %s", backupPath)
|
||||
if err := os.WriteFile(backupPath, data, 0644); err != nil {
|
||||
return fmt.Errorf("failed to write backup config: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
sqlite.RegisterPostMigration(60, post60)
|
||||
}
|
||||
10
pkg/sqlite/migrations/61_movie_tags.up.sql
Normal file
10
pkg/sqlite/migrations/61_movie_tags.up.sql
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
CREATE TABLE `movies_tags` (
|
||||
`movie_id` integer NOT NULL,
|
||||
`tag_id` integer NOT NULL,
|
||||
foreign key(`movie_id`) references `movies`(`id`) on delete CASCADE,
|
||||
foreign key(`tag_id`) references `tags`(`id`) on delete CASCADE,
|
||||
PRIMARY KEY(`movie_id`, `tag_id`)
|
||||
);
|
||||
|
||||
CREATE INDEX `index_movies_tags_on_tag_id` on `movies_tags` (`tag_id`);
|
||||
CREATE INDEX `index_movies_tags_on_movie_id` on `movies_tags` (`movie_id`);
|
||||
155
pkg/sqlite/migrations/62_performer_urls.up.sql
Normal file
155
pkg/sqlite/migrations/62_performer_urls.up.sql
Normal file
|
|
@ -0,0 +1,155 @@
|
|||
PRAGMA foreign_keys=OFF;
|
||||
|
||||
CREATE TABLE `performer_urls` (
|
||||
`performer_id` integer NOT NULL,
|
||||
`position` integer NOT NULL,
|
||||
`url` varchar(255) NOT NULL,
|
||||
foreign key(`performer_id`) references `performers`(`id`) on delete CASCADE,
|
||||
PRIMARY KEY(`performer_id`, `position`, `url`)
|
||||
);
|
||||
|
||||
CREATE INDEX `performers_urls_url` on `performer_urls` (`url`);
|
||||
|
||||
-- drop url, twitter and instagram
|
||||
-- make name not null
|
||||
CREATE TABLE `performers_new` (
|
||||
`id` integer not null primary key autoincrement,
|
||||
`name` varchar(255) not null,
|
||||
`disambiguation` varchar(255),
|
||||
`gender` varchar(20),
|
||||
`birthdate` date,
|
||||
`ethnicity` varchar(255),
|
||||
`country` varchar(255),
|
||||
`eye_color` varchar(255),
|
||||
`height` int,
|
||||
`measurements` varchar(255),
|
||||
`fake_tits` varchar(255),
|
||||
`career_length` varchar(255),
|
||||
`tattoos` varchar(255),
|
||||
`piercings` varchar(255),
|
||||
`favorite` boolean not null default '0',
|
||||
`created_at` datetime not null,
|
||||
`updated_at` datetime not null,
|
||||
`details` text,
|
||||
`death_date` date,
|
||||
`hair_color` varchar(255),
|
||||
`weight` integer,
|
||||
`rating` tinyint,
|
||||
`ignore_auto_tag` boolean not null default '0',
|
||||
`image_blob` varchar(255) REFERENCES `blobs`(`checksum`),
|
||||
`penis_length` float,
|
||||
`circumcised` varchar[10]
|
||||
);
|
||||
|
||||
INSERT INTO `performers_new`
|
||||
(
|
||||
`id`,
|
||||
`name`,
|
||||
`disambiguation`,
|
||||
`gender`,
|
||||
`birthdate`,
|
||||
`ethnicity`,
|
||||
`country`,
|
||||
`eye_color`,
|
||||
`height`,
|
||||
`measurements`,
|
||||
`fake_tits`,
|
||||
`career_length`,
|
||||
`tattoos`,
|
||||
`piercings`,
|
||||
`favorite`,
|
||||
`created_at`,
|
||||
`updated_at`,
|
||||
`details`,
|
||||
`death_date`,
|
||||
`hair_color`,
|
||||
`weight`,
|
||||
`rating`,
|
||||
`ignore_auto_tag`,
|
||||
`image_blob`,
|
||||
`penis_length`,
|
||||
`circumcised`
|
||||
)
|
||||
SELECT
|
||||
`id`,
|
||||
`name`,
|
||||
`disambiguation`,
|
||||
`gender`,
|
||||
`birthdate`,
|
||||
`ethnicity`,
|
||||
`country`,
|
||||
`eye_color`,
|
||||
`height`,
|
||||
`measurements`,
|
||||
`fake_tits`,
|
||||
`career_length`,
|
||||
`tattoos`,
|
||||
`piercings`,
|
||||
`favorite`,
|
||||
`created_at`,
|
||||
`updated_at`,
|
||||
`details`,
|
||||
`death_date`,
|
||||
`hair_color`,
|
||||
`weight`,
|
||||
`rating`,
|
||||
`ignore_auto_tag`,
|
||||
`image_blob`,
|
||||
`penis_length`,
|
||||
`circumcised`
|
||||
FROM `performers`;
|
||||
|
||||
INSERT INTO `performer_urls`
|
||||
(
|
||||
`performer_id`,
|
||||
`position`,
|
||||
`url`
|
||||
)
|
||||
SELECT
|
||||
`id`,
|
||||
'0',
|
||||
`url`
|
||||
FROM `performers`
|
||||
WHERE `performers`.`url` IS NOT NULL AND `performers`.`url` != '';
|
||||
|
||||
INSERT INTO `performer_urls`
|
||||
(
|
||||
`performer_id`,
|
||||
`position`,
|
||||
`url`
|
||||
)
|
||||
SELECT
|
||||
`id`,
|
||||
(SELECT count(*) FROM `performer_urls` WHERE `performer_id` = `performers`.`id`)+1,
|
||||
CASE
|
||||
WHEN `twitter` LIKE 'http%://%' THEN `twitter`
|
||||
ELSE 'https://www.twitter.com/' || `twitter`
|
||||
END
|
||||
FROM `performers`
|
||||
WHERE `performers`.`twitter` IS NOT NULL AND `performers`.`twitter` != '';
|
||||
|
||||
INSERT INTO `performer_urls`
|
||||
(
|
||||
`performer_id`,
|
||||
`position`,
|
||||
`url`
|
||||
)
|
||||
SELECT
|
||||
`id`,
|
||||
(SELECT count(*) FROM `performer_urls` WHERE `performer_id` = `performers`.`id`)+1,
|
||||
CASE
|
||||
WHEN `instagram` LIKE 'http%://%' THEN `instagram`
|
||||
ELSE 'https://www.instagram.com/' || `instagram`
|
||||
END
|
||||
FROM `performers`
|
||||
WHERE `performers`.`instagram` IS NOT NULL AND `performers`.`instagram` != '';
|
||||
|
||||
DROP INDEX `performers_name_disambiguation_unique`;
|
||||
DROP INDEX `performers_name_unique`;
|
||||
DROP TABLE `performers`;
|
||||
ALTER TABLE `performers_new` rename to `performers`;
|
||||
|
||||
CREATE UNIQUE INDEX `performers_name_disambiguation_unique` on `performers` (`name`, `disambiguation`) WHERE `disambiguation` IS NOT NULL;
|
||||
CREATE UNIQUE INDEX `performers_name_unique` on `performers` (`name`) WHERE `disambiguation` IS NULL;
|
||||
|
||||
PRAGMA foreign_keys=ON;
|
||||
9
pkg/sqlite/migrations/63_studio_tags.up.sql
Normal file
9
pkg/sqlite/migrations/63_studio_tags.up.sql
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
CREATE TABLE `studios_tags` (
|
||||
`studio_id` integer NOT NULL,
|
||||
`tag_id` integer NOT NULL,
|
||||
foreign key(`studio_id`) references `studios`(`id`) on delete CASCADE,
|
||||
foreign key(`tag_id`) references `tags`(`id`) on delete CASCADE,
|
||||
PRIMARY KEY(`studio_id`, `tag_id`)
|
||||
);
|
||||
|
||||
CREATE INDEX `index_studios_tags_on_tag_id` on `studios_tags` (`tag_id`);
|
||||
49
pkg/sqlite/migrations/64_fixes.up.sql
Normal file
49
pkg/sqlite/migrations/64_fixes.up.sql
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
PRAGMA foreign_keys=OFF;
|
||||
|
||||
-- recreate scenes_view_dates adding not null to scene_id and adding indexes
|
||||
CREATE TABLE `scenes_view_dates_new` (
|
||||
`scene_id` integer not null,
|
||||
`view_date` datetime not null,
|
||||
foreign key(`scene_id`) references `scenes`(`id`) on delete CASCADE
|
||||
);
|
||||
|
||||
INSERT INTO `scenes_view_dates_new`
|
||||
(
|
||||
`scene_id`,
|
||||
`view_date`
|
||||
)
|
||||
SELECT
|
||||
`scene_id`,
|
||||
`view_date`
|
||||
FROM `scenes_view_dates`
|
||||
WHERE `scenes_view_dates`.`scene_id` IS NOT NULL;
|
||||
|
||||
DROP INDEX IF EXISTS `index_scenes_view_dates`;
|
||||
DROP TABLE `scenes_view_dates`;
|
||||
ALTER TABLE `scenes_view_dates_new` rename to `scenes_view_dates`;
|
||||
CREATE INDEX `index_scenes_view_dates` ON `scenes_view_dates` (`scene_id`);
|
||||
|
||||
-- recreate scenes_o_dates adding not null to scene_id and adding indexes
|
||||
CREATE TABLE `scenes_o_dates_new` (
|
||||
`scene_id` integer not null,
|
||||
`o_date` datetime not null,
|
||||
foreign key(`scene_id`) references `scenes`(`id`) on delete CASCADE
|
||||
);
|
||||
|
||||
INSERT INTO `scenes_o_dates_new`
|
||||
(
|
||||
`scene_id`,
|
||||
`o_date`
|
||||
)
|
||||
SELECT
|
||||
`scene_id`,
|
||||
`o_date`
|
||||
FROM `scenes_o_dates`
|
||||
WHERE `scenes_o_dates`.`scene_id` IS NOT NULL;
|
||||
|
||||
DROP INDEX IF EXISTS `index_scenes_o_dates`;
|
||||
DROP TABLE `scenes_o_dates`;
|
||||
ALTER TABLE `scenes_o_dates_new` rename to `scenes_o_dates`;
|
||||
CREATE INDEX `index_scenes_o_dates` ON `scenes_o_dates` (`scene_id`);
|
||||
|
||||
PRAGMA foreign_keys=ON;
|
||||
92
pkg/sqlite/migrations/64_postmigrate.go
Normal file
92
pkg/sqlite/migrations/64_postmigrate.go
Normal file
|
|
@ -0,0 +1,92 @@
|
|||
package migrations
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
"github.com/stashapp/stash/pkg/logger"
|
||||
"github.com/stashapp/stash/pkg/sqlite"
|
||||
)
|
||||
|
||||
// this is a copy of the 55 post migration
|
||||
// some non-UTC dates were missed, so we need to correct them
|
||||
|
||||
type schema64Migrator struct {
|
||||
migrator
|
||||
}
|
||||
|
||||
func post64(ctx context.Context, db *sqlx.DB) error {
|
||||
logger.Info("Running post-migration for schema version 64")
|
||||
|
||||
m := schema64Migrator{
|
||||
migrator: migrator{
|
||||
db: db,
|
||||
},
|
||||
}
|
||||
|
||||
return m.migrate(ctx)
|
||||
}
|
||||
|
||||
func (m *schema64Migrator) migrate(ctx context.Context) error {
|
||||
// the last_played_at column was stored in a different format than the rest of the timestamps
|
||||
// convert the play history date to the correct format
|
||||
if err := m.withTxn(ctx, func(tx *sqlx.Tx) error {
|
||||
query := "SELECT DISTINCT `scene_id`, `view_date` FROM `scenes_view_dates`"
|
||||
|
||||
rows, err := m.db.Query(query)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
for rows.Next() {
|
||||
var (
|
||||
id int
|
||||
viewDate sqlite.Timestamp
|
||||
)
|
||||
|
||||
err := rows.Scan(&id, &viewDate)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// skip if already in the correct format
|
||||
if viewDate.Timestamp.Location() == time.UTC {
|
||||
logger.Debugf("view date %s is already in the correct format", viewDate.Timestamp)
|
||||
continue
|
||||
}
|
||||
|
||||
utcTimestamp := sqlite.UTCTimestamp{
|
||||
Timestamp: viewDate,
|
||||
}
|
||||
|
||||
// convert the timestamp to the correct format
|
||||
logger.Debugf("correcting view date %q to UTC date %q for scene %d", viewDate.Timestamp, viewDate.Timestamp.UTC(), id)
|
||||
r, err := m.db.Exec("UPDATE scenes_view_dates SET view_date = ? WHERE scene_id = ? AND (view_date = ? OR view_date = ?)", utcTimestamp, id, viewDate.Timestamp, viewDate)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error correcting view date %s to %s: %w", viewDate.Timestamp, viewDate, err)
|
||||
}
|
||||
|
||||
rowsAffected, err := r.RowsAffected()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if rowsAffected == 0 {
|
||||
return fmt.Errorf("no rows affected when updating view date %s to %s for scene %d", viewDate.Timestamp, viewDate.Timestamp.UTC(), id)
|
||||
}
|
||||
}
|
||||
|
||||
return rows.Err()
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
sqlite.RegisterPostMigration(64, post64)
|
||||
}
|
||||
|
|
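The schema 64 post-migration above decides whether a stored view date still needs correcting by checking `time.Time.Location()` against `time.UTC`. A small standalone example of that check and the conversion (the fixed offset is only for illustration):

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// a timestamp stored with a non-UTC offset, like the rows the migration targets
	local := time.Date(2023, 5, 1, 12, 0, 0, 0, time.FixedZone("UTC+2", 2*60*60))

	fmt.Println(local.Location() == time.UTC) // false: needs correcting
	fmt.Println(local.UTC())                  // 2023-05-01 10:00:00 +0000 UTC

	// after normalising, the migration skips the row
	fmt.Println(local.UTC().Location() == time.UTC) // true
}
```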
@ -23,6 +23,8 @@ const (
|
|||
movieFrontImageBlobColumn = "front_image_blob"
|
||||
movieBackImageBlobColumn = "back_image_blob"
|
||||
|
||||
moviesTagsTable = "movies_tags"
|
||||
|
||||
movieURLsTable = "movie_urls"
|
||||
movieURLColumn = "url"
|
||||
)
|
||||
|
|
@ -98,6 +100,7 @@ func (r *movieRowRecord) fromPartial(o models.MoviePartial) {
|
|||
type movieRepositoryType struct {
|
||||
repository
|
||||
scenes repository
|
||||
tags joinRepository
|
||||
}
|
||||
|
||||
var (
|
||||
|
|
@ -110,11 +113,21 @@ var (
|
|||
tableName: moviesScenesTable,
|
||||
idColumn: movieIDColumn,
|
||||
},
|
||||
tags: joinRepository{
|
||||
repository: repository{
|
||||
tableName: moviesTagsTable,
|
||||
idColumn: movieIDColumn,
|
||||
},
|
||||
fkColumn: tagIDColumn,
|
||||
foreignTable: tagTable,
|
||||
orderBy: "tags.name ASC",
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
type MovieStore struct {
|
||||
blobJoinQueryBuilder
|
||||
tagRelationshipStore
|
||||
|
||||
tableMgr *table
|
||||
}
|
||||
|
|
@ -125,6 +138,11 @@ func NewMovieStore(blobStore *BlobStore) *MovieStore {
|
|||
blobStore: blobStore,
|
||||
joinTable: movieTable,
|
||||
},
|
||||
tagRelationshipStore: tagRelationshipStore{
|
||||
idRelationshipStore: idRelationshipStore{
|
||||
joinTable: moviesTagsTableMgr,
|
||||
},
|
||||
},
|
||||
|
||||
tableMgr: movieTableMgr,
|
||||
}
|
||||
|
|
@ -154,6 +172,10 @@ func (qb *MovieStore) Create(ctx context.Context, newObject *models.Movie) error
|
|||
}
|
||||
}
|
||||
|
||||
if err := qb.tagRelationshipStore.createRelationships(ctx, id, newObject.TagIDs); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
updated, err := qb.find(ctx, id)
|
||||
if err != nil {
|
||||
return fmt.Errorf("finding after create: %w", err)
|
||||
|
|
@ -185,6 +207,10 @@ func (qb *MovieStore) UpdatePartial(ctx context.Context, id int, partial models.
|
|||
}
|
||||
}
|
||||
|
||||
if err := qb.tagRelationshipStore.modifyRelationships(ctx, id, partial.TagIDs); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return qb.find(ctx, id)
|
||||
}
|
||||
|
||||
|
|
@ -202,6 +228,10 @@ func (qb *MovieStore) Update(ctx context.Context, updatedObject *models.Movie) e
|
|||
}
|
||||
}
|
||||
|
||||
if err := qb.tagRelationshipStore.replaceRelationships(ctx, updatedObject.ID, updatedObject.TagIDs); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
|
|
@ -430,6 +460,7 @@ var movieSortOptions = sortOptions{
|
|||
"random",
|
||||
"rating",
|
||||
"scenes_count",
|
||||
"tag_count",
|
||||
"updated_at",
|
||||
}
|
||||
|
||||
|
|
@ -451,6 +482,8 @@ func (qb *MovieStore) getMovieSort(findFilter *models.FindFilterType) (string, e
|
|||
|
||||
sortQuery := ""
|
||||
switch sort {
|
||||
case "tag_count":
|
||||
sortQuery += getCountSort(movieTable, moviesTagsTable, movieIDColumn, direction)
|
||||
case "scenes_count": // generic getSort won't work for this
|
||||
sortQuery += getCountSort(movieTable, moviesScenesTable, movieIDColumn, direction)
|
||||
default:
|
||||
|
|
|
|||
|
|
@ -63,6 +63,8 @@ func (qb *movieFilterHandler) criterionHandler() criterionHandler {
|
|||
qb.urlsCriterionHandler(movieFilter.URL),
|
||||
studioCriterionHandler(movieTable, movieFilter.Studios),
|
||||
qb.performersCriterionHandler(movieFilter.Performers),
|
||||
qb.tagsCriterionHandler(movieFilter.Tags),
|
||||
qb.tagCountCriterionHandler(movieFilter.TagCount),
|
||||
&dateCriterionHandler{movieFilter.Date, "movies.date", nil},
|
||||
&timestampCriterionHandler{movieFilter.CreatedAt, "movies.created_at", nil},
|
||||
&timestampCriterionHandler{movieFilter.UpdatedAt, "movies.updated_at", nil},
|
||||
|
|
@ -162,3 +164,28 @@ func (qb *movieFilterHandler) performersCriterionHandler(performers *models.Mult
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (qb *movieFilterHandler) tagsCriterionHandler(tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc {
|
||||
h := joinedHierarchicalMultiCriterionHandlerBuilder{
|
||||
primaryTable: movieTable,
|
||||
foreignTable: tagTable,
|
||||
foreignFK: "tag_id",
|
||||
|
||||
relationsTable: "tags_relations",
|
||||
joinAs: "movie_tag",
|
||||
joinTable: moviesTagsTable,
|
||||
primaryFK: movieIDColumn,
|
||||
}
|
||||
|
||||
return h.handler(tags)
|
||||
}
|
||||
|
||||
func (qb *movieFilterHandler) tagCountCriterionHandler(count *models.IntCriterionInput) criterionHandlerFunc {
|
||||
h := countCriterionHandlerBuilder{
|
||||
primaryTable: movieTable,
|
||||
joinTable: moviesTagsTable,
|
||||
primaryFK: movieIDColumn,
|
||||
}
|
||||
|
||||
return h.handler(count)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ import (
|
|||
"strconv"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
|
|
@ -17,7 +18,12 @@ import (
|
|||
|
||||
func loadMovieRelationships(ctx context.Context, expected models.Movie, actual *models.Movie) error {
|
||||
if expected.URLs.Loaded() {
|
||||
if err := actual.LoadURLs(ctx, db.Gallery); err != nil {
|
||||
if err := actual.LoadURLs(ctx, db.Movie); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if expected.TagIDs.Loaded() {
|
||||
if err := actual.LoadTagIDs(ctx, db.Movie); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
|
@ -25,6 +31,337 @@ func loadMovieRelationships(ctx context.Context, expected models.Movie, actual *
|
|||
return nil
|
||||
}
|
||||
|
||||
func Test_MovieStore_Create(t *testing.T) {
|
||||
var (
|
||||
name = "name"
|
||||
url = "url"
|
||||
aliases = "alias1, alias2"
|
||||
director = "director"
|
||||
rating = 60
|
||||
duration = 34
|
||||
synopsis = "synopsis"
|
||||
date, _ = models.ParseDate("2003-02-01")
|
||||
createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
|
||||
updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
|
||||
)
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
newObject models.Movie
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
"full",
|
||||
models.Movie{
|
||||
Name: name,
|
||||
Duration: &duration,
|
||||
Date: &date,
|
||||
Rating: &rating,
|
||||
StudioID: &studioIDs[studioIdxWithMovie],
|
||||
Director: director,
|
||||
Synopsis: synopsis,
|
||||
URLs: models.NewRelatedStrings([]string{url}),
|
||||
TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithMovie]}),
|
||||
Aliases: aliases,
|
||||
CreatedAt: createdAt,
|
||||
UpdatedAt: updatedAt,
|
||||
},
|
||||
false,
|
||||
},
|
||||
{
|
||||
"invalid tag id",
|
||||
models.Movie{
|
||||
Name: name,
|
||||
TagIDs: models.NewRelatedIDs([]int{invalidID}),
|
||||
},
|
||||
true,
|
||||
},
|
||||
}
|
||||
|
||||
qb := db.Movie
|
||||
|
||||
for _, tt := range tests {
|
||||
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
|
||||
assert := assert.New(t)
|
||||
|
||||
p := tt.newObject
|
||||
if err := qb.Create(ctx, &p); (err != nil) != tt.wantErr {
|
||||
t.Errorf("MovieStore.Create() error = %v, wantErr = %v", err, tt.wantErr)
|
||||
}
|
||||
|
||||
if tt.wantErr {
|
||||
assert.Zero(p.ID)
|
||||
return
|
||||
}
|
||||
|
||||
assert.NotZero(p.ID)
|
||||
|
||||
copy := tt.newObject
|
||||
copy.ID = p.ID
|
||||
|
||||
// load relationships
|
||||
if err := loadMovieRelationships(ctx, copy, &p); err != nil {
|
||||
t.Errorf("loadMovieRelationships() error = %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
assert.Equal(copy, p)
|
||||
|
||||
// ensure we can find the movie
|
||||
found, err := qb.Find(ctx, p.ID)
|
||||
if err != nil {
|
||||
t.Errorf("MovieStore.Find() error = %v", err)
|
||||
}
|
||||
|
||||
if !assert.NotNil(found) {
|
||||
return
|
||||
}
|
||||
|
||||
// load relationships
|
||||
if err := loadMovieRelationships(ctx, copy, found); err != nil {
|
||||
t.Errorf("loadMovieRelationships() error = %v", err)
|
||||
return
|
||||
}
|
||||
assert.Equal(copy, *found)
|
||||
|
||||
return
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_movieQueryBuilder_Update(t *testing.T) {
|
||||
var (
|
||||
name = "name"
|
||||
url = "url"
|
||||
aliases = "alias1, alias2"
|
||||
director = "director"
|
||||
rating = 60
|
||||
duration = 34
|
||||
synopsis = "synopsis"
|
||||
date, _ = models.ParseDate("2003-02-01")
|
||||
createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
|
||||
updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
|
||||
)
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
updatedObject *models.Movie
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
"full",
|
||||
&models.Movie{
|
||||
ID: movieIDs[movieIdxWithTag],
|
||||
Name: name,
|
||||
Duration: &duration,
|
||||
Date: &date,
|
||||
Rating: &rating,
|
||||
StudioID: &studioIDs[studioIdxWithMovie],
|
||||
Director: director,
|
||||
Synopsis: synopsis,
|
||||
URLs: models.NewRelatedStrings([]string{url}),
|
||||
TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithMovie]}),
|
||||
Aliases: aliases,
|
||||
CreatedAt: createdAt,
|
||||
UpdatedAt: updatedAt,
|
||||
},
|
||||
false,
|
||||
},
|
||||
{
|
||||
"clear tag ids",
|
||||
&models.Movie{
|
||||
ID: movieIDs[movieIdxWithTag],
|
||||
Name: name,
|
||||
TagIDs: models.NewRelatedIDs([]int{}),
|
||||
},
|
||||
false,
|
||||
},
|
||||
{
|
||||
"invalid studio id",
|
||||
&models.Movie{
|
||||
ID: movieIDs[movieIdxWithScene],
|
||||
Name: name,
|
||||
StudioID: &invalidID,
|
||||
},
|
||||
true,
|
||||
},
|
||||
{
|
||||
"invalid tag id",
|
||||
&models.Movie{
|
||||
ID: movieIDs[movieIdxWithScene],
|
||||
Name: name,
|
||||
TagIDs: models.NewRelatedIDs([]int{invalidID}),
|
||||
},
|
||||
true,
|
||||
},
|
||||
}
|
||||
|
||||
qb := db.Movie
|
||||
for _, tt := range tests {
|
||||
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
|
||||
assert := assert.New(t)
|
||||
|
||||
copy := *tt.updatedObject
|
||||
|
||||
if err := qb.Update(ctx, tt.updatedObject); (err != nil) != tt.wantErr {
|
||||
t.Errorf("movieQueryBuilder.Update() error = %v, wantErr %v", err, tt.wantErr)
|
||||
}
|
||||
|
||||
if tt.wantErr {
|
||||
return
|
||||
}
|
||||
|
||||
s, err := qb.Find(ctx, tt.updatedObject.ID)
|
||||
if err != nil {
|
||||
t.Errorf("movieQueryBuilder.Find() error = %v", err)
|
||||
}
|
||||
|
||||
// load relationships
|
||||
if err := loadMovieRelationships(ctx, copy, s); err != nil {
|
||||
t.Errorf("loadMovieRelationships() error = %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
assert.Equal(copy, *s)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func clearMoviePartial() models.MoviePartial {
|
||||
// leave mandatory fields
|
||||
return models.MoviePartial{
|
||||
Aliases: models.OptionalString{Set: true, Null: true},
|
||||
Synopsis: models.OptionalString{Set: true, Null: true},
|
||||
Director: models.OptionalString{Set: true, Null: true},
|
||||
Duration: models.OptionalInt{Set: true, Null: true},
|
||||
URLs: &models.UpdateStrings{Mode: models.RelationshipUpdateModeSet},
|
||||
Date: models.OptionalDate{Set: true, Null: true},
|
||||
Rating: models.OptionalInt{Set: true, Null: true},
|
||||
StudioID: models.OptionalInt{Set: true, Null: true},
|
||||
TagIDs: &models.UpdateIDs{Mode: models.RelationshipUpdateModeSet},
|
||||
}
|
||||
}
|
||||
|
||||
func Test_movieQueryBuilder_UpdatePartial(t *testing.T) {
|
||||
var (
|
||||
name = "name"
|
||||
url = "url"
|
||||
aliases = "alias1, alias2"
|
||||
director = "director"
|
||||
rating = 60
|
||||
duration = 34
|
||||
synopsis = "synopsis"
|
||||
date, _ = models.ParseDate("2003-02-01")
|
||||
createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
|
||||
updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
|
||||
)
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
id int
|
||||
partial models.MoviePartial
|
||||
want models.Movie
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
"full",
|
||||
movieIDs[movieIdxWithScene],
|
||||
models.MoviePartial{
|
||||
Name: models.NewOptionalString(name),
|
||||
Director: models.NewOptionalString(director),
|
||||
Synopsis: models.NewOptionalString(synopsis),
|
||||
Aliases: models.NewOptionalString(aliases),
|
||||
URLs: &models.UpdateStrings{
|
||||
Values: []string{url},
|
||||
Mode: models.RelationshipUpdateModeSet,
|
||||
},
|
||||
Date: models.NewOptionalDate(date),
|
||||
Duration: models.NewOptionalInt(duration),
|
||||
Rating: models.NewOptionalInt(rating),
|
||||
StudioID: models.NewOptionalInt(studioIDs[studioIdxWithMovie]),
|
||||
CreatedAt: models.NewOptionalTime(createdAt),
|
||||
UpdatedAt: models.NewOptionalTime(updatedAt),
|
||||
TagIDs: &models.UpdateIDs{
|
||||
IDs: []int{tagIDs[tagIdx1WithMovie], tagIDs[tagIdx1WithDupName]},
|
||||
Mode: models.RelationshipUpdateModeSet,
|
||||
},
|
||||
},
|
||||
models.Movie{
|
||||
ID: movieIDs[movieIdxWithScene],
|
||||
Name: name,
|
||||
Director: director,
|
||||
Synopsis: synopsis,
|
||||
Aliases: aliases,
|
||||
URLs: models.NewRelatedStrings([]string{url}),
|
||||
Date: &date,
|
||||
Duration: &duration,
|
||||
Rating: &rating,
|
||||
StudioID: &studioIDs[studioIdxWithMovie],
|
||||
CreatedAt: createdAt,
|
||||
UpdatedAt: updatedAt,
|
||||
TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithMovie]}),
|
||||
},
|
||||
false,
|
||||
},
|
||||
{
|
||||
"clear all",
|
||||
movieIDs[movieIdxWithScene],
|
||||
clearMoviePartial(),
|
||||
models.Movie{
|
||||
ID: movieIDs[movieIdxWithScene],
|
||||
Name: movieNames[movieIdxWithScene],
|
||||
TagIDs: models.NewRelatedIDs([]int{}),
|
||||
},
|
||||
false,
|
||||
},
|
||||
{
|
||||
"invalid id",
|
||||
invalidID,
|
||||
models.MoviePartial{},
|
||||
models.Movie{},
|
||||
true,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
qb := db.Movie
|
||||
|
||||
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
|
||||
assert := assert.New(t)
|
||||
|
||||
got, err := qb.UpdatePartial(ctx, tt.id, tt.partial)
|
||||
if (err != nil) != tt.wantErr {
|
||||
t.Errorf("movieQueryBuilder.UpdatePartial() error = %v, wantErr %v", err, tt.wantErr)
|
||||
return
|
||||
}
|
||||
|
||||
if tt.wantErr {
|
||||
return
|
||||
}
|
||||
|
||||
// load relationships
|
||||
if err := loadMovieRelationships(ctx, tt.want, got); err != nil {
|
||||
t.Errorf("loadMovieRelationships() error = %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
assert.Equal(tt.want, *got)
|
||||
|
||||
s, err := qb.Find(ctx, tt.id)
|
||||
if err != nil {
|
||||
t.Errorf("movieQueryBuilder.Find() error = %v", err)
|
||||
}
|
||||
|
||||
// load relationships
|
||||
if err := loadMovieRelationships(ctx, tt.want, s); err != nil {
|
||||
t.Errorf("loadMovieRelationships() error = %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
assert.Equal(tt.want, *s)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestMovieFindByName(t *testing.T) {
|
||||
withTxn(func(ctx context.Context) error {
|
||||
mqb := db.Movie
|
||||
|
|
@ -280,12 +617,12 @@ func TestMovieQueryURLExcludes(t *testing.T) {
|
|||
Name: &nameCriterion,
|
||||
}
|
||||
|
||||
movies := queryMovie(ctx, t, mqb, &filter, nil)
|
||||
movies := queryMovies(ctx, t, &filter, nil)
|
||||
assert.Len(t, movies, 0, "Expected no movies to be found")
|
||||
|
||||
// query for movies that exclude the URL "ccc"
|
||||
urlCriterion.Value = "ccc"
|
||||
movies = queryMovie(ctx, t, mqb, &filter, nil)
|
||||
movies = queryMovies(ctx, t, &filter, nil)
|
||||
|
||||
if assert.Len(t, movies, 1, "Expected one movie to be found") {
|
||||
assert.Equal(t, movie.Name, movies[0].Name)
|
||||
|
|
@ -300,7 +637,7 @@ func verifyMovieQuery(t *testing.T, filter models.MovieFilterType, verifyFn func
|
|||
t.Helper()
|
||||
sqb := db.Movie
|
||||
|
||||
movies := queryMovie(ctx, t, sqb, &filter, nil)
|
||||
movies := queryMovies(ctx, t, &filter, nil)
|
||||
|
||||
for _, movie := range movies {
|
||||
if err := movie.LoadURLs(ctx, sqb); err != nil {
|
||||
|
|
@ -319,7 +656,8 @@ func verifyMovieQuery(t *testing.T, filter models.MovieFilterType, verifyFn func
|
|||
})
|
||||
}
|
||||
|
||||
func queryMovie(ctx context.Context, t *testing.T, sqb models.MovieReader, movieFilter *models.MovieFilterType, findFilter *models.FindFilterType) []*models.Movie {
|
||||
func queryMovies(ctx context.Context, t *testing.T, movieFilter *models.MovieFilterType, findFilter *models.FindFilterType) []*models.Movie {
|
||||
sqb := db.Movie
|
||||
movies, _, err := sqb.Query(ctx, movieFilter, findFilter)
|
||||
if err != nil {
|
||||
t.Errorf("Error querying movie: %s", err.Error())
|
||||
|
|
@ -328,6 +666,102 @@ func queryMovie(ctx context.Context, t *testing.T, sqb models.MovieReader, movie
|
|||
return movies
|
||||
}
|
||||
|
||||
func TestMovieQueryTags(t *testing.T) {
|
||||
withTxn(func(ctx context.Context) error {
|
||||
tagCriterion := models.HierarchicalMultiCriterionInput{
|
||||
Value: []string{
|
||||
strconv.Itoa(tagIDs[tagIdxWithMovie]),
|
||||
strconv.Itoa(tagIDs[tagIdx1WithMovie]),
|
||||
},
|
||||
Modifier: models.CriterionModifierIncludes,
|
||||
}
|
||||
|
||||
movieFilter := models.MovieFilterType{
|
||||
Tags: &tagCriterion,
|
||||
}
|
||||
|
||||
// ensure ids are correct
|
||||
movies := queryMovies(ctx, t, &movieFilter, nil)
|
||||
assert.Len(t, movies, 3)
|
||||
for _, movie := range movies {
|
||||
assert.True(t, movie.ID == movieIDs[movieIdxWithTag] || movie.ID == movieIDs[movieIdxWithTwoTags] || movie.ID == movieIDs[movieIdxWithThreeTags])
|
||||
}
|
||||
|
||||
tagCriterion = models.HierarchicalMultiCriterionInput{
|
||||
Value: []string{
|
||||
strconv.Itoa(tagIDs[tagIdx1WithMovie]),
|
||||
strconv.Itoa(tagIDs[tagIdx2WithMovie]),
|
||||
},
|
||||
Modifier: models.CriterionModifierIncludesAll,
|
||||
}
|
||||
|
||||
movies = queryMovies(ctx, t, &movieFilter, nil)
|
||||
|
||||
if assert.Len(t, movies, 2) {
|
||||
assert.Equal(t, movieIDs[movieIdxWithTwoTags], movies[0].ID)
|
||||
assert.Equal(t, movieIDs[movieIdxWithThreeTags], movies[1].ID)
|
||||
}
|
||||
|
||||
tagCriterion = models.HierarchicalMultiCriterionInput{
|
||||
Value: []string{
|
||||
strconv.Itoa(tagIDs[tagIdx1WithMovie]),
|
||||
},
|
||||
Modifier: models.CriterionModifierExcludes,
|
||||
}
|
||||
|
||||
q := getSceneStringValue(movieIdxWithTwoTags, titleField)
|
||||
findFilter := models.FindFilterType{
|
||||
Q: &q,
|
||||
}
|
||||
|
||||
movies = queryMovies(ctx, t, &movieFilter, &findFilter)
|
||||
assert.Len(t, movies, 0)
|
||||
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
func TestMovieQueryTagCount(t *testing.T) {
|
||||
const tagCount = 1
|
||||
tagCountCriterion := models.IntCriterionInput{
|
||||
Value: tagCount,
|
||||
Modifier: models.CriterionModifierEquals,
|
||||
}
|
||||
|
||||
verifyMoviesTagCount(t, tagCountCriterion)
|
||||
|
||||
tagCountCriterion.Modifier = models.CriterionModifierNotEquals
|
||||
verifyMoviesTagCount(t, tagCountCriterion)
|
||||
|
||||
tagCountCriterion.Modifier = models.CriterionModifierGreaterThan
|
||||
verifyMoviesTagCount(t, tagCountCriterion)
|
||||
|
||||
tagCountCriterion.Modifier = models.CriterionModifierLessThan
|
||||
verifyMoviesTagCount(t, tagCountCriterion)
|
||||
}
|
||||
|
||||
func verifyMoviesTagCount(t *testing.T, tagCountCriterion models.IntCriterionInput) {
|
||||
withTxn(func(ctx context.Context) error {
|
||||
sqb := db.Movie
|
||||
movieFilter := models.MovieFilterType{
|
||||
TagCount: &tagCountCriterion,
|
||||
}
|
||||
|
||||
movies := queryMovies(ctx, t, &movieFilter, nil)
|
||||
assert.Greater(t, len(movies), 0)
|
||||
|
||||
for _, movie := range movies {
|
||||
ids, err := sqb.GetTagIDs(ctx, movie.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
verifyInt(t, len(ids), tagCountCriterion)
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
func TestMovieQuerySorting(t *testing.T) {
|
||||
sort := "scenes_count"
|
||||
direction := models.SortDirectionEnumDesc
|
||||
|
|
@ -337,8 +771,7 @@ func TestMovieQuerySorting(t *testing.T) {
|
|||
}
|
||||
|
||||
withTxn(func(ctx context.Context) error {
|
||||
sqb := db.Movie
|
||||
movies := queryMovie(ctx, t, sqb, nil, &findFilter)
|
||||
movies := queryMovies(ctx, t, nil, &findFilter)
|
||||
|
||||
// movies should be in the same order as their indexes
|
||||
firstMovie := movies[0]
|
||||
|
|
@ -348,7 +781,7 @@ func TestMovieQuerySorting(t *testing.T) {
|
|||
// sort in ascending order
|
||||
direction = models.SortDirectionEnumAsc
|
||||
|
||||
movies = queryMovie(ctx, t, sqb, nil, &findFilter)
|
||||
movies = queryMovies(ctx, t, nil, &findFilter)
|
||||
lastMovie := movies[len(movies)-1]
|
||||
|
||||
assert.Equal(t, movieIDs[movieIdxWithScene], lastMovie.ID)
|
||||
|
|
|
|||
|
|
@ -23,6 +23,9 @@ const (
|
|||
performerAliasColumn = "alias"
|
||||
performersTagsTable = "performers_tags"
|
||||
|
||||
performerURLsTable = "performer_urls"
|
||||
performerURLColumn = "url"
|
||||
|
||||
performerImageBlobColumn = "image_blob"
|
||||
)
|
||||
|
||||
|
|
@ -31,9 +34,6 @@ type performerRow struct {
|
|||
Name null.String `db:"name"` // TODO: make schema non-nullable
|
||||
Disambigation zero.String `db:"disambiguation"`
|
||||
Gender zero.String `db:"gender"`
|
||||
URL zero.String `db:"url"`
|
||||
Twitter zero.String `db:"twitter"`
|
||||
Instagram zero.String `db:"instagram"`
|
||||
Birthdate NullDate `db:"birthdate"`
|
||||
Ethnicity zero.String `db:"ethnicity"`
|
||||
Country zero.String `db:"country"`
|
||||
|
|
@ -68,9 +68,6 @@ func (r *performerRow) fromPerformer(o models.Performer) {
|
|||
if o.Gender != nil && o.Gender.IsValid() {
|
||||
r.Gender = zero.StringFrom(o.Gender.String())
|
||||
}
|
||||
r.URL = zero.StringFrom(o.URL)
|
||||
r.Twitter = zero.StringFrom(o.Twitter)
|
||||
r.Instagram = zero.StringFrom(o.Instagram)
|
||||
r.Birthdate = NullDateFromDatePtr(o.Birthdate)
|
||||
r.Ethnicity = zero.StringFrom(o.Ethnicity)
|
||||
r.Country = zero.StringFrom(o.Country)
|
||||
|
|
@ -101,9 +98,6 @@ func (r *performerRow) resolve() *models.Performer {
|
|||
ID: r.ID,
|
||||
Name: r.Name.String,
|
||||
Disambiguation: r.Disambigation.String,
|
||||
URL: r.URL.String,
|
||||
Twitter: r.Twitter.String,
|
||||
Instagram: r.Instagram.String,
|
||||
Birthdate: r.Birthdate.DatePtr(),
|
||||
Ethnicity: r.Ethnicity.String,
|
||||
Country: r.Country.String,
|
||||
|
|
@ -148,9 +142,6 @@ func (r *performerRowRecord) fromPartial(o models.PerformerPartial) {
|
|||
r.setString("name", o.Name)
|
||||
r.setNullString("disambiguation", o.Disambiguation)
|
||||
r.setNullString("gender", o.Gender)
|
||||
r.setNullString("url", o.URL)
|
||||
r.setNullString("twitter", o.Twitter)
|
||||
r.setNullString("instagram", o.Instagram)
|
||||
r.setNullDate("birthdate", o.Birthdate)
|
||||
r.setNullString("ethnicity", o.Ethnicity)
|
||||
r.setNullString("country", o.Country)
|
||||
|
|
@ -272,6 +263,13 @@ func (qb *PerformerStore) Create(ctx context.Context, newObject *models.Performe
|
|||
}
|
||||
}
|
||||
|
||||
if newObject.URLs.Loaded() {
|
||||
const startPos = 0
|
||||
if err := performersURLsTableMgr.insertJoins(ctx, id, startPos, newObject.URLs.List()); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if newObject.TagIDs.Loaded() {
|
||||
if err := performersTagsTableMgr.insertJoins(ctx, id, newObject.TagIDs.List()); err != nil {
|
||||
return err
|
||||
|
|
@ -315,6 +313,12 @@ func (qb *PerformerStore) UpdatePartial(ctx context.Context, id int, partial mod
|
|||
}
|
||||
}
|
||||
|
||||
if partial.URLs != nil {
|
||||
if err := performersURLsTableMgr.modifyJoins(ctx, id, partial.URLs.Values, partial.URLs.Mode); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
if partial.TagIDs != nil {
|
||||
if err := performersTagsTableMgr.modifyJoins(ctx, id, partial.TagIDs.IDs, partial.TagIDs.Mode); err != nil {
|
||||
return nil, err
|
||||
|
|
@ -343,6 +347,12 @@ func (qb *PerformerStore) Update(ctx context.Context, updatedObject *models.Perf
|
|||
}
|
||||
}
|
||||
|
||||
if updatedObject.URLs.Loaded() {
|
||||
if err := performersURLsTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.URLs.List()); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if updatedObject.TagIDs.Loaded() {
|
||||
if err := performersTagsTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.TagIDs.List()); err != nil {
|
||||
return err
|
||||
|
|
@ -785,6 +795,10 @@ func (qb *PerformerStore) GetAliases(ctx context.Context, performerID int) ([]st
|
|||
return performersAliasesTableMgr.get(ctx, performerID)
|
||||
}
|
||||
|
||||
func (qb *PerformerStore) GetURLs(ctx context.Context, performerID int) ([]string, error) {
|
||||
return performersURLsTableMgr.get(ctx, performerID)
|
||||
}
|
||||
|
||||
func (qb *PerformerStore) GetStashIDs(ctx context.Context, performerID int) ([]models.StashID, error) {
|
||||
return performersStashIDsTableMgr.get(ctx, performerID)
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -134,7 +134,7 @@ func (qb *performerFilterHandler) criterionHandler() criterionHandler {
        stringCriterionHandler(filter.Piercings, tableName+".piercings"),
        intCriterionHandler(filter.Rating100, tableName+".rating", nil),
        stringCriterionHandler(filter.HairColor, tableName+".hair_color"),
        stringCriterionHandler(filter.URL, tableName+".url"),
        qb.urlsCriterionHandler(filter.URL),
        intCriterionHandler(filter.Weight, tableName+".weight", nil),
        criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) {
            if filter.StashID != nil {

@@ -211,6 +211,9 @@ func (qb *performerFilterHandler) performerIsMissingCriterionHandler(isMissing *
    return func(ctx context.Context, f *filterBuilder) {
        if isMissing != nil && *isMissing != "" {
            switch *isMissing {
            case "url":
                performersURLsTableMgr.join(f, "", "performers.id")
                f.addWhere("performer_urls.url IS NULL")
            case "scenes": // Deprecated: use `scene_count == 0` filter instead
                f.addLeftJoin(performersScenesTable, "scenes_join", "scenes_join.performer_id = performers.id")
                f.addWhere("scenes_join.scene_id IS NULL")

@@ -241,6 +244,20 @@ func (qb *performerFilterHandler) performerAgeFilterCriterionHandler(age *models
    }
}

func (qb *performerFilterHandler) urlsCriterionHandler(url *models.StringCriterionInput) criterionHandlerFunc {
    h := stringListCriterionHandlerBuilder{
        primaryTable: performerTable,
        primaryFK:    performerIDColumn,
        joinTable:    performerURLsTable,
        stringColumn: performerURLColumn,
        addJoinTable: func(f *filterBuilder) {
            performersURLsTableMgr.join(f, "", "performers.id")
        },
    }

    return h.handler(url)
}

func (qb *performerFilterHandler) aliasCriterionHandler(alias *models.StringCriterionInput) criterionHandlerFunc {
    h := stringListCriterionHandlerBuilder{
        primaryTable: performerTable,
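filter.URL is now resolved by urlsCriterionHandler against the performer_urls join table, so a single criterion can match any of a performer's URLs rather than only the old performers.url column. A hedged sketch of constructing such a filter, mirroring the filter types exercised by TestPerformerQueryURL further down; the sample value is made up:

// Illustrative only; mirrors the filter types used in the tests below.
func examplePerformerURLFilter() models.PerformerFilterType {
    urlCriterion := models.StringCriterionInput{
        Value:    "example.org", // sample value, not from the diff
        Modifier: models.CriterionModifierIncludes,
    }
    // Matches a performer if any row in performer_urls satisfies the criterion.
    return models.PerformerFilterType{
        URL: &urlCriterion,
    }
}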
@@ -22,6 +22,11 @@ func loadPerformerRelationships(ctx context.Context, expected models.Performer,
            return err
        }
    }
    if expected.URLs.Loaded() {
        if err := actual.LoadURLs(ctx, db.Performer); err != nil {
            return err
        }
    }
    if expected.TagIDs.Loaded() {
        if err := actual.LoadTagIDs(ctx, db.Performer); err != nil {
            return err

@@ -45,6 +50,7 @@ func Test_PerformerStore_Create(t *testing.T) {
        url       = "url"
        twitter   = "twitter"
        instagram = "instagram"
        urls      = []string{url, twitter, instagram}
        rating    = 3
        ethnicity = "ethnicity"
        country   = "country"

@@ -84,9 +90,7 @@ func Test_PerformerStore_Create(t *testing.T) {
        Name:           name,
        Disambiguation: disambiguation,
        Gender:         &gender,
        URL:            url,
        Twitter:        twitter,
        Instagram:      instagram,
        URLs:           models.NewRelatedStrings(urls),
        Birthdate:      &birthdate,
        Ethnicity:      ethnicity,
        Country:        country,

@@ -193,6 +197,7 @@ func Test_PerformerStore_Update(t *testing.T) {
        url       = "url"
        twitter   = "twitter"
        instagram = "instagram"
        urls      = []string{url, twitter, instagram}
        rating    = 3
        ethnicity = "ethnicity"
        country   = "country"

@@ -233,9 +238,7 @@ func Test_PerformerStore_Update(t *testing.T) {
        Name:           name,
        Disambiguation: disambiguation,
        Gender:         &gender,
        URL:            url,
        Twitter:        twitter,
        Instagram:      instagram,
        URLs:           models.NewRelatedStrings(urls),
        Birthdate:      &birthdate,
        Ethnicity:      ethnicity,
        Country:        country,

@@ -277,6 +280,7 @@ func Test_PerformerStore_Update(t *testing.T) {
        &models.Performer{
            ID:       performerIDs[performerIdxWithGallery],
            Aliases:  models.NewRelatedStrings([]string{}),
            URLs:     models.NewRelatedStrings([]string{}),
            TagIDs:   models.NewRelatedIDs([]int{}),
            StashIDs: models.NewRelatedStashIDs([]models.StashID{}),
        },

@@ -341,9 +345,7 @@ func clearPerformerPartial() models.PerformerPartial {
    return models.PerformerPartial{
        Disambiguation: nullString,
        Gender:         nullString,
        URL:            nullString,
        Twitter:        nullString,
        Instagram:      nullString,
        URLs:           &models.UpdateStrings{Mode: models.RelationshipUpdateModeSet},
        Birthdate:      nullDate,
        Ethnicity:      nullString,
        Country:        nullString,

@@ -376,6 +378,7 @@ func Test_PerformerStore_UpdatePartial(t *testing.T) {
        url       = "url"
        twitter   = "twitter"
        instagram = "instagram"
        urls      = []string{url, twitter, instagram}
        rating    = 3
        ethnicity = "ethnicity"
        country   = "country"

@@ -418,21 +421,22 @@ func Test_PerformerStore_UpdatePartial(t *testing.T) {
        Name:           models.NewOptionalString(name),
        Disambiguation: models.NewOptionalString(disambiguation),
        Gender:         models.NewOptionalString(gender.String()),
        URL:            models.NewOptionalString(url),
        Twitter:        models.NewOptionalString(twitter),
        Instagram:      models.NewOptionalString(instagram),
        Birthdate:      models.NewOptionalDate(birthdate),
        Ethnicity:      models.NewOptionalString(ethnicity),
        Country:        models.NewOptionalString(country),
        EyeColor:       models.NewOptionalString(eyeColor),
        Height:         models.NewOptionalInt(height),
        Measurements:   models.NewOptionalString(measurements),
        FakeTits:       models.NewOptionalString(fakeTits),
        PenisLength:    models.NewOptionalFloat64(penisLength),
        Circumcised:    models.NewOptionalString(circumcised.String()),
        CareerLength:   models.NewOptionalString(careerLength),
        Tattoos:        models.NewOptionalString(tattoos),
        Piercings:      models.NewOptionalString(piercings),
        URLs: &models.UpdateStrings{
            Values: urls,
            Mode:   models.RelationshipUpdateModeSet,
        },
        Birthdate:    models.NewOptionalDate(birthdate),
        Ethnicity:    models.NewOptionalString(ethnicity),
        Country:      models.NewOptionalString(country),
        EyeColor:     models.NewOptionalString(eyeColor),
        Height:       models.NewOptionalInt(height),
        Measurements: models.NewOptionalString(measurements),
        FakeTits:     models.NewOptionalString(fakeTits),
        PenisLength:  models.NewOptionalFloat64(penisLength),
        Circumcised:  models.NewOptionalString(circumcised.String()),
        CareerLength: models.NewOptionalString(careerLength),
        Tattoos:      models.NewOptionalString(tattoos),
        Piercings:    models.NewOptionalString(piercings),
        Aliases: &models.UpdateStrings{
            Values: aliases,
            Mode:   models.RelationshipUpdateModeSet,

@@ -469,9 +473,7 @@ func Test_PerformerStore_UpdatePartial(t *testing.T) {
        Name:           name,
        Disambiguation: disambiguation,
        Gender:         &gender,
        URL:            url,
        Twitter:        twitter,
        Instagram:      instagram,
        URLs:           models.NewRelatedStrings(urls),
        Birthdate:      &birthdate,
        Ethnicity:      ethnicity,
        Country:        country,

@@ -516,6 +518,7 @@ func Test_PerformerStore_UpdatePartial(t *testing.T) {
        ID:       performerIDs[performerIdxWithTwoTags],
        Name:     getPerformerStringValue(performerIdxWithTwoTags, "Name"),
        Favorite: getPerformerBoolValue(performerIdxWithTwoTags),
        URLs:     models.NewRelatedStrings([]string{}),
        Aliases:  models.NewRelatedStrings([]string{}),
        TagIDs:   models.NewRelatedIDs([]int{}),
        StashIDs: models.NewRelatedStashIDs([]models.StashID{}),

@@ -1290,7 +1293,14 @@ func TestPerformerQueryURL(t *testing.T) {

    verifyFn := func(g *models.Performer) {
        t.Helper()
        verifyString(t, g.URL, urlCriterion)

        urls := g.URLs.List()
        var url string
        if len(urls) > 0 {
            url = urls[0]
        }

        verifyString(t, url, urlCriterion)
    }

    verifyPerformerQuery(t, filter, verifyFn)

@@ -1318,6 +1328,12 @@ func verifyPerformerQuery(t *testing.T, filter models.PerformerFilterType, verif
    t.Helper()
    performers := queryPerformers(ctx, t, &filter, nil)

    for _, performer := range performers {
        if err := performer.LoadURLs(ctx, db.Performer); err != nil {
            t.Errorf("Error loading movie relationships: %v", err)
        }
    }

    // assume it should find at least one
    assert.Greater(t, len(performers), 0)
41 pkg/sqlite/relationships.go Normal file

@@ -0,0 +1,41 @@
package sqlite

import (
    "context"

    "github.com/stashapp/stash/pkg/models"
)

type idRelationshipStore struct {
    joinTable *joinTable
}

func (s *idRelationshipStore) createRelationships(ctx context.Context, id int, fkIDs models.RelatedIDs) error {
    if fkIDs.Loaded() {
        if err := s.joinTable.insertJoins(ctx, id, fkIDs.List()); err != nil {
            return err
        }
    }

    return nil
}

func (s *idRelationshipStore) modifyRelationships(ctx context.Context, id int, fkIDs *models.UpdateIDs) error {
    if fkIDs != nil {
        if err := s.joinTable.modifyJoins(ctx, id, fkIDs.IDs, fkIDs.Mode); err != nil {
            return err
        }
    }

    return nil
}

func (s *idRelationshipStore) replaceRelationships(ctx context.Context, id int, fkIDs models.RelatedIDs) error {
    if fkIDs.Loaded() {
        if err := s.joinTable.replaceJoins(ctx, id, fkIDs.List()); err != nil {
            return err
        }
    }

    return nil
}
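The new pkg/sqlite/relationships.go factors the repeated "if loaded / if non-nil, write the join rows" pattern into one helper. Later hunks in this diff embed it (via tagRelationshipStore) in StudioStore; a condensed, illustrative sketch of that wiring follows. The exampleStore and newExampleStore names are hypothetical; every other identifier appears in this diff:

// Illustrative sketch only; see the StudioStore changes further down for the real wiring.
type exampleStore struct {
    tagRelationshipStore // embeds idRelationshipStore
}

func newExampleStore() *exampleStore {
    return &exampleStore{
        tagRelationshipStore: tagRelationshipStore{
            idRelationshipStore: idRelationshipStore{
                joinTable: studiosTagsTableMgr, // any *joinTable works here
            },
        },
    }
}

// Create/UpdatePartial/Update then reduce to single calls:
//   s.createRelationships(ctx, id, obj.TagIDs)
//   s.modifyRelationships(ctx, id, partial.TagIDs)
//   s.replaceRelationships(ctx, id, obj.TagIDs)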
@@ -141,23 +141,6 @@ func (qb *SavedFilterStore) Update(ctx context.Context, updatedObject *models.Sa
    return nil
}

func (qb *SavedFilterStore) SetDefault(ctx context.Context, obj *models.SavedFilter) error {
    // find the existing default
    existing, err := qb.FindDefault(ctx, obj.Mode)
    if err != nil {
        return err
    }

    obj.Name = savedFilterDefaultName

    if existing != nil {
        obj.ID = existing.ID
        return qb.Update(ctx, obj)
    }

    return qb.Create(ctx, obj)
}

func (qb *SavedFilterStore) Destroy(ctx context.Context, id int) error {
    return qb.destroyExisting(ctx, []int{id})
}

@@ -258,22 +241,6 @@ func (qb *SavedFilterStore) FindByMode(ctx context.Context, mode models.FilterMo
    return ret, nil
}

func (qb *SavedFilterStore) FindDefault(ctx context.Context, mode models.FilterMode) (*models.SavedFilter, error) {
    // SELECT * FROM saved_filters WHERE mode = ? AND name = ?
    table := qb.table()
    sq := qb.selectDataset().Prepared(true).Where(
        table.Col("mode").Eq(mode),
        table.Col("name").Eq(savedFilterDefaultName),
    )

    ret, err := qb.get(ctx, sq)
    if err != nil && !errors.Is(err, sql.ErrNoRows) {
        return nil, err
    }

    return ret, nil
}

func (qb *SavedFilterStore) All(ctx context.Context) ([]*models.SavedFilter, error) {
    return qb.getMany(ctx, qb.selectDataset())
}
@@ -96,66 +96,6 @@ func TestSavedFilterDestroy(t *testing.T) {
    })
}

func TestSavedFilterFindDefault(t *testing.T) {
    withTxn(func(ctx context.Context) error {
        def, err := db.SavedFilter.FindDefault(ctx, models.FilterModeScenes)
        if err == nil {
            assert.Equal(t, savedFilterIDs[savedFilterIdxDefaultScene], def.ID)
        }

        return err
    })
}

func TestSavedFilterSetDefault(t *testing.T) {
    filterQ := ""
    filterPage := 1
    filterPerPage := 40
    filterSort := "date"
    filterDirection := models.SortDirectionEnumAsc
    findFilter := models.FindFilterType{
        Q:         &filterQ,
        Page:      &filterPage,
        PerPage:   &filterPerPage,
        Sort:      &filterSort,
        Direction: &filterDirection,
    }
    objectFilter := map[string]interface{}{
        "test": "foo",
    }
    uiOptions := map[string]interface{}{
        "display_mode": 1,
        "zoom_index":   1,
    }

    withTxn(func(ctx context.Context) error {
        err := db.SavedFilter.SetDefault(ctx, &models.SavedFilter{
            Mode:         models.FilterModeMovies,
            FindFilter:   &findFilter,
            ObjectFilter: objectFilter,
            UIOptions:    uiOptions,
        })

        return err
    })

    var defID int
    withTxn(func(ctx context.Context) error {
        def, err := db.SavedFilter.FindDefault(ctx, models.FilterModeMovies)
        if err == nil {
            defID = def.ID
            assert.Equal(t, &findFilter, def.FindFilter)
        }

        return err
    })

    // destroy it again
    withTxn(func(ctx context.Context) error {
        return db.SavedFilter.Destroy(ctx, defID)
    })
}

// TODO Update
// TODO Destroy
// TODO Find
@@ -301,6 +301,7 @@ func (qb *SceneMarkerStore) makeQuery(ctx context.Context, sceneMarkerFilter *mo
    distinctIDs(&query, sceneMarkerTable)

    if q := findFilter.Q; q != nil && *q != "" {
        query.join(sceneTable, "", "scenes.id = scene_markers.scene_id")
        query.join(tagTable, "", "scene_markers.primary_tag_id = tags.id")
        searchColumns := []string{"scene_markers.title", "scenes.title", "tags.name"}
        query.parseQueryString(searchColumns, *q)

@@ -74,6 +74,27 @@ func TestMarkerCountByTagID(t *testing.T) {
    })
}

func TestMarkerQueryQ(t *testing.T) {
    withTxn(func(ctx context.Context) error {
        q := getSceneTitle(sceneIdxWithMarkers)
        m, _, err := db.SceneMarker.Query(ctx, nil, &models.FindFilterType{
            Q: &q,
        })

        if err != nil {
            t.Errorf("Error querying scene markers: %s", err.Error())
        }

        if !assert.Greater(t, len(m), 0) {
            return nil
        }

        assert.Equal(t, sceneIDs[sceneIdxWithMarkers], m[0].SceneID)

        return nil
    })
}

func TestMarkerQuerySortBySceneUpdated(t *testing.T) {
    withTxn(func(ctx context.Context) error {
        sort := "scenes_updated_at"

@@ -150,9 +150,12 @@ const (
const (
    movieIdxWithScene = iota
    movieIdxWithStudio
    movieIdxWithTag
    movieIdxWithTwoTags
    movieIdxWithThreeTags
    // movies with dup names start from the end
    // create 10 more basic movies (can remove this if we add more indexes)
    movieIdxWithDupName = movieIdxWithStudio + 10
    // create 7 more basic movies (can remove this if we add more indexes)
    movieIdxWithDupName = movieIdxWithStudio + 7

    moviesNameCase   = movieIdxWithDupName
    moviesNameNoCase = 1

@@ -204,6 +207,9 @@ const (
    tagIdxWithPerformer
    tagIdx1WithPerformer
    tagIdx2WithPerformer
    tagIdxWithStudio
    tagIdx1WithStudio
    tagIdx2WithStudio
    tagIdxWithGallery
    tagIdx1WithGallery
    tagIdx2WithGallery

@@ -214,6 +220,10 @@ const (
    tagIdxWithParentAndChild
    tagIdxWithGrandParent
    tagIdx2WithMarkers
    tagIdxWithMovie
    tagIdx1WithMovie
    tagIdx2WithMovie
    tagIdx3WithMovie
    // new indexes above
    // tags with dup names start from the end
    tagIdx1WithDupName

@@ -238,6 +248,10 @@ const (
    studioIdxWithScenePerformer
    studioIdxWithImagePerformer
    studioIdxWithGalleryPerformer
    studioIdxWithTag
    studioIdx2WithTag
    studioIdxWithTwoTags
    studioIdxWithParentTag
    studioIdxWithGrandChild
    studioIdxWithParentAndChild
    studioIdxWithGrandParent

@@ -487,6 +501,12 @@ var (
    movieStudioLinks = [][2]int{
        {movieIdxWithStudio, studioIdxWithMovie},
    }

    movieTags = linkMap{
        movieIdxWithTag:       {tagIdxWithMovie},
        movieIdxWithTwoTags:   {tagIdx1WithMovie, tagIdx2WithMovie},
        movieIdxWithThreeTags: {tagIdx1WithMovie, tagIdx2WithMovie, tagIdx3WithMovie},
    }
)

var (

@@ -497,6 +517,15 @@ var (
    }
)

var (
    studioTags = linkMap{
        studioIdxWithTag:       {tagIdxWithStudio},
        studioIdx2WithTag:      {tagIdx2WithStudio},
        studioIdxWithTwoTags:   {tagIdx1WithStudio, tagIdx2WithStudio},
        studioIdxWithParentTag: {tagIdxWithParentAndChild},
    }
)

var (
    performerTags = linkMap{
        performerIdxWithTag: {tagIdxWithPerformer},

@@ -622,14 +651,14 @@ func populateDB() error {

    // TODO - link folders to zip files

    if err := createMovies(ctx, db.Movie, moviesNameCase, moviesNameNoCase); err != nil {
        return fmt.Errorf("error creating movies: %s", err.Error())
    }

    if err := createTags(ctx, db.Tag, tagsNameCase, tagsNameNoCase); err != nil {
        return fmt.Errorf("error creating tags: %s", err.Error())
    }

    if err := createMovies(ctx, db.Movie, moviesNameCase, moviesNameNoCase); err != nil {
        return fmt.Errorf("error creating movies: %s", err.Error())
    }

    if err := createPerformers(ctx, performersNameCase, performersNameNoCase); err != nil {
        return fmt.Errorf("error creating performers: %s", err.Error())
    }

@@ -1321,6 +1350,8 @@ func createMovies(ctx context.Context, mqb models.MovieReaderWriter, n int, o in
    index := i
    name := namePlain

    tids := indexesToIDs(tagIDs, movieTags[i])

    if i >= n { // i<n tags get normal names
        name = nameNoCase // i>=n movies get dup names if case is not checked
        index = n + o - (i + 1) // for the name to be the same the number (index) must be the same also

@@ -1333,6 +1364,7 @@ func createMovies(ctx context.Context, mqb models.MovieReaderWriter, n int, o in
        URLs: models.NewRelatedStrings([]string{
            getMovieEmptyString(i, urlField),
        }),
        TagIDs: models.NewRelatedIDs(tids),
    }

    err := mqb.Create(ctx, &movie)

@@ -1358,6 +1390,15 @@ func getPerformerNullStringValue(index int, field string) string {
    return ret.String
}

func getPerformerEmptyString(index int, field string) string {
    v := getPrefixedNullStringValue("performer", index, field)
    if !v.Valid {
        return ""
    }

    return v.String
}

func getPerformerBoolValue(index int) bool {
    index = index % 2
    return index == 1

@@ -1463,17 +1504,19 @@ func createPerformers(ctx context.Context, n int, o int) error {
        Name:           getPerformerStringValue(index, name),
        Disambiguation: getPerformerStringValue(index, "disambiguation"),
        Aliases:        models.NewRelatedStrings(performerAliases(index)),
        URL:            getPerformerNullStringValue(i, urlField),
        Favorite:       getPerformerBoolValue(i),
        Birthdate:      getPerformerBirthdate(i),
        DeathDate:      getPerformerDeathDate(i),
        Details:        getPerformerStringValue(i, "Details"),
        Ethnicity:      getPerformerStringValue(i, "Ethnicity"),
        PenisLength:    getPerformerPenisLength(i),
        Circumcised:    getPerformerCircumcised(i),
        Rating:         getIntPtr(getRating(i)),
        IgnoreAutoTag:  getIgnoreAutoTag(i),
        TagIDs:         models.NewRelatedIDs(tids),
        URLs: models.NewRelatedStrings([]string{
            getPerformerEmptyString(i, urlField),
        }),
        Favorite:      getPerformerBoolValue(i),
        Birthdate:     getPerformerBirthdate(i),
        DeathDate:     getPerformerDeathDate(i),
        Details:       getPerformerStringValue(i, "Details"),
        Ethnicity:     getPerformerStringValue(i, "Ethnicity"),
        PenisLength:   getPerformerPenisLength(i),
        Circumcised:   getPerformerCircumcised(i),
        Rating:        getIntPtr(getRating(i)),
        IgnoreAutoTag: getIgnoreAutoTag(i),
        TagIDs:        models.NewRelatedIDs(tids),
    }

    careerLength := getPerformerCareerLength(i)

@@ -1539,6 +1582,11 @@ func getTagPerformerCount(id int) int {
    return len(performerTags.reverseLookup(idx))
}

func getTagStudioCount(id int) int {
    idx := indexFromID(tagIDs, id)
    return len(studioTags.reverseLookup(idx))
}

func getTagParentCount(id int) int {
    if id == tagIDs[tagIdxWithParentTag] || id == tagIDs[tagIdxWithGrandParent] || id == tagIDs[tagIdxWithParentAndChild] {
        return 1

@@ -1654,11 +1702,13 @@ func createStudios(ctx context.Context, n int, o int) error {
    // studios [ i ] and [ n + o - i - 1 ] should have similar names with only the Name!=NaMe part different

    name = getStudioStringValue(index, name)
    tids := indexesToIDs(tagIDs, studioTags[i])
    studio := models.Studio{
        Name:          name,
        URL:           getStudioStringValue(index, urlField),
        Favorite:      getStudioBoolValue(index),
        IgnoreAutoTag: getIgnoreAutoTag(i),
        TagIDs:        models.NewRelatedIDs(tids),
    }
    // only add aliases for some scenes
    if i == studioIdxWithMovie || i%5 == 0 {
@@ -25,6 +25,7 @@ const (
    studioParentIDColumn  = "parent_id"
    studioNameColumn      = "name"
    studioImageBlobColumn = "image_blob"
    studiosTagsTable      = "studios_tags"
)

type studioRow struct {

@@ -94,6 +95,7 @@ type studioRepositoryType struct {
    repository

    stashIDs stashIDRepository
    tags     joinRepository

    scenes repository
    images repository

@@ -124,11 +126,21 @@ var (
            tableName: galleryTable,
            idColumn:  studioIDColumn,
        },
        tags: joinRepository{
            repository: repository{
                tableName: studiosTagsTable,
                idColumn:  studioIDColumn,
            },
            fkColumn:     tagIDColumn,
            foreignTable: tagTable,
            orderBy:      "tags.name ASC",
        },
    }
)

type StudioStore struct {
    blobJoinQueryBuilder
    tagRelationshipStore

    tableMgr *table
}

@@ -139,6 +151,11 @@ func NewStudioStore(blobStore *BlobStore) *StudioStore {
            blobStore: blobStore,
            joinTable: studioTable,
        },
        tagRelationshipStore: tagRelationshipStore{
            idRelationshipStore: idRelationshipStore{
                joinTable: studiosTagsTableMgr,
            },
        },

        tableMgr: studioTableMgr,
    }

@@ -173,6 +190,10 @@ func (qb *StudioStore) Create(ctx context.Context, newObject *models.Studio) err
        }
    }

    if err := qb.tagRelationshipStore.createRelationships(ctx, id, newObject.TagIDs); err != nil {
        return err
    }

    if newObject.StashIDs.Loaded() {
        if err := studiosStashIDsTableMgr.insertJoins(ctx, id, newObject.StashIDs.List()); err != nil {
            return err

@@ -213,6 +234,10 @@ func (qb *StudioStore) UpdatePartial(ctx context.Context, input models.StudioPar
        }
    }

    if err := qb.tagRelationshipStore.modifyRelationships(ctx, input.ID, input.TagIDs); err != nil {
        return nil, err
    }

    if input.StashIDs != nil {
        if err := studiosStashIDsTableMgr.modifyJoins(ctx, input.ID, input.StashIDs.StashIDs, input.StashIDs.Mode); err != nil {
            return nil, err

@@ -237,6 +262,10 @@ func (qb *StudioStore) Update(ctx context.Context, updatedObject *models.Studio)
        }
    }

    if err := qb.tagRelationshipStore.replaceRelationships(ctx, updatedObject.ID, updatedObject.TagIDs); err != nil {
        return err
    }

    if updatedObject.StashIDs.Loaded() {
        if err := studiosStashIDsTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.StashIDs.List()); err != nil {
            return err

@@ -538,6 +567,15 @@ func (qb *StudioStore) Query(ctx context.Context, studioFilter *models.StudioFil
    return studios, countResult, nil
}

func (qb *StudioStore) QueryCount(ctx context.Context, studioFilter *models.StudioFilterType, findFilter *models.FindFilterType) (int, error) {
    query, err := qb.makeQuery(ctx, studioFilter, findFilter)
    if err != nil {
        return 0, err
    }

    return query.executeCount(ctx)
}

var studioSortOptions = sortOptions{
    "child_count",
    "created_at",

@@ -569,6 +607,8 @@ func (qb *StudioStore) getStudioSort(findFilter *models.FindFilterType) (string,

    sortQuery := ""
    switch sort {
    case "tag_count":
        sortQuery += getCountSort(studioTable, studiosTagsTable, studioIDColumn, direction)
    case "scenes_count":
        sortQuery += getCountSort(studioTable, sceneTable, studioIDColumn, direction)
    case "images_count":

@@ -74,11 +74,13 @@ func (qb *studioFilterHandler) criterionHandler() criterionHandler {
        },

        qb.isMissingCriterionHandler(studioFilter.IsMissing),
        qb.tagCountCriterionHandler(studioFilter.TagCount),
        qb.sceneCountCriterionHandler(studioFilter.SceneCount),
        qb.imageCountCriterionHandler(studioFilter.ImageCount),
        qb.galleryCountCriterionHandler(studioFilter.GalleryCount),
        qb.parentCriterionHandler(studioFilter.Parents),
        qb.aliasCriterionHandler(studioFilter.Aliases),
        qb.tagsCriterionHandler(studioFilter.Tags),
        qb.childCountCriterionHandler(studioFilter.ChildCount),
        &timestampCriterionHandler{studioFilter.CreatedAt, studioTable + ".created_at", nil},
        &timestampCriterionHandler{studioFilter.UpdatedAt, studioTable + ".updated_at", nil},

@@ -88,7 +90,7 @@ func (qb *studioFilterHandler) criterionHandler() criterionHandler {
            relatedRepo:    sceneRepository.repository,
            relatedHandler: &sceneFilterHandler{studioFilter.ScenesFilter},
            joinFn: func(f *filterBuilder) {
                sceneRepository.innerJoin(f, "", "studios.id")
                studioRepository.scenes.innerJoin(f, "", "studios.id")
            },
        },

@@ -161,6 +163,16 @@ func (qb *studioFilterHandler) galleryCountCriterionHandler(galleryCount *models
    }
}

func (qb *studioFilterHandler) tagCountCriterionHandler(tagCount *models.IntCriterionInput) criterionHandlerFunc {
    h := countCriterionHandlerBuilder{
        primaryTable: studioTable,
        joinTable:    studiosTagsTable,
        primaryFK:    studioIDColumn,
    }

    return h.handler(tagCount)
}

func (qb *studioFilterHandler) parentCriterionHandler(parents *models.MultiCriterionInput) criterionHandlerFunc {
    addJoinsFunc := func(f *filterBuilder) {
        f.addLeftJoin("studios", "parent_studio", "parent_studio.id = studios.parent_id")

@@ -200,3 +212,18 @@ func (qb *studioFilterHandler) childCountCriterionHandler(childCount *models.Int
        }
    }
}

func (qb *studioFilterHandler) tagsCriterionHandler(tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc {
    h := joinedHierarchicalMultiCriterionHandlerBuilder{
        primaryTable: studioTable,
        foreignTable: tagTable,
        foreignFK:    "tag_id",

        relationsTable: "tags_relations",
        joinTable:      studiosTagsTable,
        joinAs:         "studio_tag",
        primaryFK:      studioIDColumn,
    }

    return h.handler(tags)
}

@@ -704,6 +704,110 @@ func TestStudioQueryRating(t *testing.T) {
    verifyStudiosRating(t, ratingCriterion)
}

func queryStudios(ctx context.Context, t *testing.T, studioFilter *models.StudioFilterType, findFilter *models.FindFilterType) []*models.Studio {
    t.Helper()
    studios, _, err := db.Studio.Query(ctx, studioFilter, findFilter)
    if err != nil {
        t.Errorf("Error querying studio: %s", err.Error())
    }

    return studios
}

func TestStudioQueryTags(t *testing.T) {
    withTxn(func(ctx context.Context) error {
        tagCriterion := models.HierarchicalMultiCriterionInput{
            Value: []string{
                strconv.Itoa(tagIDs[tagIdxWithStudio]),
                strconv.Itoa(tagIDs[tagIdx1WithStudio]),
            },
            Modifier: models.CriterionModifierIncludes,
        }

        studioFilter := models.StudioFilterType{
            Tags: &tagCriterion,
        }

        // ensure ids are correct
        studios := queryStudios(ctx, t, &studioFilter, nil)
        assert.Len(t, studios, 2)
        for _, studio := range studios {
            assert.True(t, studio.ID == studioIDs[studioIdxWithTag] || studio.ID == studioIDs[studioIdxWithTwoTags])
        }

        tagCriterion = models.HierarchicalMultiCriterionInput{
            Value: []string{
                strconv.Itoa(tagIDs[tagIdx1WithStudio]),
                strconv.Itoa(tagIDs[tagIdx2WithStudio]),
            },
            Modifier: models.CriterionModifierIncludesAll,
        }

        studios = queryStudios(ctx, t, &studioFilter, nil)

        assert.Len(t, studios, 1)
        assert.Equal(t, sceneIDs[studioIdxWithTwoTags], studios[0].ID)

        tagCriterion = models.HierarchicalMultiCriterionInput{
            Value: []string{
                strconv.Itoa(tagIDs[tagIdx1WithStudio]),
            },
            Modifier: models.CriterionModifierExcludes,
        }

        q := getSceneStringValue(studioIdxWithTwoTags, titleField)
        findFilter := models.FindFilterType{
            Q: &q,
        }

        studios = queryStudios(ctx, t, &studioFilter, &findFilter)
        assert.Len(t, studios, 0)

        return nil
    })
}

func TestStudioQueryTagCount(t *testing.T) {
    const tagCount = 1
    tagCountCriterion := models.IntCriterionInput{
        Value:    tagCount,
        Modifier: models.CriterionModifierEquals,
    }

    verifyStudiosTagCount(t, tagCountCriterion)

    tagCountCriterion.Modifier = models.CriterionModifierNotEquals
    verifyStudiosTagCount(t, tagCountCriterion)

    tagCountCriterion.Modifier = models.CriterionModifierGreaterThan
    verifyStudiosTagCount(t, tagCountCriterion)

    tagCountCriterion.Modifier = models.CriterionModifierLessThan
    verifyStudiosTagCount(t, tagCountCriterion)
}

func verifyStudiosTagCount(t *testing.T, tagCountCriterion models.IntCriterionInput) {
    withTxn(func(ctx context.Context) error {
        sqb := db.Studio
        studioFilter := models.StudioFilterType{
            TagCount: &tagCountCriterion,
        }

        studios := queryStudios(ctx, t, &studioFilter, nil)
        assert.Greater(t, len(studios), 0)

        for _, studio := range studios {
            ids, err := sqb.GetTagIDs(ctx, studio.ID)
            if err != nil {
                return err
            }
            verifyInt(t, len(ids), tagCountCriterion)
        }

        return nil
    })
}

func verifyStudioQuery(t *testing.T, filter models.StudioFilterType, verifyFn func(ctx context.Context, s *models.Studio)) {
    withTxn(func(ctx context.Context) error {
        t.Helper()
@@ -155,6 +155,10 @@ func (t *table) join(j joiner, as string, parentIDCol string) {
type joinTable struct {
    table
    fkColumn exp.IdentifierExpression

    // required for ordering
    foreignTable *table
    orderBy      exp.OrderedExpression
}

func (t *joinTable) invert() *joinTable {

@@ -170,6 +174,13 @@ func (t *joinTable) invert() *joinTable {
func (t *joinTable) get(ctx context.Context, id int) ([]int, error) {
    q := dialect.Select(t.fkColumn).From(t.table.table).Where(t.idColumn.Eq(id))

    if t.orderBy != nil {
        if t.foreignTable != nil {
            q = q.InnerJoin(t.foreignTable.table, goqu.On(t.foreignTable.idColumn.Eq(t.fkColumn)))
        }
        q = q.Order(t.orderBy)
    }

    const single = false
    var ret []int
    if err := queryFunc(ctx, q, single, func(rows *sqlx.Rows) error {

@@ -29,13 +29,16 @@ var (
    scenesURLsJoinTable = goqu.T(scenesURLsTable)

    performersAliasesJoinTable  = goqu.T(performersAliasesTable)
    performersURLsJoinTable     = goqu.T(performerURLsTable)
    performersTagsJoinTable     = goqu.T(performersTagsTable)
    performersStashIDsJoinTable = goqu.T("performer_stash_ids")

    studiosAliasesJoinTable  = goqu.T(studioAliasesTable)
    studiosTagsJoinTable     = goqu.T(studiosTagsTable)
    studiosStashIDsJoinTable = goqu.T("studio_stash_ids")

    moviesURLsJoinTable = goqu.T(movieURLsTable)
    moviesTagsJoinTable = goqu.T(moviesTagsTable)

    tagsAliasesJoinTable  = goqu.T(tagAliasesTable)
    tagRelationsJoinTable = goqu.T(tagRelationsTable)

@@ -254,6 +257,14 @@ var (
        stringColumn: performersAliasesJoinTable.Col(performerAliasColumn),
    }

    performersURLsTableMgr = &orderedValueTable[string]{
        table: table{
            table:    performersURLsJoinTable,
            idColumn: performersURLsJoinTable.Col(performerIDColumn),
        },
        valueColumn: performersURLsJoinTable.Col(performerURLColumn),
    }

    performersTagsTableMgr = &joinTable{
        table: table{
            table: performersTagsJoinTable,

@@ -284,6 +295,14 @@ var (
        stringColumn: studiosAliasesJoinTable.Col(studioAliasColumn),
    }

    studiosTagsTableMgr = &joinTable{
        table: table{
            table:    studiosTagsJoinTable,
            idColumn: studiosTagsJoinTable.Col(studioIDColumn),
        },
        fkColumn: studiosTagsJoinTable.Col(tagIDColumn),
    }

    studiosStashIDsTableMgr = &stashIDTable{
        table: table{
            table: studiosStashIDsJoinTable,

@@ -330,6 +349,16 @@ var (
        },
        valueColumn: moviesURLsJoinTable.Col(movieURLColumn),
    }

    moviesTagsTableMgr = &joinTable{
        table: table{
            table:    moviesTagsJoinTable,
            idColumn: moviesTagsJoinTable.Col(movieIDColumn),
        },
        fkColumn:     moviesTagsJoinTable.Col(tagIDColumn),
        foreignTable: tagTableMgr,
        orderBy:      tagTableMgr.table.Col("name").Asc(),
    }
)

var (
@@ -424,6 +424,18 @@ func (qb *TagStore) FindByGalleryID(ctx context.Context, galleryID int) ([]*mode
    return qb.queryTags(ctx, query, args)
}

func (qb *TagStore) FindByMovieID(ctx context.Context, movieID int) ([]*models.Tag, error) {
    query := `
        SELECT tags.* FROM tags
        LEFT JOIN movies_tags as movies_join on movies_join.tag_id = tags.id
        WHERE movies_join.movie_id = ?
        GROUP BY tags.id
    `
    query += qb.getDefaultTagSort()
    args := []interface{}{movieID}
    return qb.queryTags(ctx, query, args)
}

func (qb *TagStore) FindBySceneMarkerID(ctx context.Context, sceneMarkerID int) ([]*models.Tag, error) {
    query := `
        SELECT tags.* FROM tags

@@ -436,6 +448,18 @@ func (qb *TagStore) FindBySceneMarkerID(ctx context.Context, sceneMarkerID int)
    return qb.queryTags(ctx, query, args)
}

func (qb *TagStore) FindByStudioID(ctx context.Context, studioID int) ([]*models.Tag, error) {
    query := `
        SELECT tags.* FROM tags
        LEFT JOIN studios_tags as studios_join on studios_join.tag_id = tags.id
        WHERE studios_join.studio_id = ?
        GROUP BY tags.id
    `
    query += qb.getDefaultTagSort()
    args := []interface{}{studioID}
    return qb.queryTags(ctx, query, args)
}

func (qb *TagStore) FindByName(ctx context.Context, name string, nocase bool) (*models.Tag, error) {
    // query := "SELECT * FROM tags WHERE name = ?"
    // if nocase {

@@ -615,6 +639,8 @@ var tagSortOptions = sortOptions{
    "galleries_count",
    "id",
    "images_count",
    "movies_count",
    "studios_count",
    "name",
    "performers_count",
    "random",

@@ -655,6 +681,10 @@ func (qb *TagStore) getTagSort(query *queryBuilder, findFilter *models.FindFilte
        sortQuery += getCountSort(tagTable, galleriesTagsTable, tagIDColumn, direction)
    case "performers_count":
        sortQuery += getCountSort(tagTable, performersTagsTable, tagIDColumn, direction)
    case "studios_count":
        sortQuery += getCountSort(tagTable, studiosTagsTable, tagIDColumn, direction)
    case "movies_count":
        sortQuery += getCountSort(tagTable, moviesTagsTable, tagIDColumn, direction)
    default:
        sortQuery += getSort(sort, direction, "tags")
    }

@@ -752,6 +782,7 @@ func (qb *TagStore) Merge(ctx context.Context, source []int, destination int) er
        galleriesTagsTable: galleryIDColumn,
        imagesTagsTable:    imageIDColumn,
        "performers_tags":  "performer_id",
        "studios_tags":     "studio_id",
    }

    args = append(args, destination)

@@ -888,3 +919,17 @@ SELECT t.*, c.path FROM tags t INNER JOIN children c ON t.id = c.child_id

    return qb.queryTagPaths(ctx, query, args)
}

type tagRelationshipStore struct {
    idRelationshipStore
}

func (s *tagRelationshipStore) CountByTagID(ctx context.Context, tagID int) (int, error) {
    joinTable := s.joinTable.table.table
    q := dialect.Select(goqu.COUNT("*")).From(joinTable).Where(joinTable.Col(tagIDColumn).Eq(tagID))
    return count(ctx, q)
}

func (s *tagRelationshipStore) GetTagIDs(ctx context.Context, id int) ([]int, error) {
    return s.joinTable.get(ctx, id)
}

@@ -66,6 +66,8 @@ func (qb *tagFilterHandler) criterionHandler() criterionHandler {
    qb.imageCountCriterionHandler(tagFilter.ImageCount),
    qb.galleryCountCriterionHandler(tagFilter.GalleryCount),
    qb.performerCountCriterionHandler(tagFilter.PerformerCount),
    qb.studioCountCriterionHandler(tagFilter.StudioCount),
    qb.movieCountCriterionHandler(tagFilter.MovieCount),
    qb.markerCountCriterionHandler(tagFilter.MarkerCount),
    qb.parentsCriterionHandler(tagFilter.Parents),
    qb.childrenCriterionHandler(tagFilter.Children),

@@ -174,6 +176,28 @@ func (qb *tagFilterHandler) performerCountCriterionHandler(performerCount *model
    }
}

func (qb *tagFilterHandler) studioCountCriterionHandler(studioCount *models.IntCriterionInput) criterionHandlerFunc {
    return func(ctx context.Context, f *filterBuilder) {
        if studioCount != nil {
            f.addLeftJoin("studios_tags", "", "studios_tags.tag_id = tags.id")
            clause, args := getIntCriterionWhereClause("count(distinct studios_tags.studio_id)", *studioCount)

            f.addHaving(clause, args...)
        }
    }
}

func (qb *tagFilterHandler) movieCountCriterionHandler(movieCount *models.IntCriterionInput) criterionHandlerFunc {
    return func(ctx context.Context, f *filterBuilder) {
        if movieCount != nil {
            f.addLeftJoin("movies_tags", "", "movies_tags.tag_id = tags.id")
            clause, args := getIntCriterionWhereClause("count(distinct movies_tags.movie_id)", *movieCount)

            f.addHaving(clause, args...)
        }
    }
}

func (qb *tagFilterHandler) markerCountCriterionHandler(markerCount *models.IntCriterionInput) criterionHandlerFunc {
    return func(ctx context.Context, f *filterBuilder) {
        if markerCount != nil {

@@ -42,6 +42,33 @@ func TestMarkerFindBySceneMarkerID(t *testing.T) {
    })
}

func TestTagFindByMovieID(t *testing.T) {
    withTxn(func(ctx context.Context) error {
        tqb := db.Tag

        movieID := movieIDs[movieIdxWithTag]

        tags, err := tqb.FindByMovieID(ctx, movieID)

        if err != nil {
            t.Errorf("Error finding tags: %s", err.Error())
        }

        assert.Len(t, tags, 1)
        assert.Equal(t, tagIDs[tagIdxWithMovie], tags[0].ID)

        tags, err = tqb.FindByMovieID(ctx, 0)

        if err != nil {
            t.Errorf("Error finding tags: %s", err.Error())
        }

        assert.Len(t, tags, 0)

        return nil
    })
}

func TestTagFindByName(t *testing.T) {
    withTxn(func(ctx context.Context) error {
        tqb := db.Tag

@@ -203,6 +230,14 @@ func TestTagQuerySort(t *testing.T) {
        tags = queryTags(ctx, t, sqb, nil, findFilter)
        assert.Equal(tagIDs[tagIdx2WithPerformer], tags[0].ID)

        sortBy = "studios_count"
        tags = queryTags(ctx, t, sqb, nil, findFilter)
        assert.Equal(tagIDs[tagIdx2WithStudio], tags[0].ID)

        sortBy = "movies_count"
        tags = queryTags(ctx, t, sqb, nil, findFilter)
        assert.Equal(tagIDs[tagIdx1WithMovie], tags[0].ID)

        return nil
    })
}

@@ -538,6 +573,45 @@ func verifyTagPerformerCount(t *testing.T, imageCountCriterion models.IntCriteri
    })
}

func TestTagQueryStudioCount(t *testing.T) {
    countCriterion := models.IntCriterionInput{
        Value:    1,
        Modifier: models.CriterionModifierEquals,
    }

    verifyTagStudioCount(t, countCriterion)

    countCriterion.Modifier = models.CriterionModifierNotEquals
    verifyTagStudioCount(t, countCriterion)

    countCriterion.Modifier = models.CriterionModifierLessThan
    verifyTagStudioCount(t, countCriterion)

    countCriterion.Value = 0
    countCriterion.Modifier = models.CriterionModifierGreaterThan
    verifyTagStudioCount(t, countCriterion)
}

func verifyTagStudioCount(t *testing.T, imageCountCriterion models.IntCriterionInput) {
    withTxn(func(ctx context.Context) error {
        qb := db.Tag
        tagFilter := models.TagFilterType{
            StudioCount: &imageCountCriterion,
        }

        tags, _, err := qb.Query(ctx, &tagFilter, nil)
        if err != nil {
            t.Errorf("Error querying tag: %s", err.Error())
        }

        for _, tag := range tags {
            verifyInt(t, getTagStudioCount(tag.ID), imageCountCriterion)
        }

        return nil
    })
}

func TestTagQueryParentCount(t *testing.T) {
    countCriterion := models.IntCriterionInput{
        Value: 1,

@@ -851,6 +925,9 @@ func TestTagMerge(t *testing.T) {
    tagIdxWithPerformer,
    tagIdx1WithPerformer,
    tagIdx2WithPerformer,
    tagIdxWithStudio,
    tagIdx1WithStudio,
    tagIdx2WithStudio,
    tagIdxWithGallery,
    tagIdx1WithGallery,
    tagIdx2WithGallery,

@@ -939,6 +1016,14 @@ func TestTagMerge(t *testing.T) {

        assert.Contains(performerTagIDs, destID)

        // ensure studio points to new tag
        studioTagIDs, err := db.Studio.GetTagIDs(ctx, studioIDs[studioIdxWithTwoTags])
        if err != nil {
            return err
        }

        assert.Contains(studioTagIDs, destID)

        return nil
    }); err != nil {
        t.Error(err.Error())

@@ -68,6 +68,7 @@ func createFullStudio(id int, parentID int) models.Studio {
        Rating:        &rating,
        IgnoreAutoTag: autoTagIgnored,
        Aliases:       models.NewRelatedStrings(aliases),
        TagIDs:        models.NewRelatedIDs([]int{}),
        StashIDs:      models.NewRelatedStashIDs(stashIDs),
    }

@@ -84,6 +85,7 @@ func createEmptyStudio(id int) models.Studio {
        CreatedAt: createTime,
        UpdatedAt: updateTime,
        Aliases:   models.NewRelatedStrings([]string{}),
        TagIDs:    models.NewRelatedIDs([]int{}),
        StashIDs:  models.NewRelatedStashIDs([]models.StashID{}),
    }
}
Some files were not shown because too many files have changed in this diff.