Mirror of https://github.com/stashapp/stash.git (synced 2025-12-06 08:26:00 +01:00)

Initial commit
Commit 87eeed7e71
1093 changed files with 558731 additions and 0 deletions

.gitignore (vendored, new file, 50 lines)
@@ -0,0 +1,50 @@
####
# Go
####

# Binaries for programs and plugins
*.exe
*.exe~
*.dll
*.so
*.dylib

# Test binary, built with `go test -c`
*.test

# Output of the go coverage tool, specifically when used with LiteIDE
*.out

####
# Jetbrains
####

# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf

# Generated files
.idea/**/contentModel.xml

# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml

####
# Random
####

node_modules

*.db

stash
dist

.idea/codeStyles/codeStyleConfig.xml (new file, 5 lines)
@@ -0,0 +1,5 @@
<component name="ProjectCodeStyleConfiguration">
  <state>
    <option name="PREFERRED_PROJECT_CODE_STYLE" value="Default" />
  </state>
</component>

.idea/dataSources.xml (new file, 14 lines)
@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="DataSourceManagerImpl" format="xml" multifile-model="true">
    <data-source source="LOCAL" name="stash-go" uuid="b8d0eb6d-e8e4-4865-8c0f-2798f78345a7">
      <driver-ref>sqlite.xerial</driver-ref>
      <synchronize>true</synchronize>
      <jdbc-driver>org.sqlite.JDBC</jdbc-driver>
      <jdbc-url>jdbc:sqlite:$USER_HOME$/.stash/stash-go.sqlite</jdbc-url>
      <driver-properties>
        <property name="enable_load_extension" value="true" />
      </driver-properties>
    </data-source>
  </component>
</project>

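The IDE data source above points at a SQLite file under $USER_HOME$/.stash/stash-go.sqlite, and go.mod (further down in this commit) pulls in github.com/jmoiron/sqlx together with the github.com/mattn/go-sqlite3 driver. A minimal sketch of how the application side might open the same file; the `database` package name and path handling are assumptions for illustration, not code from this commit:

package database

import (
	"os"
	"path/filepath"

	"github.com/jmoiron/sqlx"
	_ "github.com/mattn/go-sqlite3" // registers the "sqlite3" driver
)

// Open connects to the same SQLite file the IDE data source references
// (~/.stash/stash-go.sqlite). This is a sketch of the pattern, not the
// actual helper used by stash.
func Open() (*sqlx.DB, error) {
	home, err := os.UserHomeDir()
	if err != nil {
		return nil, err
	}
	dbPath := filepath.Join(home, ".stash", "stash-go.sqlite")
	return sqlx.Open("sqlite3", dbPath)
}
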
.idea/encodings.xml (new file, 4 lines)
@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="Encoding" addBOMForNewFiles="with NO BOM" />
</project>

.idea/go.iml (new file, 8 lines)
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="WEB_MODULE" version="4">
  <component name="NewModuleRootManager">
    <content url="file://$MODULE_DIR$" />
    <orderEntry type="inheritedJdk" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
</module>

.idea/misc.xml (new file, 6 lines)
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="JavaScriptSettings">
    <option name="languageLevel" value="ES6" />
  </component>
</project>

.idea/modules.xml (new file, 8 lines)
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/.idea/go.iml" filepath="$PROJECT_DIR$/.idea/go.iml" />
    </modules>
  </component>
</project>

.idea/sqldialects.xml (new file, 6 lines)
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="SqlDialectMappings">
    <file url="PROJECT" dialect="SQLite" />
  </component>
</project>

.idea/vcs.xml (new file, 6 lines)
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="VcsDirectoryMappings">
    <mapping directory="$PROJECT_DIR$" vcs="Git" />
  </component>
</project>

LICENSE (new file, 21 lines)
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2019 StashApp

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

Makefile (new file, 8 lines)
@@ -0,0 +1,8 @@
gqlgen:
	go run scripts/gqlgen.go

build:
	packr2 build

build-win:
	CGO_ENABLED=1 GOOS=windows GOARCH=amd64 packr2 build -o stash.exe -v

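The gqlgen target runs `go run scripts/gqlgen.go`, a file not shown in this capture. With the 99designs/gqlgen dependency declared in go.mod, the conventional shape of that helper at the time was a one-call wrapper around gqlgen's cmd package, so a sketch (assuming the usual gqlgen.yml-driven setup) might look like:

package main

import "github.com/99designs/gqlgen/cmd"

func main() {
	// Delegates to gqlgen's CLI, which reads the project's gqlgen config and
	// regenerates the GraphQL server code; invoked via `make gqlgen`.
	cmd.Execute()
}
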
README.md (new file, 27 lines)
@@ -0,0 +1,27 @@
# Stash

**Stash is a rails app which organizes and serves your porn.**

See a demo [here](https://vimeo.com/275537038) (password is stashapp).

TODO

## Setup

TODO

## Development

### Commands

* `make build` - Builds the binary
* `make gqlgen` - Regenerate Go GraphQL files

### Building a release

1. cd into the UI directory and run `ng build --prod`
2. cd back to the root directory and run `make build` to build the executable

#### Notes for the dev

https://blog.filippo.io/easy-windows-and-linux-cross-compilers-for-macos/

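The release steps above build the UI with `ng build --prod` and then run `make build`, whose `packr2 build` step (gobuffalo/packr/v2 in go.mod) embeds static assets into the binary. A sketch of the serving side of that pattern; the box name, the `./ui/dist` path and the port are illustrative assumptions rather than details taken from this commit:

package main

import (
	"log"
	"net/http"

	"github.com/gobuffalo/packr/v2"
)

func main() {
	// packr2 finds this packr.New call at build time and bundles the
	// referenced directory into the executable produced by `make build`.
	ui := packr.New("ui", "./ui/dist") // hypothetical box name and path
	http.Handle("/", http.FileServer(ui))
	log.Fatal(http.ListenAndServe(":9999", nil)) // port is an assumption
}
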
certs/server.crt (new file, 28 lines)
@@ -0,0 +1,28 @@
-----BEGIN CERTIFICATE-----
MIIE2DCCAsCgAwIBAgIJAMb93LazWApIMA0GCSqGSIb3DQEBCwUAMBcxFTATBgNV
BAMMDHN0YXNoLnNlcnZlcjAeFw0xOTAxMjcyMzIxMzBaFw0zOTAxMjIyMzIxMzBa
MBcxFTATBgNVBAMMDHN0YXNoLnNlcnZlcjCCAiIwDQYJKoZIhvcNAQEBBQADggIP
ADCCAgoCggIBAKLLRxjZ/K0zO5GaQPIEFYOkRMJs/HG0CRP7z+5ZF2E8Z9USkDO5
bDlJTTTt1oal2m0g3phijXc0lqTmH873iHK5RqLAGZxJhmEAtEfaPeNyM7T+9MEW
3JJIgrunjylu+4eYwcbaTY7mlCOxAwfwuRft8ypfZkofJjcVGM5xdydMCtNur84q
DoHhembcYMZAbaWp4u9mu+1HhpkNZ/qCHbSeYcbsvHUmMS/2tgASn/7fvajUCGhf
BRTSX1cYpZCn0JyNaOZj2pgMXao0qOTD4SEVlZFG70jBwZJW1Lqxtbvwfl5EFxH2
D01qFU0jKzGcyCaye1v/eSIIq/qQsqGStqSqRhmeWfEgNE/Ke4+tXfUEpUGSI2ps
40aPnSipkO/MA+uNT7jLZR4ASPZocW6wq5NVyn4jfrh1hJZhcOEV6Wj/9j0yKH1F
L223tZ0UbjQauTFQou30ga4PlkreElQe+OSlryDO153DKqcFIkHg70awCuCWbvLq
3QF+s09JvdWpJl960UUK9sXpSaY/Y+sEHbN0xJOekw5jTWOoRschfyWZXAYlVbgF
H9QDvx+Cap7lJJEsDT/er/8N30WIlgG/wWaHrJUZkATRw3g9wTQiXaTGbMT/4GVR
mCpYgpk17i+nHJqmHQOHMB7Q6HlQiOtPTse/GJyY1+OGkoYM0SrQC7UvAgMBAAGj
JzAlMCMGA1UdEQQcMBqCDHN0YXNoLnNlcnZlcocEfwAAAYcEwKgByDANBgkqhkiG
9w0BAQsFAAOCAgEAd8W/VUR6eAcs4QBUHzWHzxbO5CFuq9akVkI4REbaEcgsho8I
j6g9EYIDKBnPKRVE1YBeQv4ZGWQq8wL9EEJYqMqhZ4jaVZTqfoOkeaFPxgxtXDkb
KIt0xPGLQiX4nuNKMjT9mDR0VHXNpcsZHpnz+t4ra26Aw/rMuH6ycyGia6eTZ4tN
frEjf5Fgu0EQXbF5/PdiveqIoUaOlUr5vCM1Nqn0sZF9CF9fe/i2BLl3GxmAMh5u
WQ16mu9Se+gD+kJ9o6JynuKMXosgxFkmryPPkwi6UlDMk9RnXX19G15vlY9Tdbv3
0vXoga7Tn+hCJyAd8WMnxGbgtOf1BE/6fZkbWXB5kPfs1GAaVnl6J4WlQTzQdr5P
kl8uxGsWefM0ahZElhcJPZSL90QfFplpw0l+AHZ8jbl6uGxmXSMF+4JPsPHem/GA
LY9DuQoNAvHHYLvHbVu67nN+9kbOKNQH44PwVo9b8zzX0A1l6SBLuxog/s9Egbtw
9IRPls66b/znYj2YZE89zNyHiZxnrzweslrEC9as5cSUL8wraxgUYUZ3Ry7jXgaX
H/ImI7HkkZoAb3ku/q+fl4gpc0ZB5lCa1wXIktjKCKZ0xGBZbehm0vBJGJCn+OCb
IJ0OEhPH7deV4iT11jLJqbfvdlfg/tmfcTvLcnbuf+0kxOsHHUVqEXiRQ6A=
-----END CERTIFICATE-----

certs/server.key (new file, 51 lines)
@@ -0,0 +1,51 @@
-----BEGIN RSA PRIVATE KEY-----
MIIJKQIBAAKCAgEAostHGNn8rTM7kZpA8gQVg6REwmz8cbQJE/vP7lkXYTxn1RKQ
M7lsOUlNNO3WhqXabSDemGKNdzSWpOYfzveIcrlGosAZnEmGYQC0R9o943IztP70
wRbckkiCu6ePKW77h5jBxtpNjuaUI7EDB/C5F+3zKl9mSh8mNxUYznF3J0wK026v
zioOgeF6ZtxgxkBtpani72a77UeGmQ1n+oIdtJ5hxuy8dSYxL/a2ABKf/t+9qNQI
aF8FFNJfVxilkKfQnI1o5mPamAxdqjSo5MPhIRWVkUbvSMHBklbUurG1u/B+XkQX
EfYPTWoVTSMrMZzIJrJ7W/95Igir+pCyoZK2pKpGGZ5Z8SA0T8p7j61d9QSlQZIj
amzjRo+dKKmQ78wD641PuMtlHgBI9mhxbrCrk1XKfiN+uHWElmFw4RXpaP/2PTIo
fUUvbbe1nRRuNBq5MVCi7fSBrg+WSt4SVB745KWvIM7XncMqpwUiQeDvRrAK4JZu
8urdAX6zT0m91akmX3rRRQr2xelJpj9j6wQds3TEk56TDmNNY6hGxyF/JZlcBiVV
uAUf1AO/H4JqnuUkkSwNP96v/w3fRYiWAb/BZoeslRmQBNHDeD3BNCJdpMZsxP/g
ZVGYKliCmTXuL6ccmqYdA4cwHtDoeVCI609Ox78YnJjX44aShgzRKtALtS8CAwEA
AQKCAgBiW0b9cNcFHisjcXtDvcvx2VHh1k8/s3K4rC+Y5iMGEKT7S1fsqF8GzY7k
J76xY4tw26chkom+N/u8bX1SYqQCDIbs1fujPZj/Cpnhv64cuWccN95QuytRBpZW
IoqQkglOh7CsIEEz0YQ3fD9EkOCD4LRGKZV3uxIvvYNbV5qSF5OC4yE9ApP5TO3T
L9sYbNzSdq4xznREjC56Ums8I1knQ3n34hiK/yRAwmJ78MTSDQRd+J2L0b0UM3Z+
/NkraPjVXWq/qBzuOKpup5pTfTWL86qGN8FiyA3YCLZGhhAzGL6A3crCg1QDhdCQ
5mq2cu9K0hNOJjBdq7ecj8gdbrMCG13McfQk+eZY9L7yi43HwmD1ziZ1KQuMLhaQ
C92aZWd1EaMWfMjq+WzAMJzVel1sotcVbiBH724f1TSdIu03hoIHtL1OgSP5m5f8
IHC64mOzQi/1RUreBfjEUs3EPYf5eETjgBf4G5bzmmtn+lkDQUkAKqBbL5cxrt36
hWx+YESm8ohZIowjB6N21EGz01SmV5f7B+j5Wt8XpBqOzNeh/arFOAfVJE1JH7iJ
IPMQf+kUZ40k4pk4ImRdSSb/CxogUxUtCMs7vJJ0k8aQ61IhuMK62wYSmxNisn3Z
vjuem7d6aJg/6RMDBeqhKuBTHx6jYqZX8IyjRFAwg84LBUEwoQKCAQEAzZwfpOIQ
lGMQsSrWqlP8fTNQvPtGhJZN8iiQLA29VYMfZvig61xKnJ8IIW6Sth/V3eJtKwZr
UhxK6PJh5XlVehmpjtCGrlUj34OrvD+MWL/vazkRYORhd6nWjz52EUrUfv0dqU+N
rN41qai3QWiQrZED07NBiZAEah5H7OcGGUFHD0p6ktOUtOZiAK/GXHWRDgx2GQ7r
oo4H2jZVNiUFuYs0urgsNkHic7hq51D+oZoihHZZBVgGrwFEOmGjj43ylcNUa6B7
jbdVSnPvjyUFHmS9eIoajeG5IQ9tMJ6qljiG1mKaHdzUywviIPMoRMfSFvgBceJe
TxVNLpfbV5hFzQKCAQEAyrDokk3cDxfa3eMMyTYRj8spI2Kd7HxQqW/z5NfaIKHH
xauHfi32xzSP7UrECVHX5ZXt67wzEnR67ijhkXQsHmGDywi0HDPudC32QV38VFtx
FKQhYptJVFstWPpAaHoqhysR7tAIgztnzkDP+LcPoTXdJqhPpqUkJLIjtdwWd/Qm
1HnPi5Otn9MtimVek8rUTIG89N/bdGBhSu8KFyvJ7dp4scdUirT/AQDHHOVtw3dE
XhT45tr9wJFNiNR/y1WyZFZVnUDkYEYO5L9c7JPoLac7wccCrpI5oPXE/Tutxpqk
yfGl5GffwNPUXR3vHLjbAuo0Bl7G0goH8E9nSxAq6wKCAQEAr5HzS35mne8B3+cu
qv9QiTB6bwU6MBmQsYmForiQxiZTeEAyWQfUBm2eKb4btTz2CKRCiAzRM6pJJSVk
BulW3g5vxS2288JsAX6T9QlmtJogvTRb7JrXx7DVMDgr5ZQ6Y72LyHnQBp8wv5D7
+vRpJmzAEdPt0KOTwXHeBSM5B/O/1ATLKflV8CsR4Hg4L9eMZ6yoY7lj6MjTV7gx
hyyR+OAvxIlj52jXD+2YC7IT3LabZvUQCwohmuq/8gJSAlE5pjzHJ6hAGMFGKW9a
TqdT+aIYUIfNKP5ocRyVJoE2NVRLf2FvLLTXWBLroys/ZYwjj81ynmaoFWuhHno5
qFC0uQKCAQEAxC8RHLLhwXYccNHXyVVdECnbCpXkN0ZoKGfk0GYgiZ8tJmWNApNo
nY2BlXLdZgvihSRmIdqKMF2vsp69HbUtEsbd8oHdGsMlFSx7gu9+UQQk4o6uhau6
AT0zGkE2YCmGykfk1ppi8upn4hmXcqt1JFtnT4hvd+O1pRMLP3XkBXy6rMOuH/Hz
hpO3K5JMRY53iAm3I+kcl3rGEOpngN/wVhk04xYE/TCM9aKUFqL2Z9Ajy1BPTds+
6sl5G2xauSucPEPYIBn0Mj79IWkbfxz2CGIqk7eVH/UGdIYNgfHmlCmQQKp1ICtu
RkyB6xMTImDGEv8vFc9cb2YoIvAoEV53uwKCAQAz2wA/UOU49TPtvlDHJKA+9FpF
runF8DIfk5Nxl729YbqmI4b/bS2CGk5bv1jl3g3LTaQ+hTDmBC+MbnLplzBYq8m4
z4vBB3ckCySBhFEjwt7Or0sxlNVixKk50zX+jQ0aoJEGnKnGNmmqsfV234ZIKkLx
WKh65Uj0Z1A1pSsfPIVGPx2WfKttGPO1QtAY1DRKS6EP9Ri6sZ2QpFvUEfkzMQ2G
e1cGQQxP4tiJTyxLptIBDCtDoNSzmi9+y5ZoS2BgDCnYdn0Hxjfyxx5Rd17ES5xH
zvodt4WKqtqdxPbmEYa09k5wCflcVnYoAQiUayyRRtj3zKAuxJQ6oTF6Fgzq
-----END RSA PRIVATE KEY-----

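The commit checks in a self-signed TLS certificate and private key under certs/. A minimal sketch of how a Go server could serve HTTPS with that pair using only the standard library; the file paths are the ones committed here, while the handler and the listen address are assumptions:

package main

import (
	"log"
	"net/http"
)

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte("stash")) // placeholder handler, not from this commit
	})

	// Serve HTTPS with the committed self-signed pair; clients have to trust
	// it explicitly since it is not issued by a public CA.
	log.Fatal(http.ListenAndServeTLS(":9999", "certs/server.crt", "certs/server.key", mux))
}
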
go.mod (new file, 20 lines)
@@ -0,0 +1,20 @@
module github.com/stashapp/stash

require (
	github.com/99designs/gqlgen v0.4.5-0.20190127090136-055fb4bc9a6a
	github.com/PuerkitoBio/goquery v1.5.0
	github.com/bmatcuk/doublestar v1.1.1
	github.com/disintegration/imaging v1.6.0
	github.com/go-chi/chi v4.0.1+incompatible
	github.com/gobuffalo/packr/v2 v2.0.0-rc.15
	github.com/golang-migrate/migrate/v4 v4.2.2
	github.com/h2non/filetype v1.0.6
	github.com/jmoiron/sqlx v1.2.0
	github.com/mattn/go-sqlite3 v1.9.0
	github.com/rs/cors v1.6.0
	github.com/sirupsen/logrus v1.3.0
	github.com/spf13/afero v1.2.0 // indirect
	github.com/vektah/gqlparser v1.1.0
	golang.org/x/image v0.0.0-20190118043309-183bebdce1b2 // indirect
	golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e
)

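go.mod pulls in go-chi/chi for HTTP routing and rs/cors for cross-origin handling, alongside gqlgen for the GraphQL layer. A sketch of how those two pieces are typically wired together; the route, the port and the permissive CORS policy are illustrative assumptions, not read from this commit:

package main

import (
	"log"
	"net/http"

	"github.com/go-chi/chi"
	"github.com/rs/cors"
)

func main() {
	r := chi.NewRouter()
	r.Get("/healthz", func(w http.ResponseWriter, _ *http.Request) {
		w.Write([]byte("ok")) // hypothetical endpoint for illustration
	})

	// Allow cross-origin requests (e.g. from an Angular dev server); the
	// exact policy stash uses is not part of this capture.
	handler := cors.AllowAll().Handler(r)
	log.Fatal(http.ListenAndServe(":9999", handler))
}
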
810
go.sum
Normal file
810
go.sum
Normal file
|
|
@ -0,0 +1,810 @@
|
||||||
|
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||||
|
cloud.google.com/go v0.28.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||||
|
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||||
|
contrib.go.opencensus.io/exporter/stackdriver v0.6.0/go.mod h1:QeFzMJDAw8TXt5+aRaSuE8l5BwaMIOIlaVkBOPRuMuw=
|
||||||
|
git.apache.org/thrift.git v0.0.0-20180902110319-2566ecd5d999/go.mod h1:fPE2ZNJGynbRyZ4dJvy6G277gSllfV2HJqblrnkyeyg=
|
||||||
|
git.apache.org/thrift.git v0.0.0-20180924222215-a9235805469b/go.mod h1:fPE2ZNJGynbRyZ4dJvy6G277gSllfV2HJqblrnkyeyg=
|
||||||
|
github.com/99designs/gqlgen v0.4.5-0.20190127090136-055fb4bc9a6a h1:oTsAt8YXjEk1fo7uZR7gya1jrH48oPulx5oF6zWTHRw=
|
||||||
|
github.com/99designs/gqlgen v0.4.5-0.20190127090136-055fb4bc9a6a/go.mod h1:st7qHA6ssU3uRZkmv+wzrzgX4srvIqEIdE5iuRW8GhE=
|
||||||
|
github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8=
|
||||||
|
github.com/Boostport/migration v0.15.0/go.mod h1:cT0NWVRLid2n9b2K8mPSeT5nWT2gt4rEtVmlhvo2QB8=
|
||||||
|
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
|
||||||
|
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||||
|
github.com/DATA-DOG/go-sqlmock v1.3.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
|
||||||
|
github.com/Masterminds/semver v1.4.2/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y=
|
||||||
|
github.com/Masterminds/squirrel v1.1.0 h1:baP1qLdoQCeTw3ifCdOq2dkYc6vGcmRdaociKLbEJXs=
|
||||||
|
github.com/Masterminds/squirrel v1.1.0/go.mod h1:yaPeOnPG5ZRwL9oKdTsO/prlkPbXWZlRVMQ/gGlzIuA=
|
||||||
|
github.com/Microsoft/go-winio v0.4.11/go.mod h1:VhR8bwka0BXejwEJY73c50VrPtXAaKcyvVC4A4RozmA=
|
||||||
|
github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod h1:lmUJ/7eu/Q8D7ML55dXQrVaamCz2vxCfdQBasLZfHKk=
|
||||||
|
github.com/OpenPeeDeeP/depguard v0.0.0-20180806142446-a69c782687b2/go.mod h1:7/4sitnI9YlQgTLLk734QlzXT8DuHVnAyztLplQjk+o=
|
||||||
|
github.com/PuerkitoBio/goquery v1.5.0 h1:uGvmFXOA73IKluu/F84Xd1tt/z07GYm8X49XKHP7EJk=
|
||||||
|
github.com/PuerkitoBio/goquery v1.5.0/go.mod h1:qD2PgZ9lccMbQlc7eEOjaeRlFQON7xY8kdmcsrnKqMg=
|
||||||
|
github.com/StackExchange/wmi v0.0.0-20180116203802-5d049714c4a6/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg=
|
||||||
|
github.com/agnivade/levenshtein v1.0.1 h1:3oJU7J3FGFmyhn8KHjmVaZCN5hxTr7GxgRue+sxIXdQ=
|
||||||
|
github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM=
|
||||||
|
github.com/ajg/form v0.0.0-20160822230020-523a5da1a92f/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY=
|
||||||
|
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ=
|
||||||
|
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
|
||||||
|
github.com/andybalholm/cascadia v1.0.0 h1:hOCXnnZ5A+3eVDX8pvgl4kofXv2ELss0bKcqRySc45o=
|
||||||
|
github.com/andybalholm/cascadia v1.0.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
|
||||||
|
github.com/apache/calcite-avatica-go/v3 v3.2.0/go.mod h1:Kipaz+iNp/AUBrUPxiLGjn1Km6PLkG0jZoL9VnwFMhg=
|
||||||
|
github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
|
||||||
|
github.com/aws/aws-sdk-go v1.15.54/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0=
|
||||||
|
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
|
||||||
|
github.com/bitly/go-hostpool v0.0.0-20171023180738-a3a6125de932/go.mod h1:NOuUCSz6Q9T7+igc/hlvDOUdtWKryOrtFyIVABv/p7k=
|
||||||
|
github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk=
|
||||||
|
github.com/bmatcuk/doublestar v1.1.1 h1:YroD6BJCZBYx06yYFEWvUuKVWQn3vLLQAVmDmvTSaiQ=
|
||||||
|
github.com/bmatcuk/doublestar v1.1.1/go.mod h1:UD6OnuiIn0yFxxA2le/rnRU1G4RaI4UvFv1sNto9p6w=
|
||||||
|
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=
|
||||||
|
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
||||||
|
github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
|
||||||
|
github.com/cockroachdb/cockroach-go v0.0.0-20181001143604-e0a95dfd547c/go.mod h1:XGLbWH/ujMcbPbhZq52Nv6UrCghb1yGn//133kEsvDk=
|
||||||
|
github.com/codegangsta/negroni v1.0.0/go.mod h1:v0y3T5G7Y1UlFfyxFn/QLRU4a2EuNau2iZY63YTKWo0=
|
||||||
|
github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
|
||||||
|
github.com/coreos/etcd v3.3.11+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
|
||||||
|
github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk=
|
||||||
|
github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
|
||||||
|
github.com/cznic/b v0.0.0-20180115125044-35e9bbe41f07/go.mod h1:URriBxXwVq5ijiJ12C7iIZqlA69nTlI+LgI6/pwftG8=
|
||||||
|
github.com/cznic/fileutil v0.0.0-20180108211300-6a051e75936f/go.mod h1:8S58EK26zhXSxzv7NQFpnliaOQsmDUxvoQO3rt154Vg=
|
||||||
|
github.com/cznic/golex v0.0.0-20170803123110-4ab7c5e190e4/go.mod h1:+bmmJDNmKlhWNG+gwWCkaBoTy39Fs+bzRxVBzoTQbIc=
|
||||||
|
github.com/cznic/internal v0.0.0-20180608152220-f44710a21d00/go.mod h1:olo7eAdKwJdXxb55TKGLiJ6xt1H0/tiiRCWKVLmtjY4=
|
||||||
|
github.com/cznic/lldb v1.1.0/go.mod h1:FIZVUmYUVhPwRiPzL8nD/mpFcJ/G7SSXjjXYG4uRI3A=
|
||||||
|
github.com/cznic/mathutil v0.0.0-20180504122225-ca4c9f2c1369/go.mod h1:e6NPNENfs9mPDVNRekM7lKScauxd5kXTr1Mfyig6TDM=
|
||||||
|
github.com/cznic/ql v1.2.0/go.mod h1:FbpzhyZrqr0PVlK6ury+PoW3T0ODUV22OeWIxcaOrSE=
|
||||||
|
github.com/cznic/sortutil v0.0.0-20150617083342-4c7342852e65/go.mod h1:q2w6Bg5jeox1B+QkJ6Wp/+Vn0G/bo3f1uY7Fn3vivIQ=
|
||||||
|
github.com/cznic/strutil v0.0.0-20171016134553-529a34b1c186/go.mod h1:AHHPPPXTw0h6pVabbcbyGRK1DckRn7r/STdZEeIDzZc=
|
||||||
|
github.com/cznic/zappy v0.0.0-20160723133515-2533cb5b45cc/go.mod h1:Y1SNZ4dRUOKXshKUbwUapqNncRrho4mkjQebgEHZLj8=
|
||||||
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||||
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
|
||||||
|
github.com/dhui/dktest v0.3.0/go.mod h1:cyzIUfGsBEbZ6BT7tnXqAShHSXCZhSNmFl70sZ7c1yc=
|
||||||
|
github.com/disintegration/imaging v1.6.0 h1:nVPXRUUQ36Z7MNf0O77UzgnOb1mkMMor7lmJMJXc/mA=
|
||||||
|
github.com/disintegration/imaging v1.6.0/go.mod h1:xuIt+sRxDFrHS0drzXUlCJthkJ8k7lkkUojDSR247MQ=
|
||||||
|
github.com/docker/distribution v2.7.0+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
|
||||||
|
github.com/docker/docker v0.7.3-0.20190103212154-2b7e084dc98b/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||||
|
github.com/docker/docker v0.7.3-0.20190108045446-77df18c24acf/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||||
|
github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
|
||||||
|
github.com/docker/go-units v0.3.3/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
|
||||||
|
github.com/dustin/go-humanize v0.0.0-20180713052910-9f541cc9db5d/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
|
||||||
|
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
|
||||||
|
github.com/edsrzf/mmap-go v0.0.0-20170320065105-0bce6a688712/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M=
|
||||||
|
github.com/fatih/color v1.6.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
|
||||||
|
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
|
||||||
|
github.com/fatih/structs v1.0.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
|
||||||
|
github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
|
||||||
|
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
||||||
|
github.com/fsouza/fake-gcs-server v1.3.0/go.mod h1:Lq+43m2znsXfDKHnQMfdA0HpYYAEJsfizsbpk5k3TLo=
|
||||||
|
github.com/go-chi/chi v3.3.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ=
|
||||||
|
github.com/go-chi/chi v4.0.1+incompatible h1:RSRC5qmFPtO90t7pTL0DBMNpZFsb/sHF3RXVlDgFisA=
|
||||||
|
github.com/go-chi/chi v4.0.1+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ=
|
||||||
|
github.com/go-critic/checkers v0.0.0-20181204210945-97246d3b3c67/go.mod h1:Cg5JCP9M6m93z6fecpRcVgD2lZf2RvPtb85ldjiShZc=
|
||||||
|
github.com/go-ini/ini v1.25.4/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=
|
||||||
|
github.com/go-ini/ini v1.39.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=
|
||||||
|
github.com/go-lintpack/lintpack v0.5.1/go.mod h1:NwZuYi2nUHho8XEIZ6SIxihrnPoqBTDqfpXvXAN0sXM=
|
||||||
|
github.com/go-ole/go-ole v1.2.1/go.mod h1:7FAglXiTm7HKlQRDeOQ6ZNUHidzCWXuZWq/1dTyBNF8=
|
||||||
|
github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
|
||||||
|
github.com/go-sql-driver/mysql v1.4.1 h1:g24URVg0OFbNUTx9qqY1IRZ9D9z3iPyi5zKhQZpNwpA=
|
||||||
|
github.com/go-sql-driver/mysql v1.4.1/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
|
||||||
|
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
|
||||||
|
github.com/go-toolsmith/astcast v0.0.0-20181028201508-b7a89ed70af1/go.mod h1:TEo3Ghaj7PsZawQHxT/oBvo4HK/sl1RcuUHDKTTju+o=
|
||||||
|
github.com/go-toolsmith/astcopy v0.0.0-20180903214859-79b422d080c4/go.mod h1:c9CPdq2AzM8oPomdlPniEfPAC6g1s7NqZzODt8y6ib8=
|
||||||
|
github.com/go-toolsmith/astequal v0.0.0-20180903214952-dcb477bfacd6/go.mod h1:H+xSiq0+LtiDC11+h1G32h7Of5O3CYFJ99GVbS5lDKY=
|
||||||
|
github.com/go-toolsmith/astfmt v0.0.0-20180903215011-8f8ee99c3086/go.mod h1:mP93XdblcopXwlyN4X4uodxXQhldPGZbcEJIimQHrkg=
|
||||||
|
github.com/go-toolsmith/astp v0.0.0-20180903215135-0af7e3c24f30/go.mod h1:SV2ur98SGypH1UjcPpCatrV5hPazG6+IfNHbkDXBRrk=
|
||||||
|
github.com/go-toolsmith/pkgload v0.0.0-20181119091011-e9e65178eee8/go.mod h1:WoMrjiy4zvdS+Bg6z9jZH82QXwkcgCBX6nOfnmdaHks=
|
||||||
|
github.com/go-toolsmith/pkgload v0.0.0-20181120203407-5122569a890b/go.mod h1:WoMrjiy4zvdS+Bg6z9jZH82QXwkcgCBX6nOfnmdaHks=
|
||||||
|
github.com/go-toolsmith/strparse v0.0.0-20180903215201-830b6daa1241/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8=
|
||||||
|
github.com/go-toolsmith/typep v0.0.0-20181030061450-d63dc7650676/go.mod h1:JSQCQMUPdRlMZFswiq3TGpNp1GMktqkR2Ns5AIQkATU=
|
||||||
|
github.com/gobuffalo/buffalo v0.12.8-0.20181004233540-fac9bb505aa8/go.mod h1:sLyT7/dceRXJUxSsE813JTQtA3Eb1vjxWfo/N//vXIY=
|
||||||
|
github.com/gobuffalo/buffalo v0.13.0/go.mod h1:Mjn1Ba9wpIbpbrD+lIDMy99pQ0H0LiddMIIDGse7qT4=
|
||||||
|
github.com/gobuffalo/buffalo v0.13.1/go.mod h1:K9c22KLfDz7obgxvHv1amvJtCQEZNiox9+q6FDJ1Zcs=
|
||||||
|
github.com/gobuffalo/buffalo v0.13.2/go.mod h1:vA8I4Dwcfkx7RAzIRHVDZxfS3QJR7muiOjX4r8P2/GE=
|
||||||
|
github.com/gobuffalo/buffalo v0.13.4/go.mod h1:y2jbKkO0k49OrNIOAkbWQiPBqxAFpHn5OKnkc7BDh+I=
|
||||||
|
github.com/gobuffalo/buffalo v0.13.5/go.mod h1:hPcP12TkFSZmT3gUVHZ24KRhTX3deSgu6QSgn0nbWf4=
|
||||||
|
github.com/gobuffalo/buffalo v0.13.6/go.mod h1:/Pm0MPLusPhWDayjRD+/vKYnelScIiv0sX9YYek0wpg=
|
||||||
|
github.com/gobuffalo/buffalo v0.13.7/go.mod h1:3gQwZhI8DSbqmDqlFh7kfwuv/wd40rqdVxXtFWlCQHw=
|
||||||
|
github.com/gobuffalo/buffalo v0.13.9/go.mod h1:vIItiQkTHq46D1p+bw8mFc5w3BwrtJhMvYjSIYK3yjE=
|
||||||
|
github.com/gobuffalo/buffalo v0.13.12/go.mod h1:Y9e0p0cdo/eI+lHm7EFzlkc9YzjwGo5QeDj+FbsyqVA=
|
||||||
|
github.com/gobuffalo/buffalo-plugins v1.0.2/go.mod h1:pOp/uF7X3IShFHyobahTkTLZaeUXwb0GrUTb9ngJWTs=
|
||||||
|
github.com/gobuffalo/buffalo-plugins v1.0.4/go.mod h1:pWS1vjtQ6uD17MVFWf7i3zfThrEKWlI5+PYLw/NaDB4=
|
||||||
|
github.com/gobuffalo/buffalo-plugins v1.4.3/go.mod h1:uCzTY0woez4nDMdQjkcOYKanngeUVRO2HZi7ezmAjWY=
|
||||||
|
github.com/gobuffalo/buffalo-plugins v1.5.1/go.mod h1:jbmwSZK5+PiAP9cC09VQOrGMZFCa/P0UMlIS3O12r5w=
|
||||||
|
github.com/gobuffalo/buffalo-plugins v1.6.1/go.mod h1:/XZt7UuuDnx5P4v3cStK0+XoYiNOA2f0wDIsm1oLJQA=
|
||||||
|
github.com/gobuffalo/buffalo-plugins v1.6.4/go.mod h1:/+N1aophkA2jZ1ifB2O3Y9yGwu6gKOVMtUmJnbg+OZI=
|
||||||
|
github.com/gobuffalo/buffalo-plugins v1.6.5/go.mod h1:0HVkbgrVs/MnPZ/FOseDMVanCTm2RNcdM0PuXcL1NNI=
|
||||||
|
github.com/gobuffalo/buffalo-plugins v1.6.6/go.mod h1:hSWAEkJyL9RENJlmanMivgnNkrQ9RC4xJARz8dQryi0=
|
||||||
|
github.com/gobuffalo/buffalo-plugins v1.6.7/go.mod h1:ZGZRkzz2PiKWHs0z7QsPBOTo2EpcGRArMEym6ghKYgk=
|
||||||
|
github.com/gobuffalo/buffalo-plugins v1.6.9/go.mod h1:yYlYTrPdMCz+6/+UaXg5Jm4gN3xhsvsQ2ygVatZV5vw=
|
||||||
|
github.com/gobuffalo/buffalo-plugins v1.6.10/go.mod h1:HxzPZjAEzh9H0gnHelObxxrut9O+1dxydf7U93SYsc8=
|
||||||
|
github.com/gobuffalo/buffalo-plugins v1.6.11/go.mod h1:eAA6xJIL8OuynJZ8amXjRmHND6YiusVAaJdHDN1Lu8Q=
|
||||||
|
github.com/gobuffalo/buffalo-plugins v1.7.2/go.mod h1:vEbx30cLFeeZ48gBA/rkhbqC2M/2JpsKs5CoESWhkPw=
|
||||||
|
github.com/gobuffalo/buffalo-plugins v1.8.1/go.mod h1:vu71J3fD4b7KKywJQ1tyaJGtahG837Cj6kgbxX0e4UI=
|
||||||
|
github.com/gobuffalo/buffalo-plugins v1.8.2/go.mod h1:9te6/VjEQ7pKp7lXlDIMqzxgGpjlKoAcAANdCgoR960=
|
||||||
|
github.com/gobuffalo/buffalo-plugins v1.8.3/go.mod h1:IAWq6vjZJVXebIq2qGTLOdlXzmpyTZ5iJG5b59fza5U=
|
||||||
|
github.com/gobuffalo/buffalo-plugins v1.9.3/go.mod h1:BNRunDThMZKjqx6R+n14Rk3sRSOWgbMuzCKXLqbd7m0=
|
||||||
|
github.com/gobuffalo/buffalo-plugins v1.9.4/go.mod h1:grCV6DGsQlVzQwk6XdgcL3ZPgLm9BVxlBmXPMF8oBHI=
|
||||||
|
github.com/gobuffalo/buffalo-plugins v1.10.0/go.mod h1:4osg8d9s60txLuGwXnqH+RCjPHj9K466cDFRl3PErHI=
|
||||||
|
github.com/gobuffalo/buffalo-plugins v1.11.0 h1:yZ6USaSdAKpogRS8DZJgeG7/CTPGmyhplwifphmmegw=
|
||||||
|
github.com/gobuffalo/buffalo-plugins v1.11.0/go.mod h1:rtIvAYRjYibgmWhnjKmo7OadtnxuMG5ZQLr25ozAzjg=
|
||||||
|
github.com/gobuffalo/buffalo-plugins v1.12.0 h1:5rvYQ7mwfPwUW9zqcMd9ahWtPVOOouMKZjv88q45Z7c=
|
||||||
|
github.com/gobuffalo/buffalo-plugins v1.12.0/go.mod h1:kw4Mj2vQXqe4X5TI36PEQgswbL30heGQwJEeDKd1v+4=
|
||||||
|
github.com/gobuffalo/buffalo-pop v1.0.5/go.mod h1:Fw/LfFDnSmB/vvQXPvcXEjzP98Tc+AudyNWUBWKCwQ8=
|
||||||
|
github.com/gobuffalo/buffalo-pop v1.1.2/go.mod h1:czNLXcYbg5/fjr+uht0NyjZaQ0V2W23H1jzyORgCzQ4=
|
||||||
|
github.com/gobuffalo/buffalo-pop v1.1.5/go.mod h1:H01JIg42XwOHS4gRMhSeDZqBovNVlfBUsVXckU617s4=
|
||||||
|
github.com/gobuffalo/buffalo-pop v1.1.8/go.mod h1:1uaxOFzzVud/zR5f1OEBr21tMVLQS3OZpQ1A5cr0svE=
|
||||||
|
github.com/gobuffalo/buffalo-pop v1.1.13/go.mod h1:47GQoBjCMcl5Pw40iCWHQYJvd0HsT9kdaOPWgnzHzk4=
|
||||||
|
github.com/gobuffalo/buffalo-pop v1.1.14/go.mod h1:sAMh6+s7wytCn5cHqZIuItJbAqzvs6M7FemLexl+pwc=
|
||||||
|
github.com/gobuffalo/buffalo-pop v1.1.15/go.mod h1:vnvvxhbEFAaEbac9E2ZPjsBeL7WHkma2UyKNVA4y9Wo=
|
||||||
|
github.com/gobuffalo/buffalo-pop v1.2.1/go.mod h1:SHqojN0bVzaAzCbQDdWtsib202FDIxqwmCO8VDdweF4=
|
||||||
|
github.com/gobuffalo/buffalo-pop v1.3.0/go.mod h1:P0PhA225dRGyv0WkgYjYKqgoxPdDPDFZDvHj60AGF5w=
|
||||||
|
github.com/gobuffalo/buffalo-pop v1.6.0/go.mod h1:vrEVNOBKe042HjSNMj72J4FgER/VG6lt4xW6WMpTdlY=
|
||||||
|
github.com/gobuffalo/envy v1.6.4/go.mod h1:Abh+Jfw475/NWtYMEt+hnJWRiC8INKWibIMyNt1w2Mc=
|
||||||
|
github.com/gobuffalo/envy v1.6.5/go.mod h1:N+GkhhZ/93bGZc6ZKhJLP6+m+tCNPKwgSpH9kaifseQ=
|
||||||
|
github.com/gobuffalo/envy v1.6.6/go.mod h1:N+GkhhZ/93bGZc6ZKhJLP6+m+tCNPKwgSpH9kaifseQ=
|
||||||
|
github.com/gobuffalo/envy v1.6.7/go.mod h1:N+GkhhZ/93bGZc6ZKhJLP6+m+tCNPKwgSpH9kaifseQ=
|
||||||
|
github.com/gobuffalo/envy v1.6.8/go.mod h1:N+GkhhZ/93bGZc6ZKhJLP6+m+tCNPKwgSpH9kaifseQ=
|
||||||
|
github.com/gobuffalo/envy v1.6.9/go.mod h1:N+GkhhZ/93bGZc6ZKhJLP6+m+tCNPKwgSpH9kaifseQ=
|
||||||
|
github.com/gobuffalo/envy v1.6.10/go.mod h1:X0CFllQjTV5ogsnUrg+Oks2yTI+PU2dGYBJOEI2D1Uo=
|
||||||
|
github.com/gobuffalo/envy v1.6.11/go.mod h1:Fiq52W7nrHGDggFPhn2ZCcHw4u/rqXkqo+i7FB6EAcg=
|
||||||
|
github.com/gobuffalo/envy v1.6.12 h1:zkhss8DXz/pty2HAyA8BnvWMTYxo4gjd4+WCnYovoxY=
|
||||||
|
github.com/gobuffalo/envy v1.6.12/go.mod h1:qJNrJhKkZpEW0glh5xP2syQHH5kgdmgsKss2Kk8PTP0=
|
||||||
|
github.com/gobuffalo/events v1.0.3/go.mod h1:Txo8WmqScapa7zimEQIwgiJBvMECMe9gJjsKNPN3uZw=
|
||||||
|
github.com/gobuffalo/events v1.0.7/go.mod h1:z8txf6H9jWhQ5Scr7YPLWg/cgXBRj8Q4uYI+rsVCCSQ=
|
||||||
|
github.com/gobuffalo/events v1.0.8/go.mod h1:A5KyqT1sA+3GJiBE4QKZibse9mtOcI9nw8gGrDdqYGs=
|
||||||
|
github.com/gobuffalo/events v1.1.1/go.mod h1:Ia9OgHMco9pEhJaPrPQJ4u4+IZlkxYVco2VbJ2XgnAE=
|
||||||
|
github.com/gobuffalo/events v1.1.3/go.mod h1:9yPGWYv11GENtzrIRApwQRMYSbUgCsZ1w6R503fCfrk=
|
||||||
|
github.com/gobuffalo/events v1.1.4/go.mod h1:09/YRRgZHEOts5Isov+g9X2xajxdvOAcUuAHIX/O//A=
|
||||||
|
github.com/gobuffalo/events v1.1.5/go.mod h1:3YUSzgHfYctSjEjLCWbkXP6djH2M+MLaVRzb4ymbAK0=
|
||||||
|
github.com/gobuffalo/events v1.1.6/go.mod h1:H/3ZB9BA+WorMb/0F79UvU6u0Cyo2hU97WA51bG2ONY=
|
||||||
|
github.com/gobuffalo/events v1.1.7/go.mod h1:6fGqxH2ing5XMb3EYRq9LEkVlyPGs4oO/eLzh+S8CxY=
|
||||||
|
github.com/gobuffalo/events v1.1.8/go.mod h1:UFy+W6X6VbCWS8k2iT81HYX65dMtiuVycMy04cplt/8=
|
||||||
|
github.com/gobuffalo/events v1.1.9 h1:ukq5ys/h0TuiX7eLJyZBD1dJOy0r19JTEYmgXKG9j+Y=
|
||||||
|
github.com/gobuffalo/events v1.1.9/go.mod h1:/0nf8lMtP5TkgNbzYxR6Bl4GzBy5s5TebgNTdRfRbPM=
|
||||||
|
github.com/gobuffalo/events v1.2.0 h1:YovlMNcwNTfIm/3OdB+KemDOm8yUz4XIH+4kbMhGXWw=
|
||||||
|
github.com/gobuffalo/events v1.2.0/go.mod h1:pxvpvsKXKZNPtHuIxUV3K+g+KP5o4forzaeFj++bh68=
|
||||||
|
github.com/gobuffalo/fizz v1.0.12/go.mod h1:C0sltPxpYK8Ftvf64kbsQa2yiCZY4RZviurNxXdAKwc=
|
||||||
|
github.com/gobuffalo/fizz v1.0.15/go.mod h1:EI3mEpjImuji6Bwu++N2uXhljQwOhwtimZQJ89zwyF4=
|
||||||
|
github.com/gobuffalo/fizz v1.0.16/go.mod h1:EI3mEpjImuji6Bwu++N2uXhljQwOhwtimZQJ89zwyF4=
|
||||||
|
github.com/gobuffalo/fizz v1.1.2/go.mod h1:THqzNTlNxNaF5hq3ddp16SnEcl2m83bTeTzJEoD+kqc=
|
||||||
|
github.com/gobuffalo/fizz v1.1.3/go.mod h1:THqzNTlNxNaF5hq3ddp16SnEcl2m83bTeTzJEoD+kqc=
|
||||||
|
github.com/gobuffalo/fizz v1.3.0/go.mod h1:THqzNTlNxNaF5hq3ddp16SnEcl2m83bTeTzJEoD+kqc=
|
||||||
|
github.com/gobuffalo/fizz v1.5.0/go.mod h1:Uu3ch14M4S7LDU7LAP1GQ+KNCRmZYd05Gqasc96XLa0=
|
||||||
|
github.com/gobuffalo/flect v0.0.0-20180907193754-dc14d8acaf9f/go.mod h1:rCiQgmAE4axgBNl3jZWzS5rETRYTGOsrixTRaCPzNdA=
|
||||||
|
github.com/gobuffalo/flect v0.0.0-20181002182613-4571df4b1daf/go.mod h1:rCiQgmAE4axgBNl3jZWzS5rETRYTGOsrixTRaCPzNdA=
|
||||||
|
github.com/gobuffalo/flect v0.0.0-20181007231023-ae7ed6bfe683/go.mod h1:rCiQgmAE4axgBNl3jZWzS5rETRYTGOsrixTRaCPzNdA=
|
||||||
|
github.com/gobuffalo/flect v0.0.0-20181018182602-fd24a256709f/go.mod h1:rCiQgmAE4axgBNl3jZWzS5rETRYTGOsrixTRaCPzNdA=
|
||||||
|
github.com/gobuffalo/flect v0.0.0-20181019110701-3d6f0b585514/go.mod h1:rCiQgmAE4axgBNl3jZWzS5rETRYTGOsrixTRaCPzNdA=
|
||||||
|
github.com/gobuffalo/flect v0.0.0-20181024204909-8f6be1a8c6c2/go.mod h1:rCiQgmAE4axgBNl3jZWzS5rETRYTGOsrixTRaCPzNdA=
|
||||||
|
github.com/gobuffalo/flect v0.0.0-20181104133451-1f6e9779237a/go.mod h1:rCiQgmAE4axgBNl3jZWzS5rETRYTGOsrixTRaCPzNdA=
|
||||||
|
github.com/gobuffalo/flect v0.0.0-20181108195648-8fe1b44cfe32/go.mod h1:rCiQgmAE4axgBNl3jZWzS5rETRYTGOsrixTRaCPzNdA=
|
||||||
|
github.com/gobuffalo/flect v0.0.0-20181109221320-179d36177b5b/go.mod h1:0HvNbHdfh+WOvDSIASqJOSxTOWSxCCUF++k/Y53v9rI=
|
||||||
|
github.com/gobuffalo/flect v0.0.0-20181114183036-47375f6d8328/go.mod h1:0HvNbHdfh+WOvDSIASqJOSxTOWSxCCUF++k/Y53v9rI=
|
||||||
|
github.com/gobuffalo/flect v0.0.0-20181210151238-24a2b68e0316/go.mod h1:en58vff74S9b99Eg42Dr+/9yPu437QjlNsO/hBYPuOk=
|
||||||
|
github.com/gobuffalo/flect v0.0.0-20190104192022-4af577e09bf2/go.mod h1:en58vff74S9b99Eg42Dr+/9yPu437QjlNsO/hBYPuOk=
|
||||||
|
github.com/gobuffalo/flect v0.0.0-20190117212819-a62e61d96794 h1:HZOs07hF3AmoaUj4HJQHV5RqfOuGnPZI7aFcireIrww=
|
||||||
|
github.com/gobuffalo/flect v0.0.0-20190117212819-a62e61d96794/go.mod h1:397QT6v05LkZkn07oJXXT6y9FCfwC8Pug0WA2/2mE9k=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20180924032338-7af3a40f2252/go.mod h1:tUTQOogrr7tAQnhajMSH6rv1BVev34H2sa1xNHMy94g=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181003150629-3786a0744c5d/go.mod h1:WAd8HmjMVrnkAZbmfgH5dLBUchsZfqzp/WS5sQz+uTM=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181005145118-318a41a134cc/go.mod h1:WAd8HmjMVrnkAZbmfgH5dLBUchsZfqzp/WS5sQz+uTM=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181007153042-b8de7d566757/go.mod h1:+oG5Ljrw04czAHbPXREwaFojJbpUvcIy4DiOnbEJFTA=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181012161047-33e5f43d83a6/go.mod h1:+oG5Ljrw04czAHbPXREwaFojJbpUvcIy4DiOnbEJFTA=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181017160347-90a774534246/go.mod h1:+oG5Ljrw04czAHbPXREwaFojJbpUvcIy4DiOnbEJFTA=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181019144442-df0a36fdd146/go.mod h1:IyRrGrQb/sbHu/0z9i5mbpZroIsdxjCYfj+zFiFiWZQ=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181024195656-51392254bf53/go.mod h1:o9GEH5gn5sCKLVB5rHFC4tq40rQ3VRUzmx6WwmaqISE=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181025145300-af3f81d526b8/go.mod h1:uZ1fFYvdcP8mu0B/Ynarf6dsGvp7QFIpk/QACUuFUVI=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181027191429-94d6cfb5c7fc/go.mod h1:x7SkrQQBx204Y+O9EwRXeszLJDTaWN0GnEasxgLrQTA=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181027195209-3887b7171c4f/go.mod h1:JbKx8HSWICu5zyqWOa0dVV1pbbXOHusrSzQUprW6g+w=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181030163439-ed103521b8ec/go.mod h1:3Xm9z7/2oRxlB7PSPLxvadZ60/0UIek1YWmcC7QSaVs=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181106193839-7dcb0924caf1/go.mod h1:x61yHxvbDCgQ/7cOAbJCacZQuHgB0KMSzoYcw5debjU=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181107223128-f18346459dbe/go.mod h1:utQD3aKKEsdb03oR+Vi/6ztQb1j7pO10N3OBoowRcSU=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181109163038-9539921b620f/go.mod h1:118bnhJR2oviiji++mZj0IH/IaFBCzwkWHaI4OQq5hQ=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181110202416-7b7d8756a9e2/go.mod h1:118bnhJR2oviiji++mZj0IH/IaFBCzwkWHaI4OQq5hQ=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181111200257-599b33630ab4/go.mod h1:w+iD/cdtIpPDFax6LlUFuCdXFD0DLRUXsfp3IeT/Doc=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181114215459-0a4decd77f5d/go.mod h1:kN2KZ8VgXF9VIIOj/GM0Eo7YK+un4Q3tTreKOf0q1ng=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181119162812-e8ff4adce8bb/go.mod h1:BA9htSe4bZwBDJLe8CUkoqkypq3hn3+CkoHqVOW718E=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181127225641-2d959acc795b/go.mod h1:l54xLXNkteX/PdZ+HlgPk1qtcrgeOr3XUBBPDbH+7CQ=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181128191930-77e34f71ba2a/go.mod h1:FW/D9p7cEEOqxYA71/hnrkOWm62JZ5ZNxcNIVJEaWBU=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181203165245-fda8bcce96b1/go.mod h1:wpNSANu9UErftfiaAlz1pDZclrYzLtO5lALifODyjuM=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181203201232-849d2c9534ea/go.mod h1:wpNSANu9UErftfiaAlz1pDZclrYzLtO5lALifODyjuM=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181206121324-d6fb8a0dbe36/go.mod h1:wpNSANu9UErftfiaAlz1pDZclrYzLtO5lALifODyjuM=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181207164119-84844398a37d/go.mod h1:y0ysCHGGQf2T3vOhCrGHheYN54Y/REj0ayd0Suf4C/8=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20181211165820-e26c8466f14d/go.mod h1:sHnK+ZSU4e2feXP3PA29ouij6PUEiN+RCwECjCTB3yM=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20190104222617-a71664fc38e7/go.mod h1:QPsQ1FnhEsiU8f+O0qKWXz2RE4TiDqLVChWkBuh1WaY=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20190112155932-f31a84fcacf5 h1:boQS3dA9PxhyufJEWIILrG6pJQbDnpwP2rFyvWacdoY=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20190112155932-f31a84fcacf5/go.mod h1:CIaHCrSIuJ4il6ka3Hub4DR4adDrGoXGEEt2FbBxoIo=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20190124191459-3310289fa4b4 h1:0hgER6ADOc40ws1xYtrSjYq7OQqMz/LsgaGooEz9RqY=
|
||||||
|
github.com/gobuffalo/genny v0.0.0-20190124191459-3310289fa4b4/go.mod h1:yIRqxhZV2sAzb+B3iPUMLauTRrYP8tJUlZ1zV9teKik=
|
||||||
|
github.com/gobuffalo/github_flavored_markdown v1.0.4/go.mod h1:uRowCdK+q8d/RF0Kt3/DSalaIXbb0De/dmTqMQdkQ4I=
|
||||||
|
github.com/gobuffalo/github_flavored_markdown v1.0.5/go.mod h1:U0643QShPF+OF2tJvYNiYDLDGDuQmJZXsf/bHOJPsMY=
|
||||||
|
github.com/gobuffalo/github_flavored_markdown v1.0.7/go.mod h1:w93Pd9Lz6LvyQXEG6DktTPHkOtCbr+arAD5mkwMzXLI=
|
||||||
|
github.com/gobuffalo/httptest v1.0.2/go.mod h1:7T1IbSrg60ankme0aDLVnEY0h056g9M1/ZvpVThtB7E=
|
||||||
|
github.com/gobuffalo/httptest v1.0.3/go.mod h1:7T1IbSrg60ankme0aDLVnEY0h056g9M1/ZvpVThtB7E=
|
||||||
|
github.com/gobuffalo/httptest v1.0.4/go.mod h1:7T1IbSrg60ankme0aDLVnEY0h056g9M1/ZvpVThtB7E=
|
||||||
|
github.com/gobuffalo/httptest v1.0.5/go.mod h1:7T1IbSrg60ankme0aDLVnEY0h056g9M1/ZvpVThtB7E=
|
||||||
|
github.com/gobuffalo/httptest v1.0.6/go.mod h1:7T1IbSrg60ankme0aDLVnEY0h056g9M1/ZvpVThtB7E=
|
||||||
|
github.com/gobuffalo/licenser v0.0.0-20180924033006-eae28e638a42/go.mod h1:Ubo90Np8gpsSZqNScZZkVXXAo5DGhTb+WYFIjlnog8w=
|
||||||
|
github.com/gobuffalo/licenser v0.0.0-20181025145548-437d89de4f75/go.mod h1:x3lEpYxkRG/XtGCUNkio+6RZ/dlOvLzTI9M1auIwFcw=
|
||||||
|
github.com/gobuffalo/licenser v0.0.0-20181027200154-58051a75da95/go.mod h1:BzhaaxGd1tq1+OLKObzgdCV9kqVhbTulxOpYbvMQWS0=
|
||||||
|
github.com/gobuffalo/licenser v0.0.0-20181109171355-91a2a7aac9a7/go.mod h1:m+Ygox92pi9bdg+gVaycvqE8RVSjZp7mWw75+K5NPHk=
|
||||||
|
github.com/gobuffalo/licenser v0.0.0-20181116224424-1b7fd3f9cbb4/go.mod h1:icHYfF2FVDi6CpI8BK9Sy1ChkSijz/0GNN7Qzzdk6JE=
|
||||||
|
github.com/gobuffalo/licenser v0.0.0-20181128165715-cc7305f8abed/go.mod h1:oU9F9UCE+AzI/MueCKZamsezGOOHfSirltllOVeRTAE=
|
||||||
|
github.com/gobuffalo/licenser v0.0.0-20181128170751-82cc989582b9/go.mod h1:oU9F9UCE+AzI/MueCKZamsezGOOHfSirltllOVeRTAE=
|
||||||
|
github.com/gobuffalo/licenser v0.0.0-20181203160806-fe900bbede07/go.mod h1:ph6VDNvOzt1CdfaWC+9XwcBnlSTBz2j49PBwum6RFaU=
|
||||||
|
github.com/gobuffalo/licenser v0.0.0-20181211173111-f8a311c51159/go.mod h1:ve/Ue99DRuvnTaLq2zKa6F4KtHiYf7W046tDjuGYPfM=
|
||||||
|
github.com/gobuffalo/logger v0.0.0-20181022175615-46cfb361fc27/go.mod h1:8sQkgyhWipz1mIctHF4jTxmJh1Vxhp7mP8IqbljgJZo=
|
||||||
|
github.com/gobuffalo/logger v0.0.0-20181027144941-73d08d2bb969/go.mod h1:7uGg2duHKpWnN4+YmyKBdLXfhopkAdVM6H3nKbyFbz8=
|
||||||
|
github.com/gobuffalo/logger v0.0.0-20181027193913-9cf4dd0efe46/go.mod h1:7uGg2duHKpWnN4+YmyKBdLXfhopkAdVM6H3nKbyFbz8=
|
||||||
|
github.com/gobuffalo/logger v0.0.0-20181109185836-3feeab578c17/go.mod h1:oNErH0xLe+utO+OW8ptXMSA5DkiSEDW1u3zGIt8F9Ew=
|
||||||
|
github.com/gobuffalo/logger v0.0.0-20181117211126-8e9b89b7c264/go.mod h1:5etB91IE0uBlw9k756fVKZJdS+7M7ejVhmpXXiSFj0I=
|
||||||
|
github.com/gobuffalo/logger v0.0.0-20181127160119-5b956e21995c h1:Z/ppYX6EtPEysbW4VEGz2dO+4F4VTthWp2sWRUCANdU=
|
||||||
|
github.com/gobuffalo/logger v0.0.0-20181127160119-5b956e21995c/go.mod h1:+HxKANrR9VGw9yN3aOAppJKvhO05ctDi63w4mDnKv2U=
|
||||||
|
github.com/gobuffalo/makr v1.1.5/go.mod h1:Y+o0btAH1kYAMDJW/TX3+oAXEu0bmSLLoC9mIFxtzOw=
|
||||||
|
github.com/gobuffalo/mapi v1.0.0/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc=
|
||||||
|
github.com/gobuffalo/mapi v1.0.1 h1:JRuTiZzDEZhBHkFiHTxJkYRT6CbYuL0K/rn+1byJoEA=
|
||||||
|
github.com/gobuffalo/mapi v1.0.1/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc=
|
||||||
|
github.com/gobuffalo/meta v0.0.0-20181018155829-df62557efcd3/go.mod h1:XTTOhwMNryif3x9LkTTBO/Llrveezd71u3quLd0u7CM=
|
||||||
|
github.com/gobuffalo/meta v0.0.0-20181018192820-8c6cef77dab3/go.mod h1:E94EPzx9NERGCY69UWlcj6Hipf2uK/vnfrF4QD0plVE=
|
||||||
|
github.com/gobuffalo/meta v0.0.0-20181025145500-3a985a084b0a/go.mod h1:YDAKBud2FP7NZdruCSlmTmDOZbVSa6bpK7LJ/A/nlKg=
|
||||||
|
github.com/gobuffalo/meta v0.0.0-20181109154556-f76929ccd5fa/go.mod h1:1rYI5QsanV6cLpT1BlTAkrFi9rtCZrGkvSK8PglwfS8=
|
||||||
|
github.com/gobuffalo/meta v0.0.0-20181114191255-b130ebedd2f7/go.mod h1:K6cRZ29ozr4Btvsqkjvg5nDFTLOgTqf03KA70Ks0ypE=
|
||||||
|
github.com/gobuffalo/meta v0.0.0-20181116202903-8850e47774f5/go.mod h1:K6cRZ29ozr4Btvsqkjvg5nDFTLOgTqf03KA70Ks0ypE=
|
||||||
|
github.com/gobuffalo/meta v0.0.0-20181127070345-0d7e59dd540b/go.mod h1:RLO7tMvE0IAKAM8wny1aN12pvEKn7EtkBLkUZR00Qf8=
|
||||||
|
github.com/gobuffalo/meta v0.0.0-20190120163247-50bbb1fa260d h1:cP3lJDiGboBok8q6axF0rqWjlg/MOCfgjuwuNp5TlhE=
|
||||||
|
github.com/gobuffalo/meta v0.0.0-20190120163247-50bbb1fa260d/go.mod h1:KKsH44nIK2gA8p0PJmRT9GvWJUdphkDUA8AJEvFWiqM=
|
||||||
|
github.com/gobuffalo/meta v0.0.0-20190121163014-ecaa953cbfb3/go.mod h1:KLfkGnS+Tucc+iTkUcAUBtxpwOJGfhw2pHRLddPxMQY=
|
||||||
|
github.com/gobuffalo/meta v0.0.0-20190126124307-c8fb6f4eb5a9 h1:fCkQorKjSY55bEwmkK58ZY9ECD2ZioRoG6KCriSL9Zw=
|
||||||
|
github.com/gobuffalo/meta v0.0.0-20190126124307-c8fb6f4eb5a9/go.mod h1:zoh6GLgkk9+iI/62dST4amAuVAczZrBXoAk/t64n7Ew=
|
||||||
|
github.com/gobuffalo/mw-basicauth v1.0.3/go.mod h1:dg7+ilMZOKnQFHDefUzUHufNyTswVUviCBgF244C1+0=
|
||||||
|
github.com/gobuffalo/mw-basicauth v1.0.6/go.mod h1:RFyeGeDLZlVgp/eBflqu2eavFqyv0j0fVVP87WPYFwY=
|
||||||
|
github.com/gobuffalo/mw-basicauth v1.0.7/go.mod h1:xJ9/OSiOWl+kZkjaSun62srODr3Cx8OB4AKr+G4FlS4=
|
||||||
|
github.com/gobuffalo/mw-contenttype v0.0.0-20180802152300-74f5a47f4d56/go.mod h1:7EvcmzBbeCvFtQm5GqF9ys6QnCxz2UM1x0moiWLq1No=
|
||||||
|
github.com/gobuffalo/mw-contenttype v0.0.0-20190129203934-2554e742333b/go.mod h1:7x87+mDrr9Peh7AqhOtESyJLanMd2zQNz2Hts+vtBoE=
|
||||||
|
github.com/gobuffalo/mw-csrf v0.0.0-20180802151833-446ff26e108b/go.mod h1:sbGtb8DmDZuDUQoxjr8hG1ZbLtZboD9xsn6p77ppcHo=
|
||||||
|
github.com/gobuffalo/mw-csrf v0.0.0-20190129204204-25460a055517/go.mod h1:o5u+nnN0Oa7LBeDYH9QP36qeMPnXV9qbVnbZ4D+Kb0Q=
|
||||||
|
github.com/gobuffalo/mw-forcessl v0.0.0-20180802152810-73921ae7a130/go.mod h1:JvNHRj7bYNAMUr/5XMkZaDcw3jZhUZpsmzhd//FFWmQ=
|
||||||
|
github.com/gobuffalo/mw-i18n v0.0.0-20180802152014-e3060b7e13d6/go.mod h1:91AQfukc52A6hdfIfkxzyr+kpVYDodgAeT5cjX1UIj4=
|
||||||
|
github.com/gobuffalo/mw-i18n v0.0.0-20181027200759-09e0c99be4d3/go.mod h1:1PpGPgqP8VsfUppgBA9FrTOXjI6X9gjqhh/8dmg48lg=
|
||||||
|
github.com/gobuffalo/mw-i18n v0.0.0-20190129204410-552713a3ebb4/go.mod h1:rBg2eHxsyxVjtYra6fGy4GSF5C8NysOvz+Znnzk42EM=
|
||||||
|
github.com/gobuffalo/mw-paramlogger v0.0.0-20181005191442-d6ee392ec72e/go.mod h1:6OJr6VwSzgJMqWMj7TYmRUqzNe2LXu/W1rRW4MAz/ME=
|
||||||
|
github.com/gobuffalo/mw-paramlogger v0.0.0-20190129202837-395da1998525/go.mod h1:gEo/ABCsKqvpp/KCxN2AIzDEe0OJUXbJ9293FYrXw+w=
|
||||||
|
github.com/gobuffalo/mw-tokenauth v0.0.0-20181001105134-8545f626c189/go.mod h1:UqBF00IfKvd39ni5+yI5MLMjAf4gX7cDKN/26zDOD6c=
|
||||||
|
github.com/gobuffalo/mw-tokenauth v0.0.0-20190129201951-95847f29c5c8/go.mod h1:n2oa93LHGD94hGI+PoJO+6cf60DNrXrAIv9L/Ke3GXc=
|
||||||
|
github.com/gobuffalo/packd v0.0.0-20181027182251-01ad393492c8/go.mod h1:SmdBdhj6uhOsg1Ui4SFAyrhuc7U4VCildosO5IDJ3lc=
|
||||||
|
github.com/gobuffalo/packd v0.0.0-20181027190505-aafc0d02c411/go.mod h1:SmdBdhj6uhOsg1Ui4SFAyrhuc7U4VCildosO5IDJ3lc=
|
||||||
|
github.com/gobuffalo/packd v0.0.0-20181027194105-7ae579e6d213/go.mod h1:SmdBdhj6uhOsg1Ui4SFAyrhuc7U4VCildosO5IDJ3lc=
|
||||||
|
github.com/gobuffalo/packd v0.0.0-20181028162033-6d52e0eabf41/go.mod h1:Yf2toFaISlyQrr5TfO3h6DB9pl9mZRmyvBGQb/aQ/pI=
|
||||||
|
github.com/gobuffalo/packd v0.0.0-20181029140631-cf76bd87a5a6/go.mod h1:Yf2toFaISlyQrr5TfO3h6DB9pl9mZRmyvBGQb/aQ/pI=
|
||||||
|
github.com/gobuffalo/packd v0.0.0-20181031195726-c82734870264/go.mod h1:Yf2toFaISlyQrr5TfO3h6DB9pl9mZRmyvBGQb/aQ/pI=
|
||||||
|
github.com/gobuffalo/packd v0.0.0-20181103221656-16c4ed88b296/go.mod h1:Yf2toFaISlyQrr5TfO3h6DB9pl9mZRmyvBGQb/aQ/pI=
|
||||||
|
github.com/gobuffalo/packd v0.0.0-20181104210303-d376b15f8e96/go.mod h1:Yf2toFaISlyQrr5TfO3h6DB9pl9mZRmyvBGQb/aQ/pI=
|
||||||
|
github.com/gobuffalo/packd v0.0.0-20181111195323-b2e760a5f0ff/go.mod h1:Yf2toFaISlyQrr5TfO3h6DB9pl9mZRmyvBGQb/aQ/pI=
|
||||||
|
github.com/gobuffalo/packd v0.0.0-20181114190715-f25c5d2471d7/go.mod h1:Yf2toFaISlyQrr5TfO3h6DB9pl9mZRmyvBGQb/aQ/pI=
|
||||||
|
github.com/gobuffalo/packd v0.0.0-20181124090624-311c6248e5fb/go.mod h1:Foenia9ZvITEvG05ab6XpiD5EfBHPL8A6hush8SJ0o8=
|
||||||
|
github.com/gobuffalo/packd v0.0.0-20181207120301-c49825f8f6f4/go.mod h1:LYc0TGKFBBFTRC9dg2pcRcMqGCTMD7T2BIMP7OBuQAA=
|
||||||
|
github.com/gobuffalo/packd v0.0.0-20181212173646-eca3b8fd6687 h1:uZ+G4JprR0UEq0aHZs+6eP7TEZuFfrIkmQWejIBV/QQ=
|
||||||
|
github.com/gobuffalo/packd v0.0.0-20181212173646-eca3b8fd6687/go.mod h1:LYc0TGKFBBFTRC9dg2pcRcMqGCTMD7T2BIMP7OBuQAA=
|
||||||
|
github.com/gobuffalo/packr v1.13.7/go.mod h1:KkinLIn/n6+3tVXMwg6KkNvWwVsrRAz4ph+jgpk3Z24=
|
||||||
|
github.com/gobuffalo/packr v1.15.0/go.mod h1:t5gXzEhIviQwVlNx/+3SfS07GS+cZ2hn76WLzPp6MGI=
|
||||||
|
github.com/gobuffalo/packr v1.15.1/go.mod h1:IeqicJ7jm8182yrVmNbM6PR4g79SjN9tZLH8KduZZwE=
|
||||||
|
github.com/gobuffalo/packr v1.16.0/go.mod h1:Yx/lcR/7mDLXhuJSzsz2MauD/HUwSc+EK6oigMRGGsM=
|
||||||
|
github.com/gobuffalo/packr v1.19.0/go.mod h1:MstrNkfCQhd5o+Ct4IJ0skWlxN8emOq8DsoT1G98VIU=
|
||||||
|
github.com/gobuffalo/packr v1.20.0/go.mod h1:JDytk1t2gP+my1ig7iI4NcVaXr886+N0ecUga6884zw=
|
||||||
|
github.com/gobuffalo/packr v1.21.0/go.mod h1:H00jGfj1qFKxscFJSw8wcL4hpQtPe1PfU2wa6sg/SR0=
|
||||||
|
github.com/gobuffalo/packr v1.21.5/go.mod h1:zCvDxrZzFmq5Xd7Jw4vaGe/OYwzuXnma31D2EbTHMWk=
|
||||||
|
github.com/gobuffalo/packr v1.21.7/go.mod h1:73tmYjwi4Cvb1eNiAwpmrzZ0gxVA4KBqVSZ2FNeJodM=
|
||||||
|
github.com/gobuffalo/packr v1.21.9 h1:zBaEhCmJpYy/UdHGAGIC3vO5Uh7RW091le41+Ydcg4E=
|
||||||
|
github.com/gobuffalo/packr v1.21.9/go.mod h1:GC76q6nMzRtR+AEN/VV4w0z2/4q7SOaEmXh3Ooa8sOE=
|
||||||
|
github.com/gobuffalo/packr/v2 v2.0.0-rc.5/go.mod h1:e6gmOfhf3KmT4zl2X/NDRSfBXk2oV4TXZ+NNOM0xwt8=
|
||||||
|
github.com/gobuffalo/packr/v2 v2.0.0-rc.7/go.mod h1:BzhceHWfF3DMAkbPUONHYWs63uacCZxygFY1b4H9N2A=
|
||||||
|
github.com/gobuffalo/packr/v2 v2.0.0-rc.8/go.mod h1:y60QCdzwuMwO2R49fdQhsjCPv7tLQFR0ayzxxla9zes=
|
||||||
|
github.com/gobuffalo/packr/v2 v2.0.0-rc.9/go.mod h1:fQqADRfZpEsgkc7c/K7aMew3n4aF1Kji7+lIZeR98Fc=
|
||||||
|
github.com/gobuffalo/packr/v2 v2.0.0-rc.10/go.mod h1:4CWWn4I5T3v4c1OsJ55HbHlUEKNWMITG5iIkdr4Px4w=
|
||||||
|
github.com/gobuffalo/packr/v2 v2.0.0-rc.11/go.mod h1:JoieH/3h3U4UmatmV93QmqyPUdf4wVM9HELaHEu+3fk=
|
||||||
|
github.com/gobuffalo/packr/v2 v2.0.0-rc.12/go.mod h1:FV1zZTsVFi1DSCboO36Xgs4pzCZBjB/tDV9Cz/lSaR8=
|
||||||
|
github.com/gobuffalo/packr/v2 v2.0.0-rc.13/go.mod h1:2Mp7GhBFMdJlOK8vGfl7SYtfMP3+5roE39ejlfjw0rA=
|
||||||
|
github.com/gobuffalo/packr/v2 v2.0.0-rc.14/go.mod h1:06otbrNvDKO1eNQ3b8hst+1010UooI2MFg+B2Ze4MV8=
|
||||||
|
github.com/gobuffalo/packr/v2 v2.0.0-rc.15 h1:vSmYcMO6CtuNQvMSbEJeIJlaeZzz2zoxGLTy8HrDh80=
|
||||||
|
github.com/gobuffalo/packr/v2 v2.0.0-rc.15/go.mod h1:IMe7H2nJvcKXSF90y4X1rjYIRlNMJYCxEhssBXNZwWs=
|
||||||
|
github.com/gobuffalo/plush v3.7.16+incompatible/go.mod h1:rQ4zdtUUyZNqULlc6bqd5scsPfLKfT0+TGMChgduDvI=
|
||||||
|
github.com/gobuffalo/plush v3.7.20+incompatible/go.mod h1:rQ4zdtUUyZNqULlc6bqd5scsPfLKfT0+TGMChgduDvI=
|
||||||
|
github.com/gobuffalo/plush v3.7.21+incompatible/go.mod h1:rQ4zdtUUyZNqULlc6bqd5scsPfLKfT0+TGMChgduDvI=
|
||||||
|
github.com/gobuffalo/plush v3.7.22+incompatible/go.mod h1:rQ4zdtUUyZNqULlc6bqd5scsPfLKfT0+TGMChgduDvI=
|
||||||
|
github.com/gobuffalo/plush v3.7.23+incompatible/go.mod h1:rQ4zdtUUyZNqULlc6bqd5scsPfLKfT0+TGMChgduDvI=
|
||||||
|
github.com/gobuffalo/plush v3.7.30+incompatible/go.mod h1:rQ4zdtUUyZNqULlc6bqd5scsPfLKfT0+TGMChgduDvI=
|
||||||
|
github.com/gobuffalo/plush v3.7.31+incompatible/go.mod h1:rQ4zdtUUyZNqULlc6bqd5scsPfLKfT0+TGMChgduDvI=
|
||||||
|
github.com/gobuffalo/plush v3.7.32+incompatible/go.mod h1:rQ4zdtUUyZNqULlc6bqd5scsPfLKfT0+TGMChgduDvI=
|
||||||
|
github.com/gobuffalo/plush v3.7.33+incompatible/go.mod h1:rQ4zdtUUyZNqULlc6bqd5scsPfLKfT0+TGMChgduDvI=
|
||||||
|
github.com/gobuffalo/plushgen v0.0.0-20181128164830-d29dcb966cb2/go.mod h1:r9QwptTFnuvSaSRjpSp4S2/4e2D3tJhARYbvEBcKSb4=
|
||||||
|
github.com/gobuffalo/plushgen v0.0.0-20181203163832-9fc4964505c2/go.mod h1:opEdT33AA2HdrIwK1aibqnTJDVVKXC02Bar/GT1YRVs=
|
||||||
|
github.com/gobuffalo/plushgen v0.0.0-20181207152837-eedb135bd51b/go.mod h1:Lcw7HQbEVm09sAQrCLzIxuhFbB3nAgp4c55E+UlynR0=
|
||||||
|
github.com/gobuffalo/plushgen v0.0.0-20190104222512-177cd2b872b3/go.mod h1:tYxCozi8X62bpZyKXYHw1ncx2ZtT2nFvG42kuLwYjoc=
|
||||||
|
github.com/gobuffalo/pop v4.8.2+incompatible/go.mod h1:DwBz3SD5SsHpTZiTubcsFWcVDpJWGsxjVjMPnkiThWg=
|
||||||
|
github.com/gobuffalo/pop v4.8.3+incompatible/go.mod h1:DwBz3SD5SsHpTZiTubcsFWcVDpJWGsxjVjMPnkiThWg=
|
||||||
|
github.com/gobuffalo/pop v4.8.4+incompatible/go.mod h1:DwBz3SD5SsHpTZiTubcsFWcVDpJWGsxjVjMPnkiThWg=
|
||||||
|
github.com/gobuffalo/pop v4.8.5+incompatible/go.mod h1:DwBz3SD5SsHpTZiTubcsFWcVDpJWGsxjVjMPnkiThWg=
|
||||||
|
github.com/gobuffalo/pop v4.8.7+incompatible/go.mod h1:DwBz3SD5SsHpTZiTubcsFWcVDpJWGsxjVjMPnkiThWg=
|
||||||
|
github.com/gobuffalo/pop v4.8.8+incompatible/go.mod h1:DwBz3SD5SsHpTZiTubcsFWcVDpJWGsxjVjMPnkiThWg=
|
||||||
|
github.com/gobuffalo/pop v4.9.0+incompatible/go.mod h1:DwBz3SD5SsHpTZiTubcsFWcVDpJWGsxjVjMPnkiThWg=
|
||||||
|
github.com/gobuffalo/pop v4.9.1+incompatible/go.mod h1:DwBz3SD5SsHpTZiTubcsFWcVDpJWGsxjVjMPnkiThWg=
|
||||||
|
github.com/gobuffalo/pop v4.9.2+incompatible/go.mod h1:DwBz3SD5SsHpTZiTubcsFWcVDpJWGsxjVjMPnkiThWg=
|
||||||
|
github.com/gobuffalo/pop v4.9.3+incompatible/go.mod h1:DwBz3SD5SsHpTZiTubcsFWcVDpJWGsxjVjMPnkiThWg=
|
||||||
|
github.com/gobuffalo/pop v4.9.5+incompatible/go.mod h1:DwBz3SD5SsHpTZiTubcsFWcVDpJWGsxjVjMPnkiThWg=
|
||||||
|
github.com/gobuffalo/pop v4.9.6+incompatible/go.mod h1:DwBz3SD5SsHpTZiTubcsFWcVDpJWGsxjVjMPnkiThWg=
|
||||||
|
github.com/gobuffalo/release v1.0.35/go.mod h1:VtHFAKs61vO3wboCec5xr9JPTjYyWYcvaM3lclkc4x4=
|
||||||
|
github.com/gobuffalo/release v1.0.38/go.mod h1:VtHFAKs61vO3wboCec5xr9JPTjYyWYcvaM3lclkc4x4=
|
||||||
|
github.com/gobuffalo/release v1.0.42/go.mod h1:RPs7EtafH4oylgetOJpGP0yCZZUiO4vqHfTHJjSdpug=
|
||||||
|
github.com/gobuffalo/release v1.0.51/go.mod h1:RPs7EtafH4oylgetOJpGP0yCZZUiO4vqHfTHJjSdpug=
|
||||||
|
github.com/gobuffalo/release v1.0.52/go.mod h1:RPs7EtafH4oylgetOJpGP0yCZZUiO4vqHfTHJjSdpug=
|
||||||
|
github.com/gobuffalo/release v1.0.53/go.mod h1:FdF257nd8rqhNaqtDWFGhxdJ/Ig4J7VcS3KL7n/a+aA=
|
||||||
|
github.com/gobuffalo/release v1.0.54/go.mod h1:Pe5/RxRa/BE8whDpGfRqSI7D1a0evGK1T4JDm339tJc=
|
||||||
|
github.com/gobuffalo/release v1.0.61/go.mod h1:mfIO38ujUNVDlBziIYqXquYfBF+8FDHUjKZgYC1Hj24=
github.com/gobuffalo/release v1.0.63/go.mod h1:/7hQAikt0l8Iu/tAX7slC1qiOhD6Nb+3KMmn/htiUfc=
github.com/gobuffalo/release v1.0.72/go.mod h1:NP5NXgg/IX3M5XmHmWR99D687/3Dt9qZtTK/Lbwc1hU=
github.com/gobuffalo/release v1.0.74/go.mod h1:NP5NXgg/IX3M5XmHmWR99D687/3Dt9qZtTK/Lbwc1hU=
github.com/gobuffalo/release v1.1.1/go.mod h1:Sluak1Xd6kcp6snkluR1jeXAogdJZpFFRzTYRs/2uwg=
github.com/gobuffalo/release v1.1.3/go.mod h1:CuXc5/m+4zuq8idoDt1l4va0AXAn/OSs08uHOfMVr8E=
github.com/gobuffalo/release v1.1.6/go.mod h1:18naWa3kBsqO0cItXZNJuefCKOENpbbUIqRL1g+p6z0=
github.com/gobuffalo/shoulders v1.0.1/go.mod h1:V33CcVmaQ4gRUmHKwq1fiTXuf8Gp/qjQBUL5tHPmvbA=
github.com/gobuffalo/syncx v0.0.0-20181120191700-98333ab04150/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw=
github.com/gobuffalo/syncx v0.0.0-20181120194010-558ac7de985f h1:S5EeH1reN93KR0L6TQvkRpu9YggCYXrUqFh1iEgvdC0=
github.com/gobuffalo/syncx v0.0.0-20181120194010-558ac7de985f/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw=
github.com/gobuffalo/tags v2.0.11+incompatible/go.mod h1:9XmhOkyaB7UzvuY4UoZO4s67q8/xRMVJEaakauVQYeY=
github.com/gobuffalo/tags v2.0.14+incompatible/go.mod h1:9XmhOkyaB7UzvuY4UoZO4s67q8/xRMVJEaakauVQYeY=
github.com/gobuffalo/tags v2.0.15+incompatible/go.mod h1:9XmhOkyaB7UzvuY4UoZO4s67q8/xRMVJEaakauVQYeY=
github.com/gobuffalo/uuid v2.0.3+incompatible/go.mod h1:ErhIzkRhm0FtRuiE/PeORqcw4cVi1RtSpnwYrxuvkfE=
github.com/gobuffalo/uuid v2.0.4+incompatible/go.mod h1:ErhIzkRhm0FtRuiE/PeORqcw4cVi1RtSpnwYrxuvkfE=
github.com/gobuffalo/uuid v2.0.5+incompatible/go.mod h1:ErhIzkRhm0FtRuiE/PeORqcw4cVi1RtSpnwYrxuvkfE=
github.com/gobuffalo/validate v2.0.3+incompatible/go.mod h1:N+EtDe0J8252BgfzQUChBgfd6L93m9weay53EWFVsMM=
github.com/gobuffalo/x v0.0.0-20181003152136-452098b06085/go.mod h1:WevpGD+5YOreDJznWevcn8NTmQEW5STSBgIkpkjzqXc=
github.com/gobuffalo/x v0.0.0-20181007152206-913e47c59ca7/go.mod h1:9rDPXaB3kXdKWzMc4odGQQdG2e2DIEmANy5aSJ9yesY=
github.com/gobuffalo/x v0.0.0-20181025165825-f204f550da9d/go.mod h1:Qh2Pb/Ak1Ko2mzHlGPigrnxkhO4WTTCI1jJM58sbgtE=
github.com/gobuffalo/x v0.0.0-20181025192250-1ef645d63fe8/go.mod h1:AIlnMGlYXOCsoCntLPFLYtrJNS/pc2HD4IdSXH62TpU=
github.com/gobuffalo/x v0.0.0-20181109195216-5b3131238124/go.mod h1:GpdLUY6/Ztf/3FfxfwsLkDqAGZ0brhlh7LzIibHyZp0=
github.com/gobuffalo/x v0.0.0-20181110221217-14085ca3e1a9/go.mod h1:ig5vdn4+5IPtxgESlZWo1SSDyHKKef8EjVVKhY9kkIQ=
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
github.com/gocql/gocql v0.0.0-20181124151448-70385f88b28b/go.mod h1:4Fw1eo5iaEhDUs8XyuhSVCVy52Jq3L+/3GJgYkwc+/0=
github.com/gocraft/dbr v0.0.0-20181029195440-042fe86dc2da h1:iBCx9/LR++diJWHizvo5tuFH7jeJ2+X5SSA0Fb/i8Kk=
github.com/gocraft/dbr v0.0.0-20181029195440-042fe86dc2da/go.mod h1:K/9g3pPouf13kP5K7pdriQEJAy272R9yXuWuDIEWJTM=
github.com/gocraft/dbr v0.0.0-20190131145710-48a049970bd2 h1:zPA5FYTrmWSPMYWc3xJOTMGiqqm8lrdawrQqnamsw6w=
github.com/gocraft/dbr v0.0.0-20190131145710-48a049970bd2/go.mod h1:K/9g3pPouf13kP5K7pdriQEJAy272R9yXuWuDIEWJTM=
github.com/gofrs/uuid v3.1.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM=
github.com/gofrs/uuid v3.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM=
github.com/gogo/protobuf v1.0.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/golang-migrate/migrate v3.5.4+incompatible h1:R7OzwvCJTCgwapPCiX6DyBiu2czIUMDCB118gFTKTUA=
github.com/golang-migrate/migrate v3.5.4+incompatible/go.mod h1:IsVUlFN5puWOmXrqjgGUfIRIbU7mr8oNBE2tyERd9Wk=
github.com/golang-migrate/migrate/v4 v4.2.2 h1:m9WF3B3yge1mKm5+/q6C3qPETMWqphrod3+osb+sP8A=
github.com/golang-migrate/migrate/v4 v4.2.2/go.mod h1:JRwdki93/aFawDXMUM4GcRu/FAIfyw+1Kuyd9vkbaeA=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:tluoj9z5200jBnyusfRPU2LqT6J+DAorxEvtC7LHB+E=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/snappy v0.0.0-20170215233205-553a64147049/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2/go.mod h1:k9Qvh+8juN+UKMCS/3jFtGICgW8O96FVaZsaxdzDkR4=
github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a/go.mod h1:ryS0uhF+x9jgbj/N71xsEqODy9BN81/GonCZiOzirOk=
github.com/golangci/errcheck v0.0.0-20181003203344-ef45e06d44b6/go.mod h1:DbHgvLiFKX1Sh2T1w8Q/h4NAI8MHIpzCdnBUDTXU3I0=
github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613/go.mod h1:SyvUF2NxV+sN8upjjeVYr5W7tyxaT1JVtvhKhOn2ii8=
github.com/golangci/go-tools v0.0.0-20180902103155-93eecd106a0b/go.mod h1:unzUULGw35sjyOYjUt0jMTXqHlZPpPc6e+xfO4cd6mM=
github.com/golangci/goconst v0.0.0-20180610141641-041c5f2b40f3/go.mod h1:JXrF4TWy4tXYn62/9x8Wm/K/dm06p8tCKwFRDPZG/1o=
github.com/golangci/gocyclo v0.0.0-20180528134321-2becd97e67ee/go.mod h1:ozx7R9SIwqmqf5pRP90DhR2Oay2UIjGuKheCBCNwAYU=
github.com/golangci/gofmt v0.0.0-20181105071733-0b8337e80d98/go.mod h1:9qCChq59u/eW8im404Q2WWTrnBUQKjpNYKMbU4M7EFU=
github.com/golangci/golangci-lint v1.12.5/go.mod h1:iMfuFWFYJ1CZxlMQfNWvPj3c22PuyUkw9RQ1UfhDFDk=
github.com/golangci/gosec v0.0.0-20180901114220-8afd9cbb6cfb/go.mod h1:ON/c2UR0VAAv6ZEAFKhjCLplESSmRFfZcDLASbI1GWo=
github.com/golangci/govet v0.0.0-20180818181408-44ddbe260190/go.mod h1:pPwb+AK755h3/r73avHz5bEN6sa51/2HEZlLaV53hCo=
github.com/golangci/ineffassign v0.0.0-20180808204949-2ee8f2867dde/go.mod h1:e5tpTHCfVze+7EpLEozzMB3eafxo2KT5veNg1k6byQU=
github.com/golangci/interfacer v0.0.0-20180902080945-01958817a6ec/go.mod h1:yBorupihJ5OYDFE7/EZwrslyNyZaaidqqVptYTcNxnk=
github.com/golangci/lint v0.0.0-20170908181259-c2187e7932b5/go.mod h1:zs8jPuoOp76KrjiydDqO3CGeS4v9gq77HNNiYcxxTGw=
github.com/golangci/lint v0.0.0-20180902080404-c2187e7932b5/go.mod h1:zs8jPuoOp76KrjiydDqO3CGeS4v9gq77HNNiYcxxTGw=
github.com/golangci/lint-1 v0.0.0-20180610141402-4bf9709227d1/go.mod h1:/X8TswGSh1pIozq4ZwCfxS0WA5JGXguxk94ar/4c87Y=
github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca/go.mod h1:tvlJhZqDe4LMs4ZHD0oMUlt9G2LWuDGoisJTBzLMV9o=
github.com/golangci/misspell v0.0.0-20180809174111-950f5d19e770/go.mod h1:dEbvlSfYbMQDtrpRMQU675gSDLDNa8sCPPChZ7PhiVA=
github.com/golangci/prealloc v0.0.0-20180630174525-215b22d4de21/go.mod h1:tf5+bzsHdTM0bsB7+8mt0GUMvjCgwLpTapNZHU8AajI=
github.com/golangci/revgrep v0.0.0-20180526074752-d9c87f5ffaf0/go.mod h1:qOQCunEYvmd/TLamH+7LlVccLvUH5kZNhbCgTHoBbp4=
github.com/golangci/tools v0.0.0-20180902102414-2cefd77fef9b/go.mod h1:zgj6NOYXOC1cexsdtDceI4/mj3aXK4JOVg9AV3C5LWI=
github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4/go.mod h1:Izgrg8RkN3rCIMLGE9CyYmU9pY2Jer6DgANEnZ/L/cQ=
github.com/golangci/unparam v0.0.0-20180902112548-7ad9dbcccc16/go.mod h1:KW2L33j82vo0S0U6RP6uUQSuat+0Q457Yf+1mXC98/M=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ=
github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/googleapis/gax-go v2.0.0+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk2eWhX2bMyUtNHzFKcPA9HY=
github.com/gopherjs/gopherjs v0.0.0-20181004151105-1babbf986f6f/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gorilla/context v0.0.0-20160226214623-1ea25387ff6f/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
github.com/gorilla/mux v1.6.1/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/mux v1.7.0/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/pat v0.0.0-20180118222023-199c85a7f6d1/go.mod h1:YeAe0gNeiNT5hoiZRI4yiOky6jVdNvfO2N6Kav/HmxY=
github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4=
github.com/gorilla/sessions v1.1.2/go.mod h1:8KCfur6+4Mqcc6S0FEfKuN15Vl5MgXW92AE8ovaJD0w=
github.com/gorilla/sessions v1.1.3/go.mod h1:8KCfur6+4Mqcc6S0FEfKuN15Vl5MgXW92AE8ovaJD0w=
github.com/gorilla/websocket v1.2.0 h1:VJtLvh6VQym50czpZzx07z/kw9EgAxI3x1ZB8taTMQQ=
github.com/gorilla/websocket v1.2.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
github.com/h2non/filetype v1.0.6 h1:g84/+gdkAT1hnYO+tHpCLoikm13Ju55OkN4KCb1uGEQ=
github.com/h2non/filetype v1.0.6/go.mod h1:isekKqOuhMj+s/7r3rIeTErIRy4Rub5uBWHfvMusLMU=
github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed/go.mod h1:tMWxXQ9wFIaZeTI9F+hmhFiGpFmhOHzyShyFUhRm0H4=
github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/golang-lru v0.5.0 h1:CL2msUPvZTLb5O648aiLNJw3hnBxN2+1Jq8rCOH9wdo=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/hcl v0.0.0-20180404174102-ef8a98b0bbce/go.mod h1:oZtUIOe8dh44I2q6ScRibXws4Ajl+d+nod3AaR9vL5w=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
github.com/jackc/fake v0.0.0-20150926172116-812a484cc733/go.mod h1:WrMFNQdiFJ80sQsxDoMokWK1W5TQtxBFNpzWTD84ibQ=
github.com/jackc/pgx v3.2.0+incompatible/go.mod h1:0ZGrqGqkRlliWnWB4zKnWtjbSWbGkVEFm4TeybAXq+I=
github.com/jackc/pgx v3.3.0+incompatible/go.mod h1:0ZGrqGqkRlliWnWB4zKnWtjbSWbGkVEFm4TeybAXq+I=
github.com/jcmturner/gofork v0.0.0-20180107083740-2aebee971930/go.mod h1:MK8+TM0La+2rjBD4jE12Kj1pCCxK7d2LK/UM3ncEo0o=
github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
github.com/jmoiron/sqlx v0.0.0-20180614180643-0dae4fefe7c0/go.mod h1:IiEW3SEiiErVyFdH8NTuWjSifiEQKUoyK3LNqr2kCHU=
github.com/jmoiron/sqlx v1.2.0 h1:41Ip0zITnmWNR/vHV+S4m+VoUivnWY5E4OJfLZjCJMA=
github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks=
github.com/joho/godotenv v1.2.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc=
github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/karrick/godirwalk v1.7.5/go.mod h1:2c9FRhkDxdIbgkOnCEvnSWs71Bhugbl46shStcFDJ34=
github.com/karrick/godirwalk v1.7.7/go.mod h1:2c9FRhkDxdIbgkOnCEvnSWs71Bhugbl46shStcFDJ34=
github.com/karrick/godirwalk v1.7.8 h1:VfG72pyIxgtC7+3X9CMHI0AOl4LwyRAg98WAgsvffi8=
github.com/karrick/godirwalk v1.7.8/go.mod h1:2c9FRhkDxdIbgkOnCEvnSWs71Bhugbl46shStcFDJ34=
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/konsorten/go-windows-terminal-sequences v0.0.0-20180402223658-b729f2633dfe/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/pty v1.1.3/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kshvakov/clickhouse v1.3.4/go.mod h1:DMzX7FxRymoNkVgizH0DWAL8Cur7wHLgx3MUnGwJqpE=
github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 h1:SOEGU9fKiNWd/HOJuq6+3iTQz8KNCLtVX6idSoTLdUw=
github.com/lann/builder v0.0.0-20180802200727-47ae307949d0/go.mod h1:dXGbAdH5GtBTC4WfIxhKZfyBF/HBFgRZSWwZ9g/He9o=
github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 h1:P6pPBnrTSX3DEVR4fDembhRWSsG5rVo6hYhAB/ADZrk=
github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0/go.mod h1:vmVJ0l/dxyfGW6FmdpVm2joNMFikkuWg0EoCKLGUMNw=
github.com/lib/pq v1.0.0 h1:X5PMW56eZitiTeO7tKzZxFCSpbFZJtkMMooicw2us9A=
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/logrusorgru/aurora v0.0.0-20181002194514-a7b3b318ed4e/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4=
github.com/magiconair/properties v1.7.6/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
github.com/markbates/deplist v1.0.4/go.mod h1:gRRbPbbuA8TmMiRvaOzUlRfzfjeCCBqX2A6arxN01MM=
github.com/markbates/deplist v1.0.5/go.mod h1:gRRbPbbuA8TmMiRvaOzUlRfzfjeCCBqX2A6arxN01MM=
github.com/markbates/going v1.0.2/go.mod h1:UWCk3zm0UKefHZ7l8BNqi26UyiEMniznk8naLdTcy6c=
github.com/markbates/going v1.0.3/go.mod h1:fQiT6v6yQar9UD6bd/D4Z5Afbk9J6BBVBtLiyY4gp2o=
github.com/markbates/grift v1.0.4/go.mod h1:wbmtW74veyx+cgfwFhlnnMWqhoz55rnHR47oMXzsyVs=
github.com/markbates/grift v1.0.5/go.mod h1:EHmVIjOQoj/OOBDzlZ8RW0ZkvOtQ4xRHjrPvmfoiFaU=
github.com/markbates/hmax v1.0.0/go.mod h1:cOkR9dktiESxIMu+65oc/r/bdY4bE8zZw3OLhLx0X2c=
github.com/markbates/inflect v1.0.0/go.mod h1:oTeZL2KHA7CUX6X+fovmK9OvIOFuqu0TwdQrZjLTh88=
github.com/markbates/inflect v1.0.1/go.mod h1:uv3UVNBe5qBIfCm8O8Q+DW+S1EopeyINj+Ikhc7rnCk=
github.com/markbates/inflect v1.0.3/go.mod h1:1fR9+pO2KHEO9ZRtto13gDwwZaAKstQzferVeWqbgNs=
github.com/markbates/inflect v1.0.4/go.mod h1:1fR9+pO2KHEO9ZRtto13gDwwZaAKstQzferVeWqbgNs=
github.com/markbates/oncer v0.0.0-20180924031910-e862a676800b/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE=
github.com/markbates/oncer v0.0.0-20180924034138-723ad0170a46/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE=
github.com/markbates/oncer v0.0.0-20181014194634-05fccaae8fc4/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE=
github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2 h1:JgVTCPf0uBVcUSWpyXmGpgOc62nK5HWUBKAGc3Qqa5k=
github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE=
github.com/markbates/refresh v1.4.10/go.mod h1:NDPHvotuZmTmesXxr95C9bjlw1/0frJwtME2dzcVKhc=
github.com/markbates/refresh v1.4.11/go.mod h1:awpJuyo4zgexB/JaHfmBX0sRdvOjo2dXwIayWIz9i3g=
github.com/markbates/refresh v1.5.0/go.mod h1:ZYMLkxV+x7wXQ2Xd7bXAPyF0EXiEWAMfiy/4URYb1+M=
github.com/markbates/safe v1.0.0/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0=
github.com/markbates/safe v1.0.1 h1:yjZkbvRM6IzKj9tlu/zMJLS0n/V351OZWRnF3QfaUxI=
github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0=
github.com/markbates/sigtx v1.0.0/go.mod h1:QF1Hv6Ic6Ca6W+T+DL0Y/ypborFKyvUY9HmuCD4VeTc=
github.com/markbates/willie v1.0.9/go.mod h1:fsrFVWl91+gXpx/6dv715j7i11fYPfZ9ZGfH0DQzY7w=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-sqlite3 v1.9.0 h1:pDRiWfl+++eC2FEFRy6jXmQlvp4Yh3z1MJKg4UeYM/4=
github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-sqlite3 v1.10.0 h1:jbhqpg7tQe4SupckyijYiy0mJJ/pRyHvXf7JdWK860o=
github.com/mattn/go-sqlite3 v1.10.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/microcosm-cc/bluemonday v1.0.1/go.mod h1:hsXNsILzKxV+sX77C5b8FSuKF00vh2OMYv+xgHpAMF4=
github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/leAFZyRl6bYmGDlGc=
github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/go-ps v0.0.0-20170309133038-4fdf99ab2936/go.mod h1:r1VsdOzOPt1ZSrGZWFoNhsAedKnEd6r9Np1+5blZCWk=
github.com/mitchellh/mapstructure v0.0.0-20180203102830-a4e142e9c047/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v0.0.0-20180220230111-00c29f56e238/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.0.0/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mongodb/mongo-go-driver v0.1.0/go.mod h1:NK/HWDIIZkaYsnYa0hmtP443T5ELr0KDecmIioVuuyU=
github.com/monoculum/formam v0.0.0-20180901015400-4e68be1d79ba/go.mod h1:RKgILGEJq24YyJ2ban8EO0RUVSJlF1pGsEvoLEACr/Q=
github.com/nbutton23/zxcvbn-go v0.0.0-20171102151520-eafdab6b0663/go.mod h1:o96djdrsSGy3AWPyBgZMAGfxZNfgntdJG+11KU4QvbU=
github.com/nicksnyder/go-i18n v1.10.0/go.mod h1:HrK7VCrbOvQoUAQ7Vpy7i87N7JZZZ7R2xBGjv0j365Q=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/gomega v1.4.1/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA=
github.com/onsi/gomega v1.4.2/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s=
github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0=
github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74=
github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
github.com/openzipkin/zipkin-go v0.1.1/go.mod h1:NtoC/o8u3JlF1lSlyPNswIbeQH9bJTmOf0Erfk+hxe8=
github.com/pelletier/go-toml v1.1.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_golang v0.8.0/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/common v0.0.0-20180801064454-c7de2306084e/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
github.com/prometheus/procfs v0.0.0-20180725123919-05ee40e3a273/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20180920065004-418d78d0b9a7/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/rogpeppe/go-internal v1.0.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.1.0 h1:g0fH8RicVgNl+zVZDCDfbdWxAWoAEJyI7I3TZYXFiig=
github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rs/cors v1.6.0 h1:G9tHG9lebljV9mfp9SNPDL36nCDxmo3zTlAf1YgvzmI=
github.com/rs/cors v1.6.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
github.com/rubenv/sql-migrate v0.0.0-20181213081019-5a8808c14925 h1:Kd1g/YuXjhiyHrGlppC2X3UTOEt9oHRU/yeHDKnyPZA=
github.com/rubenv/sql-migrate v0.0.0-20181213081019-5a8808c14925/go.mod h1:WS0rl9eEliYI8DPnr3TOwz4439pay+qNgzJoVya/DmY=
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
github.com/serenize/snaker v0.0.0-20171204205717-a683aaf2d516/go.mod h1:Yow6lPLSAXx2ifx470yD/nUe22Dv5vBvxK/UK9UUTVs=
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/shirou/gopsutil v0.0.0-20180427012116-c95755e4bcd7/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA=
github.com/shirou/w32 v0.0.0-20160930032740-bb4de0191aa4/go.mod h1:qsXQc7+bwAM3Q1u/4XEfrquwF8Lw7D7y5cD8CuHnfIc=
github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4=
github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk=
github.com/shurcooL/go v0.0.0-20190121191506-3fef8c783dec/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk=
github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOmsrJOB+vfqUK+7DmDyjhSLIIBnXo9lvZJj3MWQ=
github.com/shurcooL/highlight_diff v0.0.0-20170515013008-09bb4053de1b/go.mod h1:ZpfEhSmds4ytuByIcDnOLkTHGUI6KNqRNPDLHDk+mUU=
github.com/shurcooL/highlight_diff v0.0.0-20181222201841-111da2e7d480/go.mod h1:ZpfEhSmds4ytuByIcDnOLkTHGUI6KNqRNPDLHDk+mUU=
github.com/shurcooL/highlight_go v0.0.0-20170515013102-78fb10f4a5f8/go.mod h1:UDKB5a1T23gOMUJrI+uSuH0VRDStOiUVSjBTRDVBVag=
github.com/shurcooL/highlight_go v0.0.0-20181215221002-9d8641ddf2e1/go.mod h1:UDKB5a1T23gOMUJrI+uSuH0VRDStOiUVSjBTRDVBVag=
github.com/shurcooL/httpfs v0.0.0-20171119174359-809beceb2371/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg=
github.com/shurcooL/octicon v0.0.0-20180602230221-c42b0e3b24d9/go.mod h1:eWdoE5JD4R5UVWDucdOPg1g2fqQRq78IQa9zlOV1vpQ=
github.com/shurcooL/octicon v0.0.0-20181222203144-9ff1a4cf27f4/go.mod h1:eWdoE5JD4R5UVWDucdOPg1g2fqQRq78IQa9zlOV1vpQ=
github.com/shurcooL/sanitized_anchor_name v0.0.0-20170918181015-86672fcb3f95/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/shurcooL/vfsgen v0.0.0-20180121065927-ffb13db8def0/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw=
github.com/sirupsen/logrus v1.0.5/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc=
github.com/sirupsen/logrus v1.0.6/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc=
github.com/sirupsen/logrus v1.1.0/go.mod h1:zrgwTnHtNr00buQ1vSptGe8m1f/BbgsPukg8qsT7A+A=
github.com/sirupsen/logrus v1.1.1/go.mod h1:zrgwTnHtNr00buQ1vSptGe8m1f/BbgsPukg8qsT7A+A=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.3.0 h1:hI/7Q+DtNZ2kINb6qt/lS+IyXnHQe9e90POfeewL/ME=
github.com/sirupsen/logrus v1.3.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/goconvey v0.0.0-20180222194500-ef6db91d284a/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s=
github.com/sourcegraph/annotate v0.0.0-20160123013949-f4cad6c6324d/go.mod h1:UdhH50NIW0fCiwBSr0co2m7BnFLdv4fQTgdqdJTHFeE=
github.com/sourcegraph/syntaxhighlight v0.0.0-20170531221838-bd320f5d308e/go.mod h1:HuIsMU8RRBOtsCgI77wP899iHVBQpCmg4ErYMZB+2IA=
github.com/spf13/afero v1.1.0/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ=
github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ=
github.com/spf13/afero v1.2.0/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk=
github.com/spf13/afero v1.2.1/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk=
github.com/spf13/cast v1.2.0/go.mod h1:r2rcYCSwa1IExKTDiTfzaxqT2FNHs8hODu4LnUfgKEg=
github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cobra v0.0.2/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
github.com/spf13/cobra v0.0.3 h1:ZlrZ4XsMRm04Fr5pSFxBgfND2EBVa1nLpiy1stUsX/8=
github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
github.com/spf13/jwalterweatherman v0.0.0-20180109140146-7c0cea34c8ec/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.2/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg=
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/viper v1.0.2/go.mod h1:A8kyI5cUJhb8N+3pkfONlcEcZbueH6nhAm0Fq7SrnBM=
github.com/spf13/viper v1.2.1/go.mod h1:P4AexN0a+C9tGAnUFNwDMYYZv3pjFuvmeiMyKRaNVlI=
github.com/spf13/viper v1.3.0/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
github.com/spf13/viper v1.3.1/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/tidwall/pretty v0.0.0-20180105212114-65a9db5fad51/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
github.com/ugorji/go v1.1.2/go.mod h1:hnLbHMwcvSihnDhEfx2/BzKp2xb0Y+ErdfYcrs9tkJQ=
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
github.com/ugorji/go/codec v0.0.0-20190128213124-ee1426cffec0/go.mod h1:iT03XoTwV7xq/+UGwKO3UbC1nNNlopQiY61beSdrtOA=
github.com/unrolled/secure v0.0.0-20180918153822-f340ee86eb8b/go.mod h1:mnPT77IAdsi/kV7+Es7y+pXALeV3h7G6dQF6mNYjcLA=
github.com/unrolled/secure v0.0.0-20181005190816-ff9db2ff917f/go.mod h1:mnPT77IAdsi/kV7+Es7y+pXALeV3h7G6dQF6mNYjcLA=
github.com/unrolled/secure v0.0.0-20181022170031-4b6b7cf51606/go.mod h1:mnPT77IAdsi/kV7+Es7y+pXALeV3h7G6dQF6mNYjcLA=
github.com/unrolled/secure v0.0.0-20190103195806-76e6d4e9b90c/go.mod h1:mnPT77IAdsi/kV7+Es7y+pXALeV3h7G6dQF6mNYjcLA=
github.com/urfave/cli v1.20.0 h1:fDqGv3UG/4jbVl/QkFwEdddtEDjh/5Ov6X+0B/3bPaw=
github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA=
github.com/vektah/dataloaden v0.2.0/go.mod h1:vxM6NuRlgiR0M6wbVTJeKp9vQIs81ZMfCYO+4yq/jbE=
github.com/vektah/gqlparser v1.1.0 h1:3668p2gUlO+PiS81x957Rpr3/FPRWG6cxgCXAvTS1hw=
github.com/vektah/gqlparser v1.1.0/go.mod h1:1ycwN7Ij5njmMkPPAOaRFY4rET2Enx7IkVv3vaXspKw=
github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I=
github.com/xdg/stringprep v1.0.0/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y=
github.com/xinsnake/go-http-digest-auth-client v0.4.0/go.mod h1:QK1t1v7ylyGb363vGWu+6Irh7gyFj+N7+UZzM0L6g8I=
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
go.opencensus.io v0.17.0/go.mod h1:mp1VrMQxhlqqDpKvH4UcQUa4YwlzNmymAjPrDdfxNpI=
golang.org/x/crypto v0.0.0-20180505025534-4ec37c66abab/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20180910181607-0e37d006457b/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181001203147-e3636079e1a4/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181009213950-7c1a557ab941/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181015023909-0c41d7ab0a0e/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181024171144-74cb1d3d52f4/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181025113841-85e1b3f9139a/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181025213731-e84da0312774/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181030102418-4d3f4d9ffa16/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181106171534-e4dc69e5b2fd/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181112202954-3d3f9f413869/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181127143415-eb0de9b17e85/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190102171810-8d7daa0c54b3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190103213133-ff983b9c42bc h1:F5tKCVGp+MUAHhKp5MZtGqAlGX3+oCsiL1Q629FL90M=
golang.org/x/crypto v0.0.0-20190103213133-ff983b9c42bc/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190122013713-64072686203f/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190130090550-b01c7a725664 h1:YbZJ76lQ1BqNhVe7dKTSB67wDrc2VPRR75IyGyyPDX8=
golang.org/x/crypto v0.0.0-20190130090550-b01c7a725664/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs=
golang.org/x/image v0.0.0-20190118043309-183bebdce1b2 h1:FNSSV4jv1PrPsiM2iKGpqLPPgYACqh9Muav7Pollk1k=
golang.org/x/image v0.0.0-20190118043309-183bebdce1b2/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs=
golang.org/x/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180404174746-b3c676e531a6/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180816102801-aaf60122140d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180921000356-2f5d2388922f/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180925072008-f04abc6bdfa7/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180926154720-4dfa2610cdf3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181005035420-146acd28ed58/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181011144130-49bb7cea24b1/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181017193950-04a2e542c03f/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181029044818-c44066c5c816/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181102091132-c10e9556a7bc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181106065722-10aee1819953/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181108082009-03003ca0c849/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181207154023-610586996380/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181213202711-891ebc4b82d6/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e h1:bRhVy7zSSasaqNksaRZiA5EEI+Ei4I1nO5Jh72wfHlg=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190119204137-ed066c81e75e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181203162652-d668ce993890/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4 h1:YUO/7uOKsKeq9UokNS62b8FYywz3ker1l1vDZRCRefw=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180816055513-1c9583448a9c/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180906133057-8cf3aee42992/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180921163948-d47a0f339242/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180925112736-b09afc3d579e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180927150500-dad3d9fb7b6e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181005133103-4497e2df6f9e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181011152604-fa43e7bc11ba/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181019084534-8f1d3d21f81b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181022134430-8a28ead16f52/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181024145615-5cd93ef61a7c/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181025063200-d989b31c8746/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181026064943-731415f00dce/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181030150119-7e31e0c00fa0/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181106135930-3a76605856fd/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181206074257-70b957f3b65e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181213150753-586ba8c9bb14/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181213200352-4d1cda033e06/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190102155601-82a175fd1598/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190108104531-7fbe1cd0fcc2/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190116161447-11f53e031339 h1:g/Jesu8+QLnA0CPzF3E1pURg0Byr7i6jLoX5sqjcAh0=
golang.org/x/sys v0.0.0-20190116161447-11f53e031339/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190122071731-054c452bb702/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190130150945-aca44879d564 h1:o6ENHFwwr1TZ9CUPQcfo1HGvLP1OPsPOTB7xCIOPNmU=
golang.org/x/sys v0.0.0-20190130150945-aca44879d564/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180826000951-f6ba57429505/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180924175601-e93be7f42f9f/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181003024731-2f84ea8ef872/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181006002542-f60d9635b16a/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181008205924-a2b3f7f249e9/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181013182035-5e66757b835f/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181017214349-06f26fdaaa28/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181019005945-6adeb8aab2de/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181024171208-a2dc47679d30/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181026183834-f60e5f99f081/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181030151751-bb28844c46df/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181102223251-96e9e165b75e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181105230042-78dc5bac0cac/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181107215632-34b416bd17b3/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181109152631-138c20b93253/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181109202920-92d8274bd7b8/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181111003725-6d71ab8aade0/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181114190951-94339b83286c/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181117154741-2ddaf7f79a09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181119130350-139d099f6620/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181120060634-fc4f04983f62/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181122213734-04b5d21e00f1/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181127195227-b4e97c0ed882/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181127232545-e782529d0ddd/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181201035826-d0ca3933b724/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181203210056-e5f3ab76ea4b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181205224935-3576414c54a4/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181206194817-bcd4e47d0288/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181207183836-8bc39b988060/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181212172921-837e80568c09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181213190329-bbccd8cae4a9/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181220024903-92cdcd90bf52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190102213336-ca9055ed7d04/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190104182027-498d95493402/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190108222858-421f03a57a64/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190111214448-fc1d57b08d7b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190118193359-16909d206f00 h1:6OmoTtlNJlHuWNIjTEyUtMBHrryp8NRuf/XtnC7MmXM=
golang.org/x/tools v0.0.0-20190118193359-16909d206f00/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190122202912-9c309ee22fab/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190124004107-78ee07aa9465/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190125232054-d66bd3c5d5a6 h1:iZgcI2DDp6zW5v9Z/5+f0NuqoxNdmzg4hivjk2WLXpY=
golang.org/x/tools v0.0.0-20190125232054-d66bd3c5d5a6/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190130190128-9bdeaddf5f7f h1:phwpKT9f+doEU8H+Khk7QtSFIIzNwRqtcj7hzVhTP1Y=
golang.org/x/tools v0.0.0-20190130190128-9bdeaddf5f7f/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
google.golang.org/api v0.0.0-20180910000450-7ca32eb868bf/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
google.golang.org/api v0.0.0-20180921000521-920bb1beccf7/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
google.golang.org/api v0.0.0-20181015145326-625cd1887957/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.4.0 h1:/wp5JvzpHIxhs/dumFmF7BXTf3Z+dd4uXta4kVyO508=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20180831171423-11092d34479b/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20180924164928-221a8d4f7494/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20190108161440-ae2f86662275/go.mod h1:7Ep/1NZk928CDR8SjdVbjWNpdIf6nzjE3BTgJDr2Atg=
google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
google.golang.org/grpc v1.15.0/go.mod h1:0JHn/cJsOMiMfNA9+DeHDlAU7KAAB5GDlYFpa9MZMio=
google.golang.org/grpc v1.16.0/go.mod h1:0JHn/cJsOMiMfNA9+DeHDlAU7KAAB5GDlYFpa9MZMio=
google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U=
gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc/go.mod h1:m7x9LTH6d71AHyAX77c9yqWCCa3UKHcVEj9y7hAtKDk=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2/go.mod h1:Xk6kEKp8OKb+X14hQBKWaSkCsqBpgog8nAV2xsGOxlo=
gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df/go.mod h1:LRQQ+SO6ZHR7tOkpBDuZnXENFzX8qRjMDMyPD6BRkCw=
gopkg.in/gorp.v1 v1.7.2 h1:j3DWlAyGVv8whO7AcIWznQ2Yj7yJkn34B8s63GViAAw=
gopkg.in/gorp.v1 v1.7.2/go.mod h1:Wo3h+DBQZIxATwftsglhdD/62zRFPhGhTiu5jUJmCaw=
gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=
gopkg.in/ini.v1 v1.39.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/jcmturner/aescts.v1 v1.0.1/go.mod h1:nsR8qBOg+OucoIW+WMhB3GspUQXq9XorLnQb9XtvcOo=
gopkg.in/jcmturner/dnsutils.v1 v1.0.1/go.mod h1:m3v+5svpVOhtFAP/wSz+yzh4Mc0Fg7eRhxkJMWSIz9Q=
gopkg.in/jcmturner/goidentity.v3 v3.0.0/go.mod h1:oG2kH0IvSYNIu80dVAyu/yoefjq1mNfM5bm88whjWx4=
gopkg.in/jcmturner/gokrb5.v6 v6.0.1/go.mod h1:NFjHNLrHQiruory+EmqDXCGv6CrjkeYeA+bR9mIfNFk=
gopkg.in/jcmturner/rpc.v1 v1.1.0/go.mod h1:YIdkC4XfD6GXbzje11McwsDuOlZQSb9W4vfLvuNnlv8=
gopkg.in/mail.v2 v2.0.0-20180731213649-a0242b2233b4/go.mod h1:htwXN1Qh09vZJ1NVKxQqHPBaCBbzKhp5GzuJEA4VJWw=
gopkg.in/mail.v2 v2.3.1/go.mod h1:htwXN1Qh09vZJ1NVKxQqHPBaCBbzKhp5GzuJEA4VJWw=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw=
honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20180920025451-e3ad64cb4ed3/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
sourcegraph.com/sourcegraph/appdash v0.0.0-20180110180208-2cc67fd64755/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU=
sourcegraph.com/sourcegraph/appdash-data v0.0.0-20151005221446-73f23eafcf67/go.mod h1:L5q+DGLGOQFpo1snNEkLOJT2d1YTW66rWNzatr3He1k=
sourcegraph.com/sourcegraph/go-diff v0.0.0-20171119081133-3f415a150aec/go.mod h1:R09mWeb9JcPbO+A3cYDc11xjz0wp6r9+KnqdqROAoRU=
sourcegraph.com/sqs/pbtypes v0.0.0-20160107090929-4d1b9dc7ffc3/go.mod h1:ketZ/q3QxT9HOBeFhu6RdvsftgpsbFHBF5Cas6cDKZ0=
upper.io/db.v3 v3.5.7+incompatible h1:3MJSnJQ+NMxBxuNwO+gOKFiugwv+f61LbyuZYSPzoi4=
upper.io/db.v3 v3.5.7+incompatible/go.mod h1:FgTdD24eBjJAbPKsQSiHUNgXjOR4Lub3u1UMHSIh82Y=
32
gqlgen.yml
Normal file
@@ -0,0 +1,32 @@
# .gqlgen.yml example
#
# Refer to https://gqlgen.com/config/
# for detailed .gqlgen.yml documentation.

schema:
- schema/schema.graphql
exec:
filename: internal/models/generated_exec.go
model:
filename: internal/models/generated_models.go
resolver:
filename: internal/api/resolver.go
type: Resolver

struct_tag: gqlgen

models:
Gallery:
model: github.com/stashapp/stash/internal/models.Gallery
Performer:
model: github.com/stashapp/stash/internal/models.Performer
Scene:
model: github.com/stashapp/stash/internal/models.Scene
SceneMarker:
model: github.com/stashapp/stash/internal/models.SceneMarker
ScrapedItem:
model: github.com/stashapp/stash/internal/models.ScrapedItem
Studio:
model: github.com/stashapp/stash/internal/models.Studio
Tag:
model: github.com/stashapp/stash/internal/models.Tag
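This config drives gqlgen's code generation: the exec and model files land in the internal/models package and the resolver root in internal/api, both of which appear later in this commit. As a rough, hedged sketch of how such generated pieces are typically wired into an HTTP server with the gqlgen handler package of this era — the server setup, port, and route names below are illustrative assumptions, not code from this commit:

package main

import (
	"log"
	"net/http"

	"github.com/99designs/gqlgen/handler"

	"github.com/stashapp/stash/internal/api"
	"github.com/stashapp/stash/internal/models"
)

func main() {
	// models.NewExecutableSchema and models.Config are what gqlgen emits into
	// internal/models/generated_exec.go per the "exec" section of gqlgen.yml;
	// api.Resolver is the hand-written resolver root from internal/api/resolver.go.
	schema := models.NewExecutableSchema(models.Config{Resolvers: &api.Resolver{}})

	// Route names and port are placeholders for illustration only.
	http.Handle("/playground", handler.Playground("Stash", "/graphql"))
	http.Handle("/graphql", handler.GraphQL(schema))

	log.Fatal(http.ListenAndServe(":9999", nil))
}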
7
internal/api/api-packr.go
Normal file
@@ -0,0 +1,7 @@
// Code generated by github.com/gobuffalo/packr/v2. DO NOT EDIT.

// You can use the "packr clean" command to clean up this,
// and any other packr generated files.
package api

import _ "github.com/stashapp/stash/packrd"
156
internal/api/resolver.go
Normal file
@@ -0,0 +1,156 @@
package api

import (
"context"
"github.com/stashapp/stash/internal/models"
"github.com/stashapp/stash/internal/scraper"
"sort"
"strconv"
)

type Resolver struct{}

func (r *Resolver) Gallery() models.GalleryResolver {
return &galleryResolver{r}
}
func (r *Resolver) Mutation() models.MutationResolver {
return &mutationResolver{r}
}
func (r *Resolver) Performer() models.PerformerResolver {
return &performerResolver{r}
}
func (r *Resolver) Query() models.QueryResolver {
return &queryResolver{r}
}
func (r *Resolver) Scene() models.SceneResolver {
return &sceneResolver{r}
}
func (r *Resolver) SceneMarker() models.SceneMarkerResolver {
return &sceneMarkerResolver{r}
}
func (r *Resolver) Studio() models.StudioResolver {
return &studioResolver{r}
}
func (r *Resolver) Subscription() models.SubscriptionResolver {
return &subscriptionResolver{r}
}
func (r *Resolver) Tag() models.TagResolver {
return &tagResolver{r}
}

type mutationResolver struct{ *Resolver }
type queryResolver struct{ *Resolver }
type subscriptionResolver struct{ *Resolver }

type galleryResolver struct{ *Resolver }
type performerResolver struct{ *Resolver }
type sceneResolver struct{ *Resolver }
type sceneMarkerResolver struct{ *Resolver }
type studioResolver struct{ *Resolver }
type tagResolver struct{ *Resolver }

func (r *queryResolver) MarkerWall(ctx context.Context, q *string) ([]models.SceneMarker, error) {
qb := models.NewSceneMarkerQueryBuilder()
return qb.Wall(q)
}

func (r *queryResolver) SceneWall(ctx context.Context, q *string) ([]models.Scene, error) {
qb := models.NewSceneQueryBuilder()
return qb.Wall(q)
}

func (r *queryResolver) MarkerStrings(ctx context.Context, q *string, sort *string) ([]*models.MarkerStringsResultType, error) {
qb := models.NewSceneMarkerQueryBuilder()
return qb.GetMarkerStrings(q, sort)
}

func (r *queryResolver) ValidGalleriesForScene(ctx context.Context, scene_id *string) ([]models.Gallery, error) {
if scene_id == nil {
panic("nil scene id") // TODO make scene_id mandatory
}
sceneID, _ := strconv.Atoi(*scene_id)
sqb := models.NewSceneQueryBuilder()
scene, err := sqb.Find(sceneID)
if err != nil {
return nil, err
}

qb := models.NewGalleryQueryBuilder()
validGalleries, err := qb.ValidGalleriesForScenePath(scene.Path)
sceneGallery, _ := qb.FindBySceneID(sceneID, nil)
if sceneGallery != nil {
validGalleries = append(validGalleries, *sceneGallery)
}
return validGalleries, nil
}

func (r *queryResolver) Stats(ctx context.Context) (models.StatsResultType, error) {
//scenesCount, _ := runCountQuery(buildCountQuery(selectAll("scenes")), nil)
//galleryCount, _ := runCountQuery(buildCountQuery(selectAll("galleries")), nil)
//performersCount, _ := runCountQuery(buildCountQuery(selectAll("performers")), nil)
//studiosCount, _ := runCountQuery(buildCountQuery(selectAll("studios")), nil)
//tagsCount, _ := runCountQuery(buildCountQuery(selectAll("tags")), nil)
//return StatsResultType{
// SceneCount: scenesCount,
// GalleryCount: galleryCount,
// PerformerCount: performersCount,
// StudioCount: studiosCount,
// TagCount: tagsCount,
//}, nil
return models.StatsResultType{}, nil // TODO
}

// Get scene marker tags which show up under the video.
func (r *queryResolver) SceneMarkerTags(ctx context.Context, scene_id string) ([]models.SceneMarkerTag, error) {
sceneID, _ := strconv.Atoi(scene_id)
sqb := models.NewSceneMarkerQueryBuilder()
sceneMarkers, err := sqb.FindBySceneID(sceneID, nil)
if err != nil {
return nil, err
}

tags := make(map[int]*models.SceneMarkerTag)
var keys []int
tqb := models.NewTagQueryBuilder()
for _, sceneMarker := range sceneMarkers {
if !sceneMarker.PrimaryTagID.Valid {
panic("missing primary tag id")
}
markerPrimaryTag, err := tqb.Find(int(sceneMarker.PrimaryTagID.Int64), nil)
if err != nil {
return nil, err
}
_, hasKey := tags[markerPrimaryTag.ID]
var sceneMarkerTag *models.SceneMarkerTag
if !hasKey {
sceneMarkerTag = &models.SceneMarkerTag{ Tag: *markerPrimaryTag }
tags[markerPrimaryTag.ID] = sceneMarkerTag
keys = append(keys, markerPrimaryTag.ID)
} else {
sceneMarkerTag = tags[markerPrimaryTag.ID]
}
tags[markerPrimaryTag.ID].SceneMarkers = append(tags[markerPrimaryTag.ID].SceneMarkers, sceneMarker)
}

// Sort so that primary tags that show up earlier in the video are first.
sort.Slice(keys, func(i, j int) bool {
a := tags[keys[i]]
b := tags[keys[j]]
return a.SceneMarkers[0].Seconds < b.SceneMarkers[0].Seconds
})

var result []models.SceneMarkerTag
for _, key := range keys {
result = append(result, *tags[key])
}

return result, nil
}

func (r *queryResolver) ScrapeFreeones(ctx context.Context, performer_name string) (*models.ScrapedPerformer, error) {
return scraper.GetPerformer(performer_name)
}

func (r *queryResolver) ScrapeFreeonesPerformerList(ctx context.Context, query string) ([]string, error) {
return scraper.GetPerformerNames(query)
}
20
internal/api/resolver_model_gallery.go
Normal file
@@ -0,0 +1,20 @@
|
||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"github.com/stashapp/stash/internal/models"
|
||||||
|
"strconv"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (r *galleryResolver) ID(ctx context.Context, obj *models.Gallery) (string, error) {
|
||||||
|
return strconv.Itoa(obj.ID), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *galleryResolver) Title(ctx context.Context, obj *models.Gallery) (*string, error) {
|
||||||
|
return nil, nil // TODO remove this from schema
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *galleryResolver) Files(ctx context.Context, obj *models.Gallery) ([]models.GalleryFilesType, error) {
|
||||||
|
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
|
||||||
|
return obj.GetFiles(baseURL), nil
|
||||||
|
}
|
||||||
141
internal/api/resolver_model_performer.go
Normal file
@@ -0,0 +1,141 @@
|
||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"github.com/stashapp/stash/internal/api/urlbuilders"
|
||||||
|
"github.com/stashapp/stash/internal/models"
|
||||||
|
"strconv"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (r *performerResolver) ID(ctx context.Context, obj *models.Performer) (string, error) {
|
||||||
|
return strconv.Itoa(obj.ID), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *performerResolver) Name(ctx context.Context, obj *models.Performer) (*string, error) {
|
||||||
|
if obj.Name.Valid {
|
||||||
|
return &obj.Name.String, nil
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *performerResolver) URL(ctx context.Context, obj *models.Performer) (*string, error) {
|
||||||
|
if obj.Url.Valid {
|
||||||
|
return &obj.Url.String, nil
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *performerResolver) Twitter(ctx context.Context, obj *models.Performer) (*string, error) {
|
||||||
|
if obj.Twitter.Valid {
|
||||||
|
return &obj.Twitter.String, nil
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *performerResolver) Instagram(ctx context.Context, obj *models.Performer) (*string, error) {
|
||||||
|
if obj.Instagram.Valid {
|
||||||
|
return &obj.Instagram.String, nil
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *performerResolver) Birthdate(ctx context.Context, obj *models.Performer) (*string, error) {
|
||||||
|
if obj.Birthdate.Valid {
|
||||||
|
return &obj.Birthdate.String, nil
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *performerResolver) Ethnicity(ctx context.Context, obj *models.Performer) (*string, error) {
|
||||||
|
if obj.Ethnicity.Valid {
|
||||||
|
return &obj.Ethnicity.String, nil
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *performerResolver) Country(ctx context.Context, obj *models.Performer) (*string, error) {
|
||||||
|
if obj.Country.Valid {
|
||||||
|
return &obj.Country.String, nil
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *performerResolver) EyeColor(ctx context.Context, obj *models.Performer) (*string, error) {
|
||||||
|
if obj.EyeColor.Valid {
|
||||||
|
return &obj.EyeColor.String, nil
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *performerResolver) Height(ctx context.Context, obj *models.Performer) (*string, error) {
|
||||||
|
if obj.Height.Valid {
|
||||||
|
return &obj.Height.String, nil
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *performerResolver) Measurements(ctx context.Context, obj *models.Performer) (*string, error) {
|
||||||
|
if obj.Measurements.Valid {
|
||||||
|
return &obj.Measurements.String, nil
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *performerResolver) FakeTits(ctx context.Context, obj *models.Performer) (*string, error) {
|
||||||
|
if obj.FakeTits.Valid {
|
||||||
|
return &obj.FakeTits.String, nil
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *performerResolver) CareerLength(ctx context.Context, obj *models.Performer) (*string, error) {
|
||||||
|
if obj.CareerLength.Valid {
|
||||||
|
return &obj.CareerLength.String, nil
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *performerResolver) Tattoos(ctx context.Context, obj *models.Performer) (*string, error) {
|
||||||
|
if obj.Tattoos.Valid {
|
||||||
|
return &obj.Tattoos.String, nil
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *performerResolver) Piercings(ctx context.Context, obj *models.Performer) (*string, error) {
|
||||||
|
if obj.Piercings.Valid {
|
||||||
|
return &obj.Piercings.String, nil
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *performerResolver) Aliases(ctx context.Context, obj *models.Performer) (*string, error) {
|
||||||
|
if obj.Aliases.Valid {
|
||||||
|
return &obj.Aliases.String, nil
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *performerResolver) Favorite(ctx context.Context, obj *models.Performer) (bool, error) {
|
||||||
|
if obj.Favorite.Valid {
|
||||||
|
return obj.Favorite.Bool, nil
|
||||||
|
}
|
||||||
|
return false, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *performerResolver) ImagePath(ctx context.Context, obj *models.Performer) (*string, error) {
|
||||||
|
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
|
||||||
|
imagePath := urlbuilders.NewPerformerURLBuilder(baseURL, obj.ID).GetPerformerImageUrl()
|
||||||
|
return &imagePath, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *performerResolver) SceneCount(ctx context.Context, obj *models.Performer) (*int, error) {
|
||||||
|
qb := models.NewSceneQueryBuilder()
|
||||||
|
res, err := qb.CountByPerformerID(obj.ID)
|
||||||
|
return &res, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *performerResolver) Scenes(ctx context.Context, obj *models.Performer) ([]models.Scene, error) {
|
||||||
|
qb := models.NewSceneQueryBuilder()
|
||||||
|
return qb.FindByPerformerID(obj.ID)
|
||||||
|
}
|
||||||
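
The performer resolver above repeats the same nullable-column-to-pointer conversion for every string field. A minimal sketch of a helper that captures the pattern follows; the name nullStringPtr is illustrative and not part of this commit.

package api

import "database/sql"

// nullStringPtr converts a nullable column into the *string the generated
// GraphQL model expects: nil when the column is NULL, a pointer otherwise.
func nullStringPtr(ns sql.NullString) *string {
	if ns.Valid {
		return &ns.String
	}
	return nil
}

With such a helper, a field resolver like Twitter reduces to a single return nullStringPtr(obj.Twitter), nil.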
116
internal/api/resolver_model_scene.go
Normal file
@@ -0,0 +1,116 @@
|
||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"github.com/stashapp/stash/internal/api/urlbuilders"
|
||||||
|
"github.com/stashapp/stash/internal/manager"
|
||||||
|
"github.com/stashapp/stash/internal/models"
|
||||||
|
"github.com/stashapp/stash/internal/utils"
|
||||||
|
"strconv"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (r *sceneResolver) ID(ctx context.Context, obj *models.Scene) (string, error) {
|
||||||
|
return strconv.Itoa(obj.ID), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *sceneResolver) Title(ctx context.Context, obj *models.Scene) (*string, error) {
|
||||||
|
if obj.Title.Valid {
|
||||||
|
return &obj.Title.String, nil
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *sceneResolver) Details(ctx context.Context, obj *models.Scene) (*string, error) {
|
||||||
|
if obj.Details.Valid {
|
||||||
|
return &obj.Details.String, nil
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *sceneResolver) URL(ctx context.Context, obj *models.Scene) (*string, error) {
|
||||||
|
if obj.Url.Valid {
|
||||||
|
return &obj.Url.String, nil
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *sceneResolver) Date(ctx context.Context, obj *models.Scene) (*string, error) {
|
||||||
|
if obj.Date.Valid {
|
||||||
|
result := utils.GetYMDFromDatabaseDate(obj.Date.String)
|
||||||
|
return &result, nil
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *sceneResolver) Rating(ctx context.Context, obj *models.Scene) (*int, error) {
|
||||||
|
if obj.Rating.Valid {
|
||||||
|
rating := int(obj.Rating.Int64)
|
||||||
|
return &rating, nil
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *sceneResolver) File(ctx context.Context, obj *models.Scene) (models.SceneFileType, error) {
|
||||||
|
width := int(obj.Width.Int64)
|
||||||
|
height := int(obj.Height.Int64)
|
||||||
|
bitrate := int(obj.Bitrate.Int64)
|
||||||
|
return models.SceneFileType{
|
||||||
|
Size: &obj.Size.String,
|
||||||
|
Duration: &obj.Duration.Float64,
|
||||||
|
VideoCodec: &obj.VideoCodec.String,
|
||||||
|
AudioCodec: &obj.AudioCodec.String,
|
||||||
|
Width: &width,
|
||||||
|
Height: &height,
|
||||||
|
Framerate: &obj.Framerate.Float64,
|
||||||
|
Bitrate: &bitrate,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (models.ScenePathsType, error) {
|
||||||
|
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
|
||||||
|
builder := urlbuilders.NewSceneURLBuilder(baseURL, obj.ID)
|
||||||
|
screenshotPath := builder.GetScreenshotUrl()
|
||||||
|
previewPath := builder.GetStreamPreviewUrl()
|
||||||
|
streamPath := builder.GetStreamUrl()
|
||||||
|
webpPath := builder.GetStreamPreviewImageUrl()
|
||||||
|
vttPath := builder.GetSpriteVttUrl()
|
||||||
|
chaptersVttPath := builder.GetChaptersVttUrl()
|
||||||
|
return models.ScenePathsType{
|
||||||
|
Screenshot: &screenshotPath,
|
||||||
|
Preview: &previewPath,
|
||||||
|
Stream: &streamPath,
|
||||||
|
Webp: &webpPath,
|
||||||
|
Vtt: &vttPath,
|
||||||
|
ChaptersVtt: &chaptersVttPath,
|
||||||
|
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *sceneResolver) IsStreamable(ctx context.Context, obj *models.Scene) (bool, error) {
|
||||||
|
return manager.IsStreamable(obj.Path, obj.Checksum)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *sceneResolver) SceneMarkers(ctx context.Context, obj *models.Scene) ([]models.SceneMarker, error) {
|
||||||
|
qb := models.NewSceneMarkerQueryBuilder()
|
||||||
|
return qb.FindBySceneID(obj.ID, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *sceneResolver) Gallery(ctx context.Context, obj *models.Scene) (*models.Gallery, error) {
|
||||||
|
qb := models.NewGalleryQueryBuilder()
|
||||||
|
return qb.FindBySceneID(obj.ID, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *sceneResolver) Studio(ctx context.Context, obj *models.Scene) (*models.Studio, error) {
|
||||||
|
qb := models.NewStudioQueryBuilder()
|
||||||
|
return qb.FindBySceneID(obj.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *sceneResolver) Tags(ctx context.Context, obj *models.Scene) ([]models.Tag, error) {
|
||||||
|
qb := models.NewTagQueryBuilder()
|
||||||
|
return qb.FindBySceneID(obj.ID, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *sceneResolver) Performers(ctx context.Context, obj *models.Scene) ([]models.Performer, error) {
|
||||||
|
qb := models.NewPerformerQueryBuilder()
|
||||||
|
return qb.FindBySceneID(obj.ID, nil)
|
||||||
|
}
|
||||||
48
internal/api/resolver_model_scene_marker.go
Normal file
@@ -0,0 +1,48 @@
|
||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"github.com/stashapp/stash/internal/api/urlbuilders"
|
||||||
|
"github.com/stashapp/stash/internal/models"
|
||||||
|
"strconv"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (r *sceneMarkerResolver) ID(ctx context.Context, obj *models.SceneMarker) (string, error) {
|
||||||
|
return strconv.Itoa(obj.ID), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *sceneMarkerResolver) Scene(ctx context.Context, obj *models.SceneMarker) (models.Scene, error) {
|
||||||
|
if !obj.SceneID.Valid {
|
||||||
|
panic("Invalid scene id")
|
||||||
|
}
|
||||||
|
qb := models.NewSceneQueryBuilder()
|
||||||
|
sceneID := int(obj.SceneID.Int64)
|
||||||
|
scene, err := qb.Find(sceneID)
|
||||||
|
return *scene, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *sceneMarkerResolver) PrimaryTag(ctx context.Context, obj *models.SceneMarker) (models.Tag, error) {
|
||||||
|
qb := models.NewTagQueryBuilder()
|
||||||
|
if !obj.PrimaryTagID.Valid {
|
||||||
|
panic("TODO no primary tag id")
|
||||||
|
}
|
||||||
|
tag, err := qb.Find(int(obj.PrimaryTagID.Int64), nil) // TODO make primary tag id not null in DB
|
||||||
|
return *tag, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *sceneMarkerResolver) Tags(ctx context.Context, obj *models.SceneMarker) ([]models.Tag, error) {
|
||||||
|
qb := models.NewTagQueryBuilder()
|
||||||
|
return qb.FindBySceneMarkerID(obj.ID, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *sceneMarkerResolver) Stream(ctx context.Context, obj *models.SceneMarker) (string, error) {
|
||||||
|
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
|
||||||
|
sceneID := int(obj.SceneID.Int64)
|
||||||
|
return urlbuilders.NewSceneURLBuilder(baseURL, sceneID).GetSceneMarkerStreamUrl(obj.ID), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *sceneMarkerResolver) Preview(ctx context.Context, obj *models.SceneMarker) (string, error) {
|
||||||
|
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
|
||||||
|
sceneID := int(obj.SceneID.Int64)
|
||||||
|
return urlbuilders.NewSceneURLBuilder(baseURL, sceneID).GetSceneMarkerStreamPreviewUrl(obj.ID), nil
|
||||||
|
}
|
||||||
38
internal/api/resolver_model_studio.go
Normal file
@@ -0,0 +1,38 @@
|
||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"github.com/stashapp/stash/internal/api/urlbuilders"
|
||||||
|
"github.com/stashapp/stash/internal/models"
|
||||||
|
"strconv"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (r *studioResolver) ID(ctx context.Context, obj *models.Studio) (string, error) {
|
||||||
|
return strconv.Itoa(obj.ID), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *studioResolver) Name(ctx context.Context, obj *models.Studio) (string, error) {
|
||||||
|
if obj.Name.Valid {
|
||||||
|
return obj.Name.String, nil
|
||||||
|
}
|
||||||
|
panic("null name") // TODO make name required
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *studioResolver) URL(ctx context.Context, obj *models.Studio) (*string, error) {
|
||||||
|
if obj.Url.Valid {
|
||||||
|
return &obj.Url.String, nil
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *studioResolver) ImagePath(ctx context.Context, obj *models.Studio) (*string, error) {
|
||||||
|
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
|
||||||
|
imagePath := urlbuilders.NewStudioURLBuilder(baseURL, obj.ID).GetStudioImageUrl()
|
||||||
|
return &imagePath, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *studioResolver) SceneCount(ctx context.Context, obj *models.Studio) (*int, error) {
|
||||||
|
qb := models.NewSceneQueryBuilder()
|
||||||
|
res, err := qb.CountByStudioID(obj.ID)
|
||||||
|
return &res, err
|
||||||
|
}
|
||||||
29
internal/api/resolver_model_tag.go
Normal file
@@ -0,0 +1,29 @@
|
||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"github.com/stashapp/stash/internal/models"
|
||||||
|
"strconv"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (r *tagResolver) ID(ctx context.Context, obj *models.Tag) (string, error) {
|
||||||
|
return strconv.Itoa(obj.ID), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *tagResolver) SceneCount(ctx context.Context, obj *models.Tag) (*int, error) {
|
||||||
|
qb := models.NewSceneQueryBuilder()
|
||||||
|
if obj == nil {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
count, err := qb.CountByTagID(obj.ID)
|
||||||
|
return &count, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *tagResolver) SceneMarkerCount(ctx context.Context, obj *models.Tag) (*int, error) {
|
||||||
|
qb := models.NewSceneMarkerQueryBuilder()
|
||||||
|
if obj == nil {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
count, err := qb.CountByTagID(obj.ID)
|
||||||
|
return &count, err
|
||||||
|
}
|
||||||
173
internal/api/resolver_mutation_performer.go
Normal file
@@ -0,0 +1,173 @@
|
||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"github.com/stashapp/stash/internal/database"
|
||||||
|
"github.com/stashapp/stash/internal/models"
|
||||||
|
"github.com/stashapp/stash/internal/utils"
|
||||||
|
"strconv"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.PerformerCreateInput) (*models.Performer, error) {
|
||||||
|
// Process the base 64 encoded image string
|
||||||
|
checksum, imageData, err := utils.ProcessBase64Image(input.Image)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Populate a new performer from the input
|
||||||
|
currentTime := time.Now()
|
||||||
|
newPerformer := models.Performer{
|
||||||
|
Image: imageData,
|
||||||
|
Checksum: checksum,
|
||||||
|
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||||
|
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||||
|
}
|
||||||
|
if input.Name != nil {
|
||||||
|
newPerformer.Name = sql.NullString{ String: *input.Name, Valid: true }
|
||||||
|
}
|
||||||
|
if input.URL != nil {
|
||||||
|
newPerformer.Url = sql.NullString{ String: *input.URL, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Birthdate != nil {
|
||||||
|
newPerformer.Birthdate = sql.NullString{ String: *input.Birthdate, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Ethnicity != nil {
|
||||||
|
newPerformer.Ethnicity = sql.NullString{ String: *input.Ethnicity, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Country != nil {
|
||||||
|
newPerformer.Country = sql.NullString{ String: *input.Country, Valid: true }
|
||||||
|
}
|
||||||
|
if input.EyeColor != nil {
|
||||||
|
newPerformer.EyeColor = sql.NullString{ String: *input.EyeColor, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Height != nil {
|
||||||
|
newPerformer.Height = sql.NullString{ String: *input.Height, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Measurements != nil {
|
||||||
|
newPerformer.Measurements = sql.NullString{ String: *input.Measurements, Valid: true }
|
||||||
|
}
|
||||||
|
if input.FakeTits != nil {
|
||||||
|
newPerformer.FakeTits = sql.NullString{ String: *input.FakeTits, Valid: true }
|
||||||
|
}
|
||||||
|
if input.CareerLength != nil {
|
||||||
|
newPerformer.CareerLength = sql.NullString{ String: *input.CareerLength, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Tattoos != nil {
|
||||||
|
newPerformer.Tattoos = sql.NullString{ String: *input.Tattoos, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Piercings != nil {
|
||||||
|
newPerformer.Piercings = sql.NullString{ String: *input.Piercings, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Aliases != nil {
|
||||||
|
newPerformer.Aliases = sql.NullString{ String: *input.Aliases, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Twitter != nil {
|
||||||
|
newPerformer.Twitter = sql.NullString{ String: *input.Twitter, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Instagram != nil {
|
||||||
|
newPerformer.Instagram = sql.NullString{ String: *input.Instagram, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Favorite != nil {
|
||||||
|
newPerformer.Favorite = sql.NullBool{ Bool: *input.Favorite, Valid: true }
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start the transaction and save the performer
|
||||||
|
tx := database.DB.MustBeginTx(ctx, nil)
|
||||||
|
qb := models.NewPerformerQueryBuilder()
|
||||||
|
performer, err := qb.Create(newPerformer, tx)
|
||||||
|
if err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Commit
|
||||||
|
if err := tx.Commit(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return performer, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.PerformerUpdateInput) (*models.Performer, error) {
|
||||||
|
// Populate performer from the input
|
||||||
|
performerID, _ := strconv.Atoi(input.ID)
|
||||||
|
updatedPerformer := models.Performer{
|
||||||
|
ID: performerID,
|
||||||
|
UpdatedAt: models.SQLiteTimestamp{ Timestamp: time.Now() },
|
||||||
|
}
|
||||||
|
if input.Image != nil {
|
||||||
|
checksum, imageData, err := utils.ProcessBase64Image(*input.Image)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
updatedPerformer.Image = imageData
|
||||||
|
updatedPerformer.Checksum = checksum
|
||||||
|
}
|
||||||
|
if input.Name != nil {
|
||||||
|
updatedPerformer.Name = sql.NullString{ String: *input.Name, Valid: true }
|
||||||
|
}
|
||||||
|
if input.URL != nil {
|
||||||
|
updatedPerformer.Url = sql.NullString{ String: *input.URL, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Birthdate != nil {
|
||||||
|
updatedPerformer.Birthdate = sql.NullString{ String: *input.Birthdate, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Ethnicity != nil {
|
||||||
|
updatedPerformer.Ethnicity = sql.NullString{ String: *input.Ethnicity, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Country != nil {
|
||||||
|
updatedPerformer.Country = sql.NullString{ String: *input.Country, Valid: true }
|
||||||
|
}
|
||||||
|
if input.EyeColor != nil {
|
||||||
|
updatedPerformer.EyeColor = sql.NullString{ String: *input.EyeColor, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Height != nil {
|
||||||
|
updatedPerformer.Height = sql.NullString{ String: *input.Height, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Measurements != nil {
|
||||||
|
updatedPerformer.Measurements = sql.NullString{ String: *input.Measurements, Valid: true }
|
||||||
|
}
|
||||||
|
if input.FakeTits != nil {
|
||||||
|
updatedPerformer.FakeTits = sql.NullString{ String: *input.FakeTits, Valid: true }
|
||||||
|
}
|
||||||
|
if input.CareerLength != nil {
|
||||||
|
updatedPerformer.CareerLength = sql.NullString{ String: *input.CareerLength, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Tattoos != nil {
|
||||||
|
updatedPerformer.Tattoos = sql.NullString{ String: *input.Tattoos, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Piercings != nil {
|
||||||
|
updatedPerformer.Piercings = sql.NullString{ String: *input.Piercings, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Aliases != nil {
|
||||||
|
updatedPerformer.Aliases = sql.NullString{ String: *input.Aliases, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Twitter != nil {
|
||||||
|
updatedPerformer.Twitter = sql.NullString{ String: *input.Twitter, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Instagram != nil {
|
||||||
|
updatedPerformer.Instagram = sql.NullString{ String: *input.Instagram, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Favorite != nil {
|
||||||
|
updatedPerformer.Favorite = sql.NullBool{ Bool: *input.Favorite, Valid: true }
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start the transaction and save the performer
|
||||||
|
tx := database.DB.MustBeginTx(ctx, nil)
|
||||||
|
qb := models.NewPerformerQueryBuilder()
|
||||||
|
performer, err := qb.Update(updatedPerformer, tx)
|
||||||
|
if err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Commit
|
||||||
|
if err := tx.Commit(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return performer, nil
|
||||||
|
}
|
||||||
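
Each mutation resolver in this and the following files repeats the same begin/rollback/commit sequence around database.DB. A minimal sketch of how that could be factored out, assuming database.DB is the *sqlx.DB used elsewhere in the package; the withTxn helper is illustrative, not part of this commit.

package api

import (
	"context"

	"github.com/jmoiron/sqlx"

	"github.com/stashapp/stash/internal/database"
)

// withTxn runs fn inside a transaction, rolling back if fn returns an error
// and committing otherwise, mirroring the pattern in the mutation resolvers.
func withTxn(ctx context.Context, fn func(tx *sqlx.Tx) error) error {
	tx := database.DB.MustBeginTx(ctx, nil)
	if err := fn(tx); err != nil {
		_ = tx.Rollback()
		return err
	}
	return tx.Commit()
}

PerformerCreate, for example, could then wrap its qb.Create call in a single withTxn closure.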
208
internal/api/resolver_mutation_scene.go
Normal file
@@ -0,0 +1,208 @@
|
||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"github.com/stashapp/stash/internal/database"
|
||||||
|
"github.com/stashapp/stash/internal/models"
|
||||||
|
"strconv"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (r *mutationResolver) SceneUpdate(ctx context.Context, input models.SceneUpdateInput) (*models.Scene, error) {
|
||||||
|
// Populate scene from the input
|
||||||
|
sceneID, _ := strconv.Atoi(input.ID)
|
||||||
|
updatedTime := time.Now()
|
||||||
|
updatedScene := models.Scene{
|
||||||
|
ID: sceneID,
|
||||||
|
UpdatedAt: models.SQLiteTimestamp{ Timestamp: updatedTime },
|
||||||
|
}
|
||||||
|
if input.Title != nil {
|
||||||
|
updatedScene.Title = sql.NullString{ String: *input.Title, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Details != nil {
|
||||||
|
updatedScene.Details = sql.NullString{ String: *input.Details, Valid: true }
|
||||||
|
}
|
||||||
|
if input.URL != nil {
|
||||||
|
updatedScene.Url = sql.NullString{ String: *input.URL, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Date != nil {
|
||||||
|
updatedScene.Date = sql.NullString{ String: *input.Date, Valid: true }
|
||||||
|
}
|
||||||
|
if input.Rating != nil {
|
||||||
|
updatedScene.Rating = sql.NullInt64{ Int64: int64(*input.Rating), Valid: true }
|
||||||
|
}
|
||||||
|
if input.StudioID != nil {
|
||||||
|
studioID, _ := strconv.ParseInt(*input.StudioID, 10, 64)
|
||||||
|
updatedScene.StudioID = sql.NullInt64{ Int64: studioID, Valid: true }
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start the transaction and save the scene marker
|
||||||
|
tx := database.DB.MustBeginTx(ctx, nil)
|
||||||
|
qb := models.NewSceneQueryBuilder()
|
||||||
|
jqb := models.NewJoinsQueryBuilder()
|
||||||
|
scene, err := qb.Update(updatedScene, tx)
|
||||||
|
if err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if input.GalleryID != nil {
|
||||||
|
// Save the gallery
|
||||||
|
galleryID, _ := strconv.Atoi(*input.GalleryID)
|
||||||
|
updatedGallery := models.Gallery{
|
||||||
|
ID: galleryID,
|
||||||
|
SceneID: sql.NullInt64{ Int64: int64(sceneID), Valid: true },
|
||||||
|
UpdatedAt: models.SQLiteTimestamp{Timestamp: updatedTime},
|
||||||
|
}
|
||||||
|
gqb := models.NewGalleryQueryBuilder()
|
||||||
|
_, err := gqb.Update(updatedGallery, tx)
|
||||||
|
if err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save the performers
|
||||||
|
var performerJoins []models.PerformersScenes
|
||||||
|
for _, pid := range input.PerformerIds {
|
||||||
|
performerID, _ := strconv.Atoi(pid)
|
||||||
|
performerJoin := models.PerformersScenes{
|
||||||
|
PerformerID: performerID,
|
||||||
|
SceneID: sceneID,
|
||||||
|
}
|
||||||
|
performerJoins = append(performerJoins, performerJoin)
|
||||||
|
}
|
||||||
|
if err := jqb.UpdatePerformersScenes(sceneID, performerJoins, tx); err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save the tags
|
||||||
|
var tagJoins []models.ScenesTags
|
||||||
|
for _, tid := range input.TagIds {
|
||||||
|
tagID, _ := strconv.Atoi(tid)
|
||||||
|
tagJoin := models.ScenesTags{
|
||||||
|
SceneID: sceneID,
|
||||||
|
TagID: tagID,
|
||||||
|
}
|
||||||
|
tagJoins = append(tagJoins, tagJoin)
|
||||||
|
}
|
||||||
|
if err := jqb.UpdateScenesTags(sceneID, tagJoins, tx); err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Commit
|
||||||
|
if err := tx.Commit(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return scene, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *mutationResolver) SceneMarkerCreate(ctx context.Context, input models.SceneMarkerCreateInput) (*models.SceneMarker, error) {
|
||||||
|
primaryTagID, _ := strconv.Atoi(input.PrimaryTagID)
|
||||||
|
sceneID, _ := strconv.Atoi(input.SceneID)
|
||||||
|
currentTime := time.Now()
|
||||||
|
newSceneMarker := models.SceneMarker{
|
||||||
|
Title: input.Title,
|
||||||
|
Seconds: input.Seconds,
|
||||||
|
PrimaryTagID: sql.NullInt64{ Int64: int64(primaryTagID), Valid: primaryTagID != 0 },
|
||||||
|
SceneID: sql.NullInt64{ Int64: int64(sceneID), Valid: sceneID != 0 },
|
||||||
|
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||||
|
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start the transaction and save the scene marker
|
||||||
|
tx := database.DB.MustBeginTx(ctx, nil)
|
||||||
|
smqb := models.NewSceneMarkerQueryBuilder()
|
||||||
|
jqb := models.NewJoinsQueryBuilder()
|
||||||
|
sceneMarker, err := smqb.Create(newSceneMarker, tx)
|
||||||
|
if err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save the marker tags
|
||||||
|
var markerTagJoins []models.SceneMarkersTags
|
||||||
|
for _, tid := range input.TagIds {
|
||||||
|
tagID, _ := strconv.Atoi(tid)
|
||||||
|
markerTag := models.SceneMarkersTags{
|
||||||
|
SceneMarkerID: sceneMarker.ID,
|
||||||
|
TagID: tagID,
|
||||||
|
}
|
||||||
|
markerTagJoins = append(markerTagJoins, markerTag)
|
||||||
|
}
|
||||||
|
if err := jqb.CreateSceneMarkersTags(markerTagJoins, tx); err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Commit
|
||||||
|
if err := tx.Commit(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return sceneMarker, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input models.SceneMarkerUpdateInput) (*models.SceneMarker, error) {
|
||||||
|
// Populate scene marker from the input
|
||||||
|
sceneMarkerID, _ := strconv.Atoi(input.ID)
|
||||||
|
sceneID, _ := strconv.Atoi(input.SceneID)
|
||||||
|
primaryTagID, _ := strconv.Atoi(input.PrimaryTagID)
|
||||||
|
updatedSceneMarker := models.SceneMarker{
|
||||||
|
ID: sceneMarkerID,
|
||||||
|
Title: input.Title,
|
||||||
|
Seconds: input.Seconds,
|
||||||
|
SceneID: sql.NullInt64{ Int64: int64(sceneID), Valid: sceneID != 0 },
|
||||||
|
PrimaryTagID: sql.NullInt64{ Int64: int64(primaryTagID), Valid: primaryTagID != 0 },
|
||||||
|
UpdatedAt: models.SQLiteTimestamp{ Timestamp: time.Now() },
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start the transaction and save the scene marker
|
||||||
|
tx := database.DB.MustBeginTx(ctx, nil)
|
||||||
|
qb := models.NewSceneMarkerQueryBuilder()
|
||||||
|
jqb := models.NewJoinsQueryBuilder()
|
||||||
|
sceneMarker, err := qb.Update(updatedSceneMarker, tx)
|
||||||
|
if err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save the marker tags
|
||||||
|
var markerTagJoins []models.SceneMarkersTags
|
||||||
|
for _, tid := range input.TagIds {
|
||||||
|
tagID, _ := strconv.Atoi(tid)
|
||||||
|
markerTag := models.SceneMarkersTags{
|
||||||
|
SceneMarkerID: sceneMarkerID,
|
||||||
|
TagID: tagID,
|
||||||
|
}
|
||||||
|
markerTagJoins = append(markerTagJoins, markerTag)
|
||||||
|
}
|
||||||
|
if err := jqb.UpdateSceneMarkersTags(sceneMarkerID, markerTagJoins, tx); err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Commit
|
||||||
|
if err := tx.Commit(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return sceneMarker, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (bool, error) {
|
||||||
|
qb := models.NewSceneMarkerQueryBuilder()
|
||||||
|
tx := database.DB.MustBeginTx(ctx, nil)
|
||||||
|
if err := qb.Destroy(id, tx); err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
if err := tx.Commit(); err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
return true, nil
|
||||||
|
}
|
||||||
87
internal/api/resolver_mutation_studio.go
Normal file
@@ -0,0 +1,87 @@
|
||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"github.com/stashapp/stash/internal/database"
|
||||||
|
"github.com/stashapp/stash/internal/models"
|
||||||
|
"github.com/stashapp/stash/internal/utils"
|
||||||
|
"strconv"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (r *mutationResolver) StudioCreate(ctx context.Context, input models.StudioCreateInput) (*models.Studio, error) {
|
||||||
|
// Process the base 64 encoded image string
|
||||||
|
checksum, imageData, err := utils.ProcessBase64Image(input.Image)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Populate a new studio from the input
|
||||||
|
currentTime := time.Now()
|
||||||
|
newStudio := models.Studio{
|
||||||
|
Image: imageData,
|
||||||
|
Checksum: checksum,
|
||||||
|
Name: sql.NullString{ String: input.Name, Valid: true },
|
||||||
|
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||||
|
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||||
|
}
|
||||||
|
if input.URL != nil {
|
||||||
|
newStudio.Url = sql.NullString{ String: *input.URL, Valid: true }
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start the transaction and save the studio
|
||||||
|
tx := database.DB.MustBeginTx(ctx, nil)
|
||||||
|
qb := models.NewStudioQueryBuilder()
|
||||||
|
studio, err := qb.Create(newStudio, tx)
|
||||||
|
if err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Commit
|
||||||
|
if err := tx.Commit(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return studio, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.StudioUpdateInput) (*models.Studio, error) {
|
||||||
|
// Populate studio from the input
|
||||||
|
studioID, _ := strconv.Atoi(input.ID)
|
||||||
|
updatedStudio := models.Studio{
|
||||||
|
ID: studioID,
|
||||||
|
UpdatedAt: models.SQLiteTimestamp{ Timestamp: time.Now() },
|
||||||
|
}
|
||||||
|
if input.Image != nil {
|
||||||
|
checksum, imageData, err := utils.ProcessBase64Image(*input.Image)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
updatedStudio.Image = imageData
|
||||||
|
updatedStudio.Checksum = checksum
|
||||||
|
}
|
||||||
|
if input.Name != nil {
|
||||||
|
updatedStudio.Name = sql.NullString{ String: *input.Name, Valid: true }
|
||||||
|
}
|
||||||
|
if input.URL != nil {
|
||||||
|
updatedStudio.Url = sql.NullString{ String: *input.URL, Valid: true }
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start the transaction and save the studio
|
||||||
|
tx := database.DB.MustBeginTx(ctx, nil)
|
||||||
|
qb := models.NewStudioQueryBuilder()
|
||||||
|
studio, err := qb.Update(updatedStudio, tx)
|
||||||
|
if err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Commit
|
||||||
|
if err := tx.Commit(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return studio, nil
|
||||||
|
}
|
||||||
74
internal/api/resolver_mutation_tag.go
Normal file
@@ -0,0 +1,74 @@
|
||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"github.com/stashapp/stash/internal/database"
|
||||||
|
"github.com/stashapp/stash/internal/models"
|
||||||
|
"strconv"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (r *mutationResolver) TagCreate(ctx context.Context, input models.TagCreateInput) (*models.Tag, error) {
|
||||||
|
// Populate a new tag from the input
|
||||||
|
currentTime := time.Now()
|
||||||
|
newTag := models.Tag{
|
||||||
|
Name: input.Name,
|
||||||
|
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||||
|
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start the transaction and save the studio
|
||||||
|
tx := database.DB.MustBeginTx(ctx, nil)
|
||||||
|
qb := models.NewTagQueryBuilder()
|
||||||
|
tag, err := qb.Create(newTag, tx)
|
||||||
|
if err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Commit
|
||||||
|
if err := tx.Commit(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return tag, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *mutationResolver) TagUpdate(ctx context.Context, input models.TagUpdateInput) (*models.Tag, error) {
|
||||||
|
// Populate tag from the input
|
||||||
|
tagID, _ := strconv.Atoi(input.ID)
|
||||||
|
updatedTag := models.Tag{
|
||||||
|
ID: tagID,
|
||||||
|
Name: input.Name,
|
||||||
|
UpdatedAt: models.SQLiteTimestamp{ Timestamp: time.Now() },
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start the transaction and save the tag
|
||||||
|
tx := database.DB.MustBeginTx(ctx, nil)
|
||||||
|
qb := models.NewTagQueryBuilder()
|
||||||
|
tag, err := qb.Update(updatedTag, tx)
|
||||||
|
if err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Commit
|
||||||
|
if err := tx.Commit(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return tag, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *mutationResolver) TagDestroy(ctx context.Context, input models.TagDestroyInput) (bool, error) {
|
||||||
|
qb := models.NewTagQueryBuilder()
|
||||||
|
tx := database.DB.MustBeginTx(ctx, nil)
|
||||||
|
if err := qb.Destroy(input.ID, tx); err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
if err := tx.Commit(); err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
return true, nil
|
||||||
|
}
|
||||||
22
internal/api/resolver_query_find_gallery.go
Normal file
@@ -0,0 +1,22 @@
|
||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"github.com/stashapp/stash/internal/models"
|
||||||
|
"strconv"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (r *queryResolver) FindGallery(ctx context.Context, id string) (*models.Gallery, error) {
|
||||||
|
qb := models.NewGalleryQueryBuilder()
|
||||||
|
idInt, _ := strconv.Atoi(id)
|
||||||
|
return qb.Find(idInt)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *queryResolver) FindGalleries(ctx context.Context, filter *models.FindFilterType) (models.FindGalleriesResultType, error) {
|
||||||
|
qb := models.NewGalleryQueryBuilder()
|
||||||
|
galleries, total := qb.Query(filter)
|
||||||
|
return models.FindGalleriesResultType{
|
||||||
|
Count: total,
|
||||||
|
Galleries: galleries,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
27
internal/api/resolver_query_find_performer.go
Normal file
@@ -0,0 +1,27 @@
|
||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"github.com/stashapp/stash/internal/models"
|
||||||
|
"strconv"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (r *queryResolver) FindPerformer(ctx context.Context, id string) (*models.Performer, error) {
|
||||||
|
qb := models.NewPerformerQueryBuilder()
|
||||||
|
idInt, _ := strconv.Atoi(id)
|
||||||
|
return qb.Find(idInt)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *queryResolver) FindPerformers(ctx context.Context, performer_filter *models.PerformerFilterType, filter *models.FindFilterType) (models.FindPerformersResultType, error) {
|
||||||
|
qb := models.NewPerformerQueryBuilder()
|
||||||
|
performers, total := qb.Query(performer_filter, filter)
|
||||||
|
return models.FindPerformersResultType{
|
||||||
|
Count: total,
|
||||||
|
Performers: performers,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *queryResolver) AllPerformers(ctx context.Context) ([]models.Performer, error) {
|
||||||
|
qb := models.NewPerformerQueryBuilder()
|
||||||
|
return qb.All()
|
||||||
|
}
|
||||||
29
internal/api/resolver_query_find_scene.go
Normal file
@@ -0,0 +1,29 @@
|
||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"github.com/stashapp/stash/internal/models"
|
||||||
|
"strconv"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (r *queryResolver) FindScene(ctx context.Context, id *string, checksum *string) (*models.Scene, error) {
|
||||||
|
qb := models.NewSceneQueryBuilder()
|
||||||
|
var scene *models.Scene
var err error
if id != nil {
// only convert the id when it is provided, to avoid dereferencing a nil pointer
idInt, _ := strconv.Atoi(*id)
scene, err = qb.Find(idInt)
|
||||||
|
} else if checksum != nil {
|
||||||
|
scene, err = qb.FindByChecksum(*checksum)
|
||||||
|
}
|
||||||
|
return scene, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *queryResolver) FindScenes(ctx context.Context, scene_filter *models.SceneFilterType, scene_ids []int, filter *models.FindFilterType) (models.FindScenesResultType, error) {
|
||||||
|
qb := models.NewSceneQueryBuilder()
|
||||||
|
scenes, total := qb.Query(scene_filter, filter)
|
||||||
|
return models.FindScenesResultType{
|
||||||
|
Count: total,
|
||||||
|
Scenes: scenes,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
15
internal/api/resolver_query_find_scene_marker.go
Normal file
@@ -0,0 +1,15 @@
|
||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"github.com/stashapp/stash/internal/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (r *queryResolver) FindSceneMarkers(ctx context.Context, scene_marker_filter *models.SceneMarkerFilterType, filter *models.FindFilterType) (models.FindSceneMarkersResultType, error) {
|
||||||
|
qb := models.NewSceneMarkerQueryBuilder()
|
||||||
|
sceneMarkers, total := qb.Query(scene_marker_filter, filter)
|
||||||
|
return models.FindSceneMarkersResultType{
|
||||||
|
Count: total,
|
||||||
|
SceneMarkers: sceneMarkers,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
27
internal/api/resolver_query_find_studio.go
Normal file
@@ -0,0 +1,27 @@
|
||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"github.com/stashapp/stash/internal/models"
|
||||||
|
"strconv"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (r *queryResolver) FindStudio(ctx context.Context, id string) (*models.Studio, error) {
|
||||||
|
qb := models.NewStudioQueryBuilder()
|
||||||
|
idInt, _ := strconv.Atoi(id)
|
||||||
|
return qb.Find(idInt, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *queryResolver) FindStudios(ctx context.Context, filter *models.FindFilterType) (models.FindStudiosResultType, error) {
|
||||||
|
qb := models.NewStudioQueryBuilder()
|
||||||
|
studios, total := qb.Query(filter)
|
||||||
|
return models.FindStudiosResultType{
|
||||||
|
Count: total,
|
||||||
|
Studios: studios,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *queryResolver) AllStudios(ctx context.Context) ([]models.Studio, error) {
|
||||||
|
qb := models.NewStudioQueryBuilder()
|
||||||
|
return qb.All()
|
||||||
|
}
|
||||||
18
internal/api/resolver_query_find_tag.go
Normal file
@@ -0,0 +1,18 @@
|
||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"github.com/stashapp/stash/internal/models"
|
||||||
|
"strconv"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (r *queryResolver) FindTag(ctx context.Context, id string) (*models.Tag, error) {
|
||||||
|
qb := models.NewTagQueryBuilder()
|
||||||
|
idInt, _ := strconv.Atoi(id)
|
||||||
|
return qb.Find(idInt, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *queryResolver) AllTags(ctx context.Context) ([]models.Tag, error) {
|
||||||
|
qb := models.NewTagQueryBuilder()
|
||||||
|
return qb.All()
|
||||||
|
}
|
||||||
29
internal/api/resolver_query_metadata.go
Normal file
@@ -0,0 +1,29 @@
|
||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"github.com/stashapp/stash/internal/manager"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (r *queryResolver) MetadataScan(ctx context.Context) (string, error) {
|
||||||
|
manager.GetInstance().Scan()
|
||||||
|
return "todo", nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *queryResolver) MetadataImport(ctx context.Context) (string, error) {
|
||||||
|
manager.GetInstance().Import()
|
||||||
|
return "todo", nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *queryResolver) MetadataExport(ctx context.Context) (string, error) {
|
||||||
|
manager.GetInstance().Export()
|
||||||
|
return "todo", nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *queryResolver) MetadataGenerate(ctx context.Context) (string, error) {
|
||||||
|
panic("not implemented")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *queryResolver) MetadataClean(ctx context.Context) (string, error) {
|
||||||
|
panic("not implemented")
|
||||||
|
}
|
||||||
7
internal/api/resolver_subscription_metadata.go
Normal file
@@ -0,0 +1,7 @@
|
||||||
|
package api
|
||||||
|
|
||||||
|
import "context"
|
||||||
|
|
||||||
|
func (r *subscriptionResolver) MetadataUpdate(ctx context.Context) (<-chan string, error) {
|
||||||
|
panic("not implemented")
|
||||||
|
}
|
||||||
54
internal/api/routes_gallery.go
Normal file
@@ -0,0 +1,54 @@
|
||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"github.com/go-chi/chi"
|
||||||
|
"github.com/stashapp/stash/internal/models"
|
||||||
|
"net/http"
|
||||||
|
"strconv"
|
||||||
|
)
|
||||||
|
|
||||||
|
type galleryRoutes struct{}
|
||||||
|
|
||||||
|
func (rs galleryRoutes) Routes() chi.Router {
|
||||||
|
r := chi.NewRouter()
|
||||||
|
|
||||||
|
r.Route("/{galleryId}", func(r chi.Router) {
|
||||||
|
r.Use(GalleryCtx)
|
||||||
|
r.Get("/{fileIndex}", rs.File)
|
||||||
|
})
|
||||||
|
|
||||||
|
return r
|
||||||
|
}
|
||||||
|
|
||||||
|
func (rs galleryRoutes) File(w http.ResponseWriter, r *http.Request) {
|
||||||
|
gallery := r.Context().Value("gallery").(*models.Gallery)
|
||||||
|
fileIndex, _ := strconv.Atoi(chi.URLParam(r, "fileIndex"))
|
||||||
|
thumb := r.URL.Query().Get("thumb")
|
||||||
|
w.Header().Add("Cache-Control", "max-age=604800000") // 1 Week
|
||||||
|
if thumb == "true" {
|
||||||
|
_, _ = w.Write(gallery.GetThumbnail(fileIndex))
|
||||||
|
} else {
|
||||||
|
_, _ = w.Write(gallery.GetImage(fileIndex))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func GalleryCtx(next http.Handler) http.Handler {
|
||||||
|
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
galleryID, err := strconv.Atoi(chi.URLParam(r, "galleryId"))
|
||||||
|
if err != nil {
|
||||||
|
http.Error(w, http.StatusText(404), 404)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
qb := models.NewGalleryQueryBuilder()
|
||||||
|
gallery, err := qb.Find(galleryID)
|
||||||
|
if err != nil {
|
||||||
|
http.Error(w, http.StatusText(404), 404)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx := context.WithValue(r.Context(), "gallery", gallery)
|
||||||
|
next.ServeHTTP(w, r.WithContext(ctx))
|
||||||
|
})
|
||||||
|
}
|
||||||
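
GalleryCtx stores the loaded record in the request context under the plain string key "gallery" (the following route files do the same with "performer", "scene" and "studio"). The context package documentation recommends an unexported key type instead, so values cannot collide with keys set by other packages. A hedged sketch of the same middleware with a typed key; galleryKey and galleryCtxTyped are illustrative names only.

package api

import (
	"context"
	"net/http"
	"strconv"

	"github.com/go-chi/chi"

	"github.com/stashapp/stash/internal/models"
)

// galleryKey is unexported so the context value cannot clash with
// string keys used elsewhere.
type galleryKey struct{}

func galleryCtxTyped(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		galleryID, err := strconv.Atoi(chi.URLParam(r, "galleryId"))
		if err != nil {
			http.Error(w, http.StatusText(404), 404)
			return
		}

		qb := models.NewGalleryQueryBuilder()
		gallery, err := qb.Find(galleryID)
		if err != nil {
			http.Error(w, http.StatusText(404), 404)
			return
		}

		ctx := context.WithValue(r.Context(), galleryKey{}, gallery)
		next.ServeHTTP(w, r.WithContext(ctx))
	})
}

A handler would then read the value back with r.Context().Value(galleryKey{}).(*models.Gallery).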
47
internal/api/routes_performer.go
Normal file
@@ -0,0 +1,47 @@
|
||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"github.com/go-chi/chi"
|
||||||
|
"github.com/stashapp/stash/internal/models"
|
||||||
|
"net/http"
|
||||||
|
"strconv"
|
||||||
|
)
|
||||||
|
|
||||||
|
type performerRoutes struct{}
|
||||||
|
|
||||||
|
func (rs performerRoutes) Routes() chi.Router {
|
||||||
|
r := chi.NewRouter()
|
||||||
|
|
||||||
|
r.Route("/{performerId}", func(r chi.Router) {
|
||||||
|
r.Use(PerformerCtx)
|
||||||
|
r.Get("/image", rs.Image)
|
||||||
|
})
|
||||||
|
|
||||||
|
return r
|
||||||
|
}
|
||||||
|
|
||||||
|
func (rs performerRoutes) Image(w http.ResponseWriter, r *http.Request) {
|
||||||
|
performer := r.Context().Value("performer").(*models.Performer)
|
||||||
|
_, _ = w.Write(performer.Image)
|
||||||
|
}
|
||||||
|
|
||||||
|
func PerformerCtx(next http.Handler) http.Handler {
|
||||||
|
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
performerID, err := strconv.Atoi(chi.URLParam(r, "performerId"))
|
||||||
|
if err != nil {
|
||||||
|
http.Error(w, http.StatusText(404), 404)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
qb := models.NewPerformerQueryBuilder()
|
||||||
|
performer, err := qb.Find(performerID)
|
||||||
|
if err != nil {
|
||||||
|
http.Error(w, http.StatusText(404), 404)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx := context.WithValue(r.Context(), "performer", performer)
|
||||||
|
next.ServeHTTP(w, r.WithContext(ctx))
|
||||||
|
})
|
||||||
|
}
|
||||||
151
internal/api/routes_scene.go
Normal file
@@ -0,0 +1,151 @@
|
||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"github.com/go-chi/chi"
|
||||||
|
"github.com/stashapp/stash/internal/logger"
|
||||||
|
"github.com/stashapp/stash/internal/manager"
|
||||||
|
"github.com/stashapp/stash/internal/models"
|
||||||
|
"github.com/stashapp/stash/internal/utils"
|
||||||
|
"net/http"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
type sceneRoutes struct{}
|
||||||
|
|
||||||
|
func (rs sceneRoutes) Routes() chi.Router {
|
||||||
|
r := chi.NewRouter()
|
||||||
|
|
||||||
|
r.Route("/{sceneId}", func(r chi.Router) {
|
||||||
|
r.Use(SceneCtx)
|
||||||
|
r.Get("/stream", rs.Stream)
|
||||||
|
r.Get("/stream.mp4", rs.Stream)
|
||||||
|
r.Get("/screenshot", rs.Screenshot)
|
||||||
|
r.Get("/preview", rs.Preview)
|
||||||
|
r.Get("/webp", rs.Webp)
|
||||||
|
r.Get("/vtt/chapter", rs.ChapterVtt)
|
||||||
|
|
||||||
|
r.Get("/scene_marker/{sceneMarkerId}/stream", rs.SceneMarkerStream)
|
||||||
|
r.Get("/scene_marker/{sceneMarkerId}/preview", rs.SceneMarkerPreview)
|
||||||
|
})
|
||||||
|
r.With(SceneCtx).Get("/{sceneId}_thumbs.vtt", rs.VttThumbs)
|
||||||
|
r.With(SceneCtx).Get("/{sceneId}_sprite.jpg", rs.VttSprite)
|
||||||
|
|
||||||
|
return r
|
||||||
|
}
|
||||||
|
|
||||||
|
// region Handlers
|
||||||
|
|
||||||
|
func (rs sceneRoutes) Stream(w http.ResponseWriter, r *http.Request) {
|
||||||
|
scene := r.Context().Value("scene").(*models.Scene)
|
||||||
|
filepath := manager.GetInstance().Paths.Scene.GetStreamPath(scene.Path, scene.Checksum)
|
||||||
|
http.ServeFile(w, r, filepath)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (rs sceneRoutes) Screenshot(w http.ResponseWriter, r *http.Request) {
|
||||||
|
scene := r.Context().Value("scene").(*models.Scene)
|
||||||
|
filepath := manager.GetInstance().Paths.Scene.GetScreenshotPath(scene.Checksum)
|
||||||
|
http.ServeFile(w, r, filepath)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (rs sceneRoutes) Preview(w http.ResponseWriter, r *http.Request) {
|
||||||
|
scene := r.Context().Value("scene").(*models.Scene)
|
||||||
|
filepath := manager.GetInstance().Paths.Scene.GetStreamPreviewPath(scene.Checksum)
|
||||||
|
http.ServeFile(w, r, filepath)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (rs sceneRoutes) Webp(w http.ResponseWriter, r *http.Request) {
|
||||||
|
scene := r.Context().Value("scene").(*models.Scene)
|
||||||
|
filepath := manager.GetInstance().Paths.Scene.GetStreamPreviewImagePath(scene.Checksum)
|
||||||
|
http.ServeFile(w, r, filepath)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (rs sceneRoutes) ChapterVtt(w http.ResponseWriter, r *http.Request) {
|
||||||
|
scene := r.Context().Value("scene").(*models.Scene)
|
||||||
|
qb := models.NewSceneMarkerQueryBuilder()
|
||||||
|
sceneMarkers, err := qb.FindBySceneID(scene.ID, nil)
|
||||||
|
if err != nil {
|
||||||
|
panic("invalid scene markers for chapter vtt")
|
||||||
|
}
|
||||||
|
|
||||||
|
vttLines := []string{"WEBVTT", ""}
|
||||||
|
for _, marker := range sceneMarkers {
|
||||||
|
time := utils.GetVTTTime(marker.Seconds)
|
||||||
|
vttLines = append(vttLines, time + " --> " + time)
|
||||||
|
vttLines = append(vttLines, marker.Title)
|
||||||
|
vttLines = append(vttLines, "")
|
||||||
|
}
|
||||||
|
vtt := strings.Join(vttLines, "\n")
|
||||||
|
|
||||||
|
w.Header().Set("Content-Type", "text/vtt")
|
||||||
|
_, _ = w.Write([]byte(vtt))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (rs sceneRoutes) VttThumbs(w http.ResponseWriter, r *http.Request) {
|
||||||
|
scene := r.Context().Value("scene").(*models.Scene)
|
||||||
|
w.Header().Set("Content-Type", "text/vtt")
|
||||||
|
filepath := manager.GetInstance().Paths.Scene.GetSpriteVttFilePath(scene.Checksum)
|
||||||
|
http.ServeFile(w, r, filepath)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (rs sceneRoutes) VttSprite(w http.ResponseWriter, r *http.Request) {
|
||||||
|
scene := r.Context().Value("scene").(*models.Scene)
|
||||||
|
w.Header().Set("Content-Type", "image/jpeg")
|
||||||
|
filepath := manager.GetInstance().Paths.Scene.GetSpriteImageFilePath(scene.Checksum)
|
||||||
|
http.ServeFile(w, r, filepath)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (rs sceneRoutes) SceneMarkerStream(w http.ResponseWriter, r *http.Request) {
|
||||||
|
scene := r.Context().Value("scene").(*models.Scene)
|
||||||
|
sceneMarkerID, _ := strconv.Atoi(chi.URLParam(r, "sceneMarkerId"))
|
||||||
|
qb := models.NewSceneMarkerQueryBuilder()
|
||||||
|
sceneMarker, err := qb.Find(sceneMarkerID)
|
||||||
|
if err != nil {
|
||||||
|
logger.Warn("Error when getting scene marker for stream")
|
||||||
|
http.Error(w, http.StatusText(404), 404)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPath(scene.Checksum, int(sceneMarker.Seconds))
|
||||||
|
http.ServeFile(w, r, filepath)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (rs sceneRoutes) SceneMarkerPreview(w http.ResponseWriter, r *http.Request) {
|
||||||
|
scene := r.Context().Value("scene").(*models.Scene)
|
||||||
|
sceneMarkerID, _ := strconv.Atoi(chi.URLParam(r, "sceneMarkerId"))
|
||||||
|
qb := models.NewSceneMarkerQueryBuilder()
|
||||||
|
sceneMarker, err := qb.Find(sceneMarkerID)
|
||||||
|
if err != nil {
|
||||||
|
logger.Warn("Error when getting scene marker for stream")
|
||||||
|
http.Error(w, http.StatusText(404), 404)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
filepath := manager.GetInstance().Paths.SceneMarkers.GetStreamPreviewImagePath(scene.Checksum, int(sceneMarker.Seconds))
|
||||||
|
http.ServeFile(w, r, filepath)
|
||||||
|
}
|
||||||
|
|
||||||
|
// endregion
|
||||||
|
|
||||||
|
func SceneCtx(next http.Handler) http.Handler {
|
||||||
|
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
sceneIdentifierQueryParam := chi.URLParam(r, "sceneId")
|
||||||
|
sceneID, _ := strconv.Atoi(sceneIdentifierQueryParam)
|
||||||
|
|
||||||
|
var scene *models.Scene
|
||||||
|
var err error
|
||||||
|
qb := models.NewSceneQueryBuilder()
|
||||||
|
if sceneID == 0 {
|
||||||
|
scene, err = qb.FindByChecksum(sceneIdentifierQueryParam)
|
||||||
|
} else {
|
||||||
|
scene, err = qb.Find(sceneID)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
http.Error(w, http.StatusText(404), 404)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx := context.WithValue(r.Context(), "scene", scene)
|
||||||
|
next.ServeHTTP(w, r.WithContext(ctx))
|
||||||
|
})
|
||||||
|
}
|
||||||
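
ChapterVtt in the scene routes above assembles one cue per marker and joins them into a WEBVTT document. A small self-contained sketch of the same assembly follows; the HH:MM:SS.mmm timestamp format is an assumption, since the output of utils.GetVTTTime is not shown in this commit.

package main

import (
	"fmt"
	"strings"
)

// vttTime formats a marker offset as HH:MM:SS.mmm, the timestamp form WEBVTT expects.
func vttTime(seconds float64) string {
	whole := int(seconds)
	ms := int((seconds - float64(whole)) * 1000)
	return fmt.Sprintf("%02d:%02d:%02d.%03d", whole/3600, (whole%3600)/60, whole%60, ms)
}

func main() {
	type marker struct {
		Title   string
		Seconds float64
	}
	markers := []marker{{"Intro", 0}, {"Second chapter", 93.5}}

	vttLines := []string{"WEBVTT", ""}
	for _, m := range markers {
		t := vttTime(m.Seconds)
		// zero-length cues: start and end timestamps are identical, as in the handler
		vttLines = append(vttLines, t+" --> "+t, m.Title, "")
	}
	fmt.Println(strings.Join(vttLines, "\n"))
}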
47
internal/api/routes_studio.go
Normal file
@@ -0,0 +1,47 @@
|
||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"github.com/go-chi/chi"
|
||||||
|
"github.com/stashapp/stash/internal/models"
|
||||||
|
"net/http"
|
||||||
|
"strconv"
|
||||||
|
)
|
||||||
|
|
||||||
|
type studioRoutes struct{}
|
||||||
|
|
||||||
|
func (rs studioRoutes) Routes() chi.Router {
|
||||||
|
r := chi.NewRouter()
|
||||||
|
|
||||||
|
r.Route("/{studioId}", func(r chi.Router) {
|
||||||
|
r.Use(StudioCtx)
|
||||||
|
r.Get("/image", rs.Image)
|
||||||
|
})
|
||||||
|
|
||||||
|
return r
|
||||||
|
}
|
||||||
|
|
||||||
|
func (rs studioRoutes) Image(w http.ResponseWriter, r *http.Request) {
|
||||||
|
studio := r.Context().Value("studio").(*models.Studio)
|
||||||
|
_, _ = w.Write(studio.Image)
|
||||||
|
}
|
||||||
|
|
||||||
|
func StudioCtx(next http.Handler) http.Handler {
|
||||||
|
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
studioID, err := strconv.Atoi(chi.URLParam(r, "studioId"))
|
||||||
|
if err != nil {
|
||||||
|
http.Error(w, http.StatusText(404), 404)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
qb := models.NewStudioQueryBuilder()
|
||||||
|
studio, err := qb.Find(studioID, nil)
|
||||||
|
if err != nil {
|
||||||
|
http.Error(w, http.StatusText(404), 404)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx := context.WithValue(r.Context(), "studio", studio)
|
||||||
|
next.ServeHTTP(w, r.WithContext(ctx))
|
||||||
|
})
|
||||||
|
}
|
||||||
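Note: StudioCtx and SceneCtx use chi's usual pattern of resolving a URL parameter in middleware and stashing the result in the request context for the handlers mounted under that route. The standalone sketch below illustrates the same pattern with a hypothetical "user" resource and no database, so it runs on its own; the names are illustrative, not stash APIs.

package main

import (
    "context"
    "fmt"
    "net/http"

    "github.com/go-chi/chi"
)

// userCtx resolves {userId} once and places it in the request context,
// mirroring how StudioCtx resolves a studio before its handlers run.
func userCtx(next http.Handler) http.Handler {
    return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        id := chi.URLParam(r, "userId") // illustrative parameter
        ctx := context.WithValue(r.Context(), "user", id)
        next.ServeHTTP(w, r.WithContext(ctx))
    })
}

func main() {
    r := chi.NewRouter()
    r.Route("/user/{userId}", func(r chi.Router) {
        r.Use(userCtx)
        r.Get("/name", func(w http.ResponseWriter, r *http.Request) {
            fmt.Fprintln(w, r.Context().Value("user"))
        })
    })
    _ = http.ListenAndServe(":8080", r)
}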
139
internal/api/server.go
Normal file
@@ -0,0 +1,139 @@
package api

import (
    "context"
    "crypto/tls"
    "errors"
    "fmt"
    "github.com/99designs/gqlgen/handler"
    "github.com/go-chi/chi"
    "github.com/go-chi/chi/middleware"
    "github.com/gobuffalo/packr/v2"
    "github.com/rs/cors"
    "github.com/stashapp/stash/internal/logger"
    "github.com/stashapp/stash/internal/models"
    "net/http"
    "path"
    "runtime/debug"
    "strings"
)

const httpPort = "9998"
const httpsPort = "9999"

var certsBox *packr.Box
var uiBox *packr.Box

func Start() {
    //port := os.Getenv("PORT")
    //if port == "" {
    //    port = defaultPort
    //}

    certsBox = packr.New("Cert Box", "../../certs")
    uiBox = packr.New("UI Box", "../../ui/v1/dist/stash-frontend")

    r := chi.NewRouter()

    r.Use(middleware.Recoverer)
    r.Use(middleware.Logger)
    r.Use(middleware.DefaultCompress)
    r.Use(middleware.StripSlashes)
    r.Use(cors.AllowAll().Handler)
    r.Use(BaseURLMiddleware)

    recoverFunc := handler.RecoverFunc(func(ctx context.Context, err interface{}) error {
        logger.Error(err)
        debug.PrintStack()

        message := fmt.Sprintf("Internal system error. Error <%v>", err)
        return errors.New(message)
    })
    requestMiddleware := handler.RequestMiddleware(func(ctx context.Context, next func(ctx context.Context) []byte) []byte {
        //api.GetRequestContext(ctx).Variables[]
        return next(ctx)
    })
    gqlHandler := handler.GraphQL(models.NewExecutableSchema(models.Config{Resolvers: &Resolver{}}), recoverFunc, requestMiddleware)

    // https://stash.server:9999/certs/server.crt
    r.Handle("/certs/*", http.FileServer(certsBox))

    r.Handle("/graphql", gqlHandler)
    r.Handle("/playground", handler.Playground("GraphQL playground", "/graphql"))

    r.Mount("/gallery", galleryRoutes{}.Routes())
    r.Mount("/performer", performerRoutes{}.Routes())
    r.Mount("/scene", sceneRoutes{}.Routes())
    r.Mount("/studio", studioRoutes{}.Routes())

    // Serve the angular app
    r.HandleFunc("/*", func(w http.ResponseWriter, r *http.Request) {
        ext := path.Ext(r.URL.Path)
        if ext == ".html" || ext == "" {
            data := uiBox.Bytes("index.html")
            _, _ = w.Write(data)
        } else {
            http.FileServer(uiBox).ServeHTTP(w, r)
        }
    })

    httpsServer := &http.Server{
        Addr:      ":" + httpsPort,
        Handler:   r,
        TLSConfig: makeTLSConfig(),
    }
    server := &http.Server{
        Addr:    ":" + httpPort,
        Handler: r,
    }

    go func() {
        logger.Infof("stash is running on HTTP at http://localhost:9998/")
        logger.Fatal(server.ListenAndServe())
    }()

    logger.Infof("stash is running on HTTPS at https://localhost:9999/")
    logger.Fatal(httpsServer.ListenAndServeTLS("", ""))
}

func makeTLSConfig() *tls.Config {
    cert, err := certsBox.Find("server.crt")
    key, err := certsBox.Find("server.key")

    certs := make([]tls.Certificate, 1)
    certs[0], err = tls.X509KeyPair(cert, key)
    if err != nil {
        panic(err)
    }
    tlsConfig := &tls.Config{
        Certificates: certs,
    }

    return tlsConfig
}

type contextKey struct {
    name string
}

var (
    BaseURLCtxKey = &contextKey{"BaseURL"}
)

func BaseURLMiddleware(next http.Handler) http.Handler {
    fn := func(w http.ResponseWriter, r *http.Request) {
        ctx := r.Context()

        var scheme string
        if strings.Compare("https", r.URL.Scheme) == 0 || r.Proto == "HTTP/2.0" {
            scheme = "https"
        } else {
            scheme = "http"
        }
        baseURL := scheme + "://" + r.Host

        r = r.WithContext(context.WithValue(ctx, BaseURLCtxKey, baseURL))

        next.ServeHTTP(w, r)
    }
    return http.HandlerFunc(fn)
}
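Note: with Start() running, the GraphQL endpoint is plain HTTP POST on /graphql at port 9998 (HTTPS on 9999). A minimal client sketch, assuming only that the server is up locally; the `__typename` query is schema-agnostic, so it does not depend on the stash schema.

package main

import (
    "bytes"
    "fmt"
    "io/ioutil"
    "net/http"
)

func main() {
    // Schema-agnostic probe of the /graphql endpoint started by api.Start().
    body := bytes.NewBufferString(`{"query":"{ __typename }"}`)
    resp, err := http.Post("http://localhost:9998/graphql", "application/json", body)
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()
    data, _ := ioutil.ReadAll(resp.Body)
    fmt.Println(resp.Status, string(data))
}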
19
internal/api/urlbuilders/gallery.go
Normal file
@@ -0,0 +1,19 @@
package urlbuilders

import "strconv"

type galleryURLBuilder struct {
    BaseURL   string
    GalleryID string
}

func NewGalleryURLBuilder(baseURL string, galleryID int) galleryURLBuilder {
    return galleryURLBuilder{
        BaseURL:   baseURL,
        GalleryID: strconv.Itoa(galleryID),
    }
}

func (b galleryURLBuilder) GetGalleryImageUrl(fileIndex int) string {
    return b.BaseURL + "/gallery/" + b.GalleryID + "/" + strconv.Itoa(fileIndex)
}
19
internal/api/urlbuilders/performer.go
Normal file
@@ -0,0 +1,19 @@
package urlbuilders

import "strconv"

type performerURLBuilder struct {
    BaseURL     string
    PerformerID string
}

func NewPerformerURLBuilder(baseURL string, performerID int) performerURLBuilder {
    return performerURLBuilder{
        BaseURL:     baseURL,
        PerformerID: strconv.Itoa(performerID),
    }
}

func (b performerURLBuilder) GetPerformerImageUrl() string {
    return b.BaseURL + "/performer/" + b.PerformerID + "/image"
}
47
internal/api/urlbuilders/scene.go
Normal file
@@ -0,0 +1,47 @@
package urlbuilders

import "strconv"

type sceneURLBuilder struct {
    BaseURL string
    SceneID string
}

func NewSceneURLBuilder(baseURL string, sceneID int) sceneURLBuilder {
    return sceneURLBuilder{
        BaseURL: baseURL,
        SceneID: strconv.Itoa(sceneID),
    }
}

func (b sceneURLBuilder) GetStreamUrl() string {
    return b.BaseURL + "/scene/" + b.SceneID + "/stream.mp4"
}

func (b sceneURLBuilder) GetStreamPreviewUrl() string {
    return b.BaseURL + "/scene/" + b.SceneID + "/preview"
}

func (b sceneURLBuilder) GetStreamPreviewImageUrl() string {
    return b.BaseURL + "/scene/" + b.SceneID + "/webp"
}

func (b sceneURLBuilder) GetSpriteVttUrl() string {
    return b.BaseURL + "/scene/" + b.SceneID + "_thumbs.vtt"
}

func (b sceneURLBuilder) GetScreenshotUrl() string {
    return b.BaseURL + "/scene/" + b.SceneID + "/screenshot"
}

func (b sceneURLBuilder) GetChaptersVttUrl() string {
    return b.BaseURL + "/scene/" + b.SceneID + "/vtt/chapter"
}

func (b sceneURLBuilder) GetSceneMarkerStreamUrl(sceneMarkerId int) string {
    return b.BaseURL + "/scene/" + b.SceneID + "/scene_marker/" + strconv.Itoa(sceneMarkerId) + "/stream"
}

func (b sceneURLBuilder) GetSceneMarkerStreamPreviewUrl(sceneMarkerId int) string {
    return b.BaseURL + "/scene/" + b.SceneID + "/scene_marker/" + strconv.Itoa(sceneMarkerId) + "/preview"
}
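Note: the URL builders only concatenate a base URL with the route fragments served by the scene/studio/performer/gallery routers above. A quick usage sketch; the base URL value is an assumption (in practice it comes from BaseURLMiddleware via the request context).

package main

import (
    "fmt"

    "github.com/stashapp/stash/internal/api/urlbuilders"
)

func main() {
    // The base URL would normally be read from BaseURLCtxKey in a resolver.
    b := urlbuilders.NewSceneURLBuilder("http://localhost:9998", 42)
    fmt.Println(b.GetStreamUrl())             // http://localhost:9998/scene/42/stream.mp4
    fmt.Println(b.GetSpriteVttUrl())          // http://localhost:9998/scene/42_thumbs.vtt
    fmt.Println(b.GetSceneMarkerStreamUrl(7)) // .../scene/42/scene_marker/7/stream
}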
19
internal/api/urlbuilders/studio.go
Normal file
@@ -0,0 +1,19 @@
package urlbuilders

import "strconv"

type studioURLBuilder struct {
    BaseURL  string
    StudioID string
}

func NewStudioURLBuilder(baseURL string, studioID int) studioURLBuilder {
    return studioURLBuilder{
        BaseURL:  baseURL,
        StudioID: strconv.Itoa(studioID),
    }
}

func (b studioURLBuilder) GetStudioImageUrl() string {
    return b.BaseURL + "/studio/" + b.StudioID + "/image"
}
7
internal/database/database-packr.go
Normal file
@@ -0,0 +1,7 @@
// Code generated by github.com/gobuffalo/packr/v2. DO NOT EDIT.

// You can use the "packr clean" command to clean up this,
// and any other packr generated files.
package database

import _ "github.com/stashapp/stash/packrd"
57
internal/database/database.go
Normal file
@@ -0,0 +1,57 @@
package database

import (
    "fmt"
    "github.com/gobuffalo/packr/v2"
    "github.com/golang-migrate/migrate/v4"
    "github.com/golang-migrate/migrate/v4/source"
    "github.com/jmoiron/sqlx"
    _ "github.com/mattn/go-sqlite3"
    "github.com/stashapp/stash/internal/logger"
)

var DB *sqlx.DB

func Initialize(databasePath string) {
    runMigrations(databasePath)

    // https://github.com/mattn/go-sqlite3
    conn, err := sqlx.Open("sqlite3", "file:"+databasePath+"?_fk=true")
    conn.SetMaxOpenConns(10)
    if err != nil {
        logger.Fatalf("db.Open(): %q\n", err)
    }
    DB = conn
}

func Reset(databasePath string) {
    _, _ = DB.Exec("PRAGMA writable_schema = 1;")
    _, _ = DB.Exec("delete from sqlite_master where type in ('table', 'index', 'trigger');")
    _, _ = DB.Exec("PRAGMA writable_schema = 0;")
    _, _ = DB.Exec("VACUUM;")
    _, _ = DB.Exec("PRAGMA INTEGRITY_CHECK;")
    runMigrations(databasePath)
}

// Migrate the database
func runMigrations(databasePath string) {
    migrationsBox := packr.New("Migrations Box", "./migrations")
    packrSource := &Packr2Source{
        Box:        migrationsBox,
        Migrations: source.NewMigrations(),
    }
    s, _ := WithInstance(packrSource)
    m, err := migrate.NewWithSourceInstance(
        "packr2",
        s,
        fmt.Sprintf("sqlite3:%s", databasePath),
    )
    if err != nil {
        panic(err.Error())
    }

    err = m.Steps(1)
    if err != nil {
        //panic(err.Error()) // TODO
    }
}
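Note: Initialize runs the packr-embedded migrations and then opens the SQLite file with foreign keys enabled, exposing the handle as database.DB (a *sqlx.DB). A small sketch of typical use, assuming a writable database path; the path is an assumption.

package main

import (
    "fmt"

    "github.com/stashapp/stash/internal/database"
)

func main() {
    database.Initialize("/tmp/stash-go.sqlite") // assumed path

    // DB is a *sqlx.DB, so both sqlx helpers and plain database/sql calls work.
    var count int
    if err := database.DB.Get(&count, "SELECT COUNT(*) FROM scenes"); err != nil {
        panic(err)
    }
    fmt.Println("scenes:", count)
}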
1
internal/database/migrations/1_initial.down.sql
Normal file
@@ -0,0 +1 @@
DROP TABLE IF EXISTS scenes;
139
internal/database/migrations/1_initial.up.sql
Normal file
@@ -0,0 +1,139 @@
CREATE TABLE `tags` (
  `id` integer not null primary key autoincrement,
  `name` varchar(255),
  `created_at` datetime not null,
  `updated_at` datetime not null
);
CREATE TABLE `studios` (
  `id` integer not null primary key autoincrement,
  `image` blob not null,
  `checksum` varchar(255) not null,
  `name` varchar(255),
  `url` varchar(255),
  `created_at` datetime not null,
  `updated_at` datetime not null
);
CREATE TABLE `scraped_items` (
  `id` integer not null primary key autoincrement,
  `title` varchar(255),
  `description` text,
  `url` varchar(255),
  `date` date,
  `rating` varchar(255),
  `tags` varchar(510),
  `models` varchar(510),
  `episode` integer,
  `gallery_filename` varchar(255),
  `gallery_url` varchar(510),
  `video_filename` varchar(255),
  `video_url` varchar(255),
  `studio_id` integer,
  `created_at` datetime not null,
  `updated_at` datetime not null,
  foreign key(`studio_id`) references `studios`(`id`)
);
CREATE TABLE `scenes_tags` (
  `scene_id` integer,
  `tag_id` integer,
  foreign key(`scene_id`) references `scenes`(`id`) on delete CASCADE,
  foreign key(`tag_id`) references `tags`(`id`)
);
CREATE TABLE `scenes` (
  `id` integer not null primary key autoincrement,
  `path` varchar(510) not null,
  `checksum` varchar(255) not null,
  `title` varchar(255),
  `details` text,
  `url` varchar(255),
  `date` date,
  `rating` tinyint,
  `size` varchar(255),
  `duration` float,
  `video_codec` varchar(255),
  `audio_codec` varchar(255),
  `width` tinyint,
  `height` tinyint,
  `framerate` float,
  `bitrate` integer,
  `studio_id` integer,
  `created_at` datetime not null,
  `updated_at` datetime not null,
  foreign key(`studio_id`) references `studios`(`id`) on delete CASCADE
);
CREATE TABLE `scene_markers_tags` (
  `scene_marker_id` integer,
  `tag_id` integer,
  foreign key(`scene_marker_id`) references `scene_markers`(`id`) on delete CASCADE,
  foreign key(`tag_id`) references `tags`(`id`)
);
CREATE TABLE `scene_markers` (
  `id` integer not null primary key autoincrement,
  `title` varchar(255) not null,
  `seconds` float not null,
  `primary_tag_id` integer,
  `scene_id` integer,
  `created_at` datetime not null,
  `updated_at` datetime not null,
  foreign key(`primary_tag_id`) references `tags`(`id`),
  foreign key(`scene_id`) references `scenes`(`id`)
);
CREATE TABLE `performers_scenes` (
  `performer_id` integer,
  `scene_id` integer,
  foreign key(`performer_id`) references `performers`(`id`),
  foreign key(`scene_id`) references `scenes`(`id`)
);
CREATE TABLE `performers` (
  `id` integer not null primary key autoincrement,
  `image` blob not null,
  `checksum` varchar(255) not null,
  `name` varchar(255),
  `url` varchar(255),
  `twitter` varchar(255),
  `instagram` varchar(255),
  `birthdate` date,
  `ethnicity` varchar(255),
  `country` varchar(255),
  `eye_color` varchar(255),
  `height` varchar(255),
  `measurements` varchar(255),
  `fake_tits` varchar(255),
  `career_length` varchar(255),
  `tattoos` varchar(255),
  `piercings` varchar(255),
  `aliases` varchar(255),
  `favorite` boolean not null default '0',
  `created_at` datetime not null,
  `updated_at` datetime not null
);
CREATE TABLE `galleries` (
  `id` integer not null primary key autoincrement,
  `path` varchar(510) not null,
  `checksum` varchar(255) not null,
  `scene_id` integer,
  `created_at` datetime not null,
  `updated_at` datetime not null,
  foreign key(`scene_id`) references `scenes`(`id`)
);
CREATE UNIQUE INDEX `studios_checksum_unique` on `studios` (`checksum`);
CREATE UNIQUE INDEX `scenes_path_unique` on `scenes` (`path`);
CREATE UNIQUE INDEX `scenes_checksum_unique` on `scenes` (`checksum`);
CREATE UNIQUE INDEX `performers_checksum_unique` on `performers` (`checksum`);
CREATE INDEX `index_tags_on_name` on `tags` (`name`);
CREATE INDEX `index_studios_on_name` on `studios` (`name`);
CREATE INDEX `index_studios_on_checksum` on `studios` (`checksum`);
CREATE INDEX `index_scraped_items_on_studio_id` on `scraped_items` (`studio_id`);
CREATE INDEX `index_scenes_tags_on_tag_id` on `scenes_tags` (`tag_id`);
CREATE INDEX `index_scenes_tags_on_scene_id` on `scenes_tags` (`scene_id`);
CREATE INDEX `index_scenes_on_studio_id` on `scenes` (`studio_id`);
CREATE INDEX `index_scene_markers_tags_on_tag_id` on `scene_markers_tags` (`tag_id`);
CREATE INDEX `index_scene_markers_tags_on_scene_marker_id` on `scene_markers_tags` (`scene_marker_id`);
CREATE INDEX `index_scene_markers_on_scene_id` on `scene_markers` (`scene_id`);
CREATE INDEX `index_scene_markers_on_primary_tag_id` on `scene_markers` (`primary_tag_id`);
CREATE INDEX `index_performers_scenes_on_scene_id` on `performers_scenes` (`scene_id`);
CREATE INDEX `index_performers_scenes_on_performer_id` on `performers_scenes` (`performer_id`);
CREATE INDEX `index_performers_on_name` on `performers` (`name`);
CREATE INDEX `index_performers_on_checksum` on `performers` (`checksum`);
CREATE INDEX `index_galleries_on_scene_id` on `galleries` (`scene_id`);
CREATE UNIQUE INDEX `galleries_path_unique` on `galleries` (`path`);
CREATE UNIQUE INDEX `galleries_checksum_unique` on `galleries` (`checksum`);
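Note: in this schema, tags and performers hang off scenes through the scenes_tags and performers_scenes join tables, and galleries reference scenes through scene_id. A hedged Go sketch of a join against that layout, reusing the database handle above; the database path is an assumption and nullable columns are coalesced.

package main

import (
    "fmt"

    "github.com/stashapp/stash/internal/database"
)

func main() {
    database.Initialize("/tmp/stash-go.sqlite") // assumed path

    // List scene titles with their tag names via the scenes_tags join table.
    rows, err := database.DB.Queryx(`
        SELECT COALESCE(scenes.title, ''), COALESCE(tags.name, '')
        FROM scenes
        JOIN scenes_tags ON scenes_tags.scene_id = scenes.id
        JOIN tags ON tags.id = scenes_tags.tag_id`)
    if err != nil {
        panic(err)
    }
    defer rows.Close()
    for rows.Next() {
        var title, tag string
        _ = rows.Scan(&title, &tag)
        fmt.Println(title, "->", tag)
    }
}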
91
internal/database/packr_source.go
Normal file
@@ -0,0 +1,91 @@
package database

import (
    "bytes"
    "fmt"
    "github.com/gobuffalo/packr/v2"
    "github.com/golang-migrate/migrate/v4"
    "github.com/golang-migrate/migrate/v4/source"
    "io"
    "io/ioutil"
    "os"
)

type Packr2Source struct {
    Box        *packr.Box
    Migrations *source.Migrations
}

func init() {
    source.Register("packr2", &Packr2Source{})
}

func WithInstance(instance *Packr2Source) (source.Driver, error) {
    for _, fi := range instance.Box.List() {
        m, err := source.DefaultParse(fi)
        if err != nil {
            continue // ignore files that we can't parse
        }

        if !instance.Migrations.Append(m) {
            return nil, fmt.Errorf("unable to parse file %v", fi)
        }
    }

    return instance, nil
}

func (s *Packr2Source) Open(url string) (source.Driver, error) {
    return nil, fmt.Errorf("not implemented")
}

func (s *Packr2Source) Close() error {
    s.Migrations = nil
    return nil
}

func (s *Packr2Source) First() (version uint, err error) {
    if v, ok := s.Migrations.First(); !ok {
        return 0, os.ErrNotExist
    } else {
        return v, nil
    }
}

func (s *Packr2Source) Prev(version uint) (prevVersion uint, err error) {
    if v, ok := s.Migrations.Prev(version); !ok {
        return 0, os.ErrNotExist
    } else {
        return v, nil
    }
}

func (s *Packr2Source) Next(version uint) (nextVersion uint, err error) {
    if v, ok := s.Migrations.Next(version); !ok {
        return 0, os.ErrNotExist
    } else {
        return v, nil
    }
}

func (s *Packr2Source) ReadUp(version uint) (r io.ReadCloser, identifier string, err error) {
    if migration, ok := s.Migrations.Up(version); !ok {
        return nil, "", os.ErrNotExist
    } else {
        b := s.Box.Bytes(migration.Raw)
        return ioutil.NopCloser(bytes.NewBuffer(b)),
            migration.Identifier,
            nil
    }
}

func (s *Packr2Source) ReadDown(version uint) (r io.ReadCloser, identifier string, err error) {
    if migration, ok := s.Migrations.Down(version); !ok {
        return nil, "", migrate.ErrNilVersion
    } else {
        b := s.Box.Bytes(migration.Raw)
        return ioutil.NopCloser(bytes.NewBuffer(b)),
            migration.Identifier,
            nil
    }
}
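Note: Packr2Source adapts a packr box of .sql files to golang-migrate's source.Driver interface; WithInstance pre-parses every file name in the box into the Migrations index. A sketch that lists the embedded migration versions, assuming it runs from a package whose relative "./migrations" path resolves to the SQL files.

package main

import (
    "fmt"

    "github.com/gobuffalo/packr/v2"
    "github.com/golang-migrate/migrate/v4/source"
    "github.com/stashapp/stash/internal/database"
)

func main() {
    box := packr.New("Migrations Box", "./migrations") // path is relative to the calling package
    driver, err := database.WithInstance(&database.Packr2Source{
        Box:        box,
        Migrations: source.NewMigrations(),
    })
    if err != nil {
        panic(err)
    }

    // Walk the known versions: First, then Next until os.ErrNotExist.
    v, err := driver.First()
    for err == nil {
        fmt.Println("migration version:", v)
        v, err = driver.Next(v)
    }
}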
120
internal/ffmpeg/encoder.go
Normal file
@@ -0,0 +1,120 @@
package ffmpeg

import (
    "fmt"
    "github.com/stashapp/stash/internal/logger"
    "io/ioutil"
    "os/exec"
    "regexp"
    "strconv"
)

var progressRegex = regexp.MustCompile(`time=(\d+):(\d+):(\d+.\d+)`)

type encoder struct {
    Path string
}

func NewEncoder(ffmpegPath string) encoder {
    return encoder{
        Path: ffmpegPath,
    }
}

type ScreenshotOptions struct {
    OutputPath string
    Quality    int
    Time       float64
    Width      int
    Verbosity  string
}

type TranscodeOptions struct {
    OutputPath string
}

func (e *encoder) Screenshot(probeResult FFProbeResult, options ScreenshotOptions) {
    if options.Verbosity == "" {
        options.Verbosity = "quiet"
    }
    if options.Quality == 0 {
        options.Quality = 1
    }
    args := []string{
        "-v", options.Verbosity,
        "-ss", fmt.Sprintf("%v", options.Time),
        "-y",
        "-i", probeResult.Path, // TODO: Wrap in quotes?
        "-vframes", "1",
        "-q:v", fmt.Sprintf("%v", options.Quality),
        "-vf", fmt.Sprintf("scale=%v:-1", options.Width),
        "-f", "image2",
        options.OutputPath,
    }
    _, _ = e.run(probeResult, args)
}

func (e *encoder) Transcode(probeResult FFProbeResult, options TranscodeOptions) {
    args := []string{
        "-i", probeResult.Path,
        "-c:v", "libx264",
        "-profile:v", "high",
        "-level", "4.2",
        "-preset", "superfast",
        "-crf", "23",
        "-vf", "scale=iw:-2",
        "-c:a", "aac",
        options.OutputPath,
    }
    _, _ = e.run(probeResult, args)
}

func (e *encoder) run(probeResult FFProbeResult, args []string) (string, error) {
    cmd := exec.Command(e.Path, args...)

    stderr, err := cmd.StderrPipe()
    if err != nil {
        logger.Error("FFMPEG stderr not available: " + err.Error())
    }

    stdout, err := cmd.StdoutPipe()
    if nil != err {
        logger.Error("FFMPEG stdout not available: " + err.Error())
    }

    if err = cmd.Start(); err != nil {
        return "", err
    }

    buf := make([]byte, 80)
    for {
        n, err := stderr.Read(buf)
        if n > 0 {
            data := string(buf[0:n])
            regexResult := progressRegex.FindStringSubmatch(data)
            if len(regexResult) == 4 && probeResult.Duration > 0 {
                h, _ := strconv.ParseFloat(regexResult[1], 64)
                m, _ := strconv.ParseFloat(regexResult[2], 64)
                s, _ := strconv.ParseFloat(regexResult[3], 64)
                hours := h * 3600
                mins := m * 60
                secs := s
                time := hours + mins + secs
                progress := time / probeResult.Duration
                logger.Infof("Progress %.2f", progress)
            }
        }
        if err != nil {
            break
        }
    }

    stdoutData, _ := ioutil.ReadAll(stdout)
    stdoutString := string(stdoutData)

    if err := cmd.Wait(); err != nil {
        return stdoutString, err
    }

    return stdoutString, nil
}
121
internal/ffmpeg/ffprobe.go
Normal file
@@ -0,0 +1,121 @@
package ffmpeg

import (
    "encoding/json"
    "fmt"
    "math"
    "os"
    "os/exec"
    "runtime"
    "strconv"
    "strings"
    "time"
)

type ffprobeExecutable struct {
    Path string
}

type FFProbeResult struct {
    JSON ffprobeJSON

    Path         string
    Container    string
    Duration     float64
    StartTime    float64
    Bitrate      int64
    Size         int64
    CreationTime time.Time

    VideoCodec   string
    VideoBitrate int64
    Width        int
    Height       int
    FrameRate    float64
    Rotation     int64

    AudioCodec string
}

func NewFFProbe(ffprobePath string) ffprobeExecutable {
    return ffprobeExecutable{
        Path: ffprobePath,
    }
}

// Execute exec command and bind result to struct.
func (ffp *ffprobeExecutable) ProbeVideo(filePath string) (*FFProbeResult, error) {
    args := []string{"-v", "quiet", "-print_format", "json", "-show_format", "-show_streams", "-show_error", filePath}
    if runtime.GOOS == "windows" {
        args = append(args, "-count_frames")
    }
    out, err := exec.Command(ffp.Path, args...).Output()

    if err != nil {
        return nil, fmt.Errorf("FFProbe encountered an error with <%s>.\nError JSON:\n%s\nError: %s", filePath, string(out), err.Error())
    }

    probeJSON := &ffprobeJSON{}
    if err := json.Unmarshal(out, probeJSON); err != nil {
        return nil, err
    }

    result := ffp.newProbeResult(filePath, *probeJSON)
    return result, nil
}

func (ffp *ffprobeExecutable) newProbeResult(filePath string, probeJson ffprobeJSON) *FFProbeResult {
    videoStreamIndex := ffp.getStreamIndex("video", probeJson)
    audioStreamIndex := ffp.getStreamIndex("audio", probeJson)

    result := &FFProbeResult{}
    result.JSON = probeJson
    result.Path = filePath
    result.Container = probeJson.Format.FormatName
    duration, _ := strconv.ParseFloat(probeJson.Format.Duration, 64)
    result.Duration = math.Round(duration*100) / 100
    result.StartTime, _ = strconv.ParseFloat(probeJson.Format.StartTime, 64)
    result.Bitrate, _ = strconv.ParseInt(probeJson.Format.BitRate, 10, 64)
    fileStat, _ := os.Stat(filePath)
    result.Size = fileStat.Size()
    result.CreationTime = probeJson.Format.Tags.CreationTime

    if videoStreamIndex != -1 {
        videoStream := probeJson.Streams[videoStreamIndex]
        result.VideoCodec = videoStream.CodecName
        result.VideoBitrate, _ = strconv.ParseInt(videoStream.BitRate, 10, 64)
        var framerate float64
        if strings.Contains(videoStream.AvgFrameRate, "/") {
            frameRateSplit := strings.Split(videoStream.AvgFrameRate, "/")
            numerator, _ := strconv.ParseFloat(frameRateSplit[0], 64)
            denominator, _ := strconv.ParseFloat(frameRateSplit[1], 64)
            framerate = numerator / denominator
        } else {
            framerate, _ = strconv.ParseFloat(videoStream.AvgFrameRate, 64)
        }
        result.FrameRate = math.Round(framerate*100) / 100
        if rotate, err := strconv.ParseInt(videoStream.Tags.Rotate, 10, 64); err == nil && rotate != 180 {
            result.Width = videoStream.Height
            result.Height = videoStream.Width
        } else {
            result.Width = videoStream.Width
            result.Height = videoStream.Height
        }
    }

    if audioStreamIndex != -1 {
        result.AudioCodec = probeJson.Streams[audioStreamIndex].CodecName
    }

    return result
}

func (ffp *ffprobeExecutable) getStreamIndex(fileType string, probeJson ffprobeJSON) int {
    for i, stream := range probeJson.Streams {
        if stream.CodecType == fileType {
            return i
        }
    }

    return -1
}
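Note: ProbeVideo shells out to ffprobe with -print_format json and maps the output onto FFProbeResult; the encoder then reuses that result when generating screenshots and transcodes. A sketch of the two together, assuming local ffmpeg/ffprobe binaries and an input file path (both assumptions).

package main

import (
    "fmt"

    "github.com/stashapp/stash/internal/ffmpeg"
)

func main() {
    probe := ffmpeg.NewFFProbe("/usr/bin/ffprobe") // assumed binary location
    result, err := probe.ProbeVideo("/videos/example.mp4") // assumed input file
    if err != nil {
        panic(err)
    }
    fmt.Printf("%s: %.2fs %dx%d %s/%s\n",
        result.Path, result.Duration, result.Width, result.Height,
        result.VideoCodec, result.AudioCodec)

    enc := ffmpeg.NewEncoder("/usr/bin/ffmpeg") // assumed binary location
    enc.Screenshot(*result, ffmpeg.ScreenshotOptions{
        OutputPath: "/tmp/example.jpg",
        Time:       result.Duration / 2, // grab a frame from the middle
        Width:      640,
    })
}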
90
internal/ffmpeg/types.go
Normal file
@@ -0,0 +1,90 @@
package ffmpeg

import (
    "time"
)

type ffprobeJSON struct {
    Format struct {
        BitRate        string `json:"bit_rate"`
        Duration       string `json:"duration"`
        Filename       string `json:"filename"`
        FormatLongName string `json:"format_long_name"`
        FormatName     string `json:"format_name"`
        NbPrograms     int    `json:"nb_programs"`
        NbStreams      int    `json:"nb_streams"`
        ProbeScore     int    `json:"probe_score"`
        Size           string `json:"size"`
        StartTime      string `json:"start_time"`
        Tags struct {
            CompatibleBrands string    `json:"compatible_brands"`
            CreationTime     time.Time `json:"creation_time"`
            Encoder          string    `json:"encoder"`
            MajorBrand       string    `json:"major_brand"`
            MinorVersion     string    `json:"minor_version"`
        } `json:"tags"`
    } `json:"format"`
    Streams []struct {
        AvgFrameRate       string `json:"avg_frame_rate"`
        BitRate            string `json:"bit_rate"`
        BitsPerRawSample   string `json:"bits_per_raw_sample,omitempty"`
        ChromaLocation     string `json:"chroma_location,omitempty"`
        CodecLongName      string `json:"codec_long_name"`
        CodecName          string `json:"codec_name"`
        CodecTag           string `json:"codec_tag"`
        CodecTagString     string `json:"codec_tag_string"`
        CodecTimeBase      string `json:"codec_time_base"`
        CodecType          string `json:"codec_type"`
        CodedHeight        int    `json:"coded_height,omitempty"`
        CodedWidth         int    `json:"coded_width,omitempty"`
        DisplayAspectRatio string `json:"display_aspect_ratio,omitempty"`
        Disposition struct {
            AttachedPic     int `json:"attached_pic"`
            CleanEffects    int `json:"clean_effects"`
            Comment         int `json:"comment"`
            Default         int `json:"default"`
            Dub             int `json:"dub"`
            Forced          int `json:"forced"`
            HearingImpaired int `json:"hearing_impaired"`
            Karaoke         int `json:"karaoke"`
            Lyrics          int `json:"lyrics"`
            Original        int `json:"original"`
            TimedThumbnails int `json:"timed_thumbnails"`
            VisualImpaired  int `json:"visual_impaired"`
        } `json:"disposition"`
        Duration      string `json:"duration"`
        DurationTs    int    `json:"duration_ts"`
        HasBFrames    int    `json:"has_b_frames,omitempty"`
        Height        int    `json:"height,omitempty"`
        Index         int    `json:"index"`
        IsAvc         string `json:"is_avc,omitempty"`
        Level         int    `json:"level,omitempty"`
        NalLengthSize string `json:"nal_length_size,omitempty"`
        NbFrames      string `json:"nb_frames"`
        PixFmt        string `json:"pix_fmt,omitempty"`
        Profile       string `json:"profile"`
        RFrameRate    string `json:"r_frame_rate"`
        Refs          int    `json:"refs,omitempty"`
        SampleAspectRatio string `json:"sample_aspect_ratio,omitempty"`
        StartPts      int    `json:"start_pts"`
        StartTime     string `json:"start_time"`
        Tags struct {
            CreationTime time.Time `json:"creation_time"`
            HandlerName  string    `json:"handler_name"`
            Language     string    `json:"language"`
            Rotate       string    `json:"rotate"`
        } `json:"tags"`
        TimeBase      string `json:"time_base"`
        Width         int    `json:"width,omitempty"`
        BitsPerSample int    `json:"bits_per_sample,omitempty"`
        ChannelLayout string `json:"channel_layout,omitempty"`
        Channels      int    `json:"channels,omitempty"`
        MaxBitRate    string `json:"max_bit_rate,omitempty"`
        SampleFmt     string `json:"sample_fmt,omitempty"`
        SampleRate    string `json:"sample_rate,omitempty"`
    } `json:"streams"`
    Error struct {
        Code   int    `json:"code"`
        String string `json:"string"`
    } `json:"error"`
}
60
internal/logger/logger.go
Normal file
@@ -0,0 +1,60 @@
package logger

import (
    "github.com/sirupsen/logrus"
)

var logger = logrus.New()
var progressLogger = logrus.New()

func init() {
    progressLogger.SetFormatter(new(ProgressFormatter))
}

func Progressf(format string, args ...interface{}) {
    progressLogger.Infof(format, args...)
}

func Debug(args ...interface{}) {
    logger.Debug(args...)
}

func Debugf(format string, args ...interface{}) {
    logger.Debugf(format, args...)
}

func Info(args ...interface{}) {
    logger.Info(args...)
}

func Infof(format string, args ...interface{}) {
    logger.Infof(format, args...)
}

func Warn(args ...interface{}) {
    logger.Warn(args...)
}

func Warnf(format string, args ...interface{}) {
    logger.Warnf(format, args...)
}

func Error(args ...interface{}) {
    logger.Error(args...)
}

func Errorf(format string, args ...interface{}) {
    logger.Errorf(format, args...)
}

func Fatal(args ...interface{}) {
    logger.Fatal(args...)
}

func Fatalf(format string, args ...interface{}) {
    logger.Fatalf(format, args...)
}

//func WithRequest(req *http.Request) *logrus.Entry {
//    return logger.WithFields(RequestFields(req))
//}
12
internal/logger/progress_formatter.go
Normal file
@@ -0,0 +1,12 @@
package logger

import (
    "github.com/sirupsen/logrus"
)

type ProgressFormatter struct{}

func (f *ProgressFormatter) Format(entry *logrus.Entry) ([]byte, error) {
    msg := []byte("Processing --> " + entry.Message + "\r")
    return msg, nil
}
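Note: Progressf routes through a second logrus instance whose formatter emits "Processing --> <message>\r", so a long-running task keeps rewriting the same console line instead of scrolling. A tiny sketch of what that looks like from a caller.

package main

import (
    "time"

    "github.com/stashapp/stash/internal/logger"
)

func main() {
    for i := 1; i <= 100; i++ {
        // Each call overwrites the previous console line thanks to the trailing \r.
        logger.Progressf("%d%% complete", i)
        time.Sleep(20 * time.Millisecond)
    }
}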
13
internal/manager/job_status.go
Normal file
@@ -0,0 +1,13 @@
package manager

type JobStatus int

const (
    Idle     JobStatus = 0
    Import   JobStatus = 1
    Export   JobStatus = 2
    Scan     JobStatus = 3
    Generate JobStatus = 4
    Clean    JobStatus = 5
    Scrape   JobStatus = 6
)
45
internal/manager/json_utils.go
Normal file
@@ -0,0 +1,45 @@
package manager

import "github.com/stashapp/stash/internal/manager/jsonschema"

type jsonUtils struct{}

func (jp *jsonUtils) getMappings() (*jsonschema.Mappings, error) {
    return jsonschema.LoadMappingsFile(instance.Paths.JSON.MappingsFile)
}

func (jp *jsonUtils) saveMappings(mappings *jsonschema.Mappings) error {
    return jsonschema.SaveMappingsFile(instance.Paths.JSON.MappingsFile, mappings)
}

func (jp *jsonUtils) getScraped() ([]jsonschema.ScrapedItem, error) {
    return jsonschema.LoadScrapedFile(instance.Paths.JSON.ScrapedFile)
}

func (jp *jsonUtils) saveScaped(scraped []jsonschema.ScrapedItem) error {
    return jsonschema.SaveScrapedFile(instance.Paths.JSON.ScrapedFile, scraped)
}

func (jp *jsonUtils) getPerformer(checksum string) (*jsonschema.Performer, error) {
    return jsonschema.LoadPerformerFile(instance.Paths.JSON.PerformerJSONPath(checksum))
}

func (jp *jsonUtils) savePerformer(checksum string, performer *jsonschema.Performer) error {
    return jsonschema.SavePerformerFile(instance.Paths.JSON.PerformerJSONPath(checksum), performer)
}

func (jp *jsonUtils) getStudio(checksum string) (*jsonschema.Studio, error) {
    return jsonschema.LoadStudioFile(instance.Paths.JSON.StudioJSONPath(checksum))
}

func (jp *jsonUtils) saveStudio(checksum string, studio *jsonschema.Studio) error {
    return jsonschema.SaveStudioFile(instance.Paths.JSON.StudioJSONPath(checksum), studio)
}

func (jp *jsonUtils) getScene(checksum string) (*jsonschema.Scene, error) {
    return jsonschema.LoadSceneFile(instance.Paths.JSON.SceneJSONPath(checksum))
}

func (jp *jsonUtils) saveScene(checksum string, scene *jsonschema.Scene) error {
    return jsonschema.SaveSceneFile(instance.Paths.JSON.SceneJSONPath(checksum), scene)
}
28
internal/manager/jsonschema/config.go
Normal file
@@ -0,0 +1,28 @@
package jsonschema

import (
    "encoding/json"
    "github.com/stashapp/stash/internal/logger"
    "os"
)

type Config struct {
    Stash    string `json:"stash"`
    Metadata string `json:"metadata"`
    // Generated string `json:"generated"` // TODO: Generated directory instead of metadata
    Cache     string `json:"cache"`
    Downloads string `json:"downloads"`
}

func LoadConfigFile(file string) *Config {
    var config Config
    configFile, err := os.Open(file)
    defer configFile.Close()
    if err != nil {
        logger.Error(err.Error())
    }
    jsonParser := json.NewDecoder(configFile)
    parseError := jsonParser.Decode(&config)
    if parseError != nil { panic(parseError) }
    return &config
}
46
internal/manager/jsonschema/mappings.go
Normal file
@@ -0,0 +1,46 @@
package jsonschema

import (
    "encoding/json"
    "fmt"
    "os"
)

type NameMapping struct {
    Name     string `json:"name"`
    Checksum string `json:"checksum"`
}

type PathMapping struct {
    Path     string `json:"path"`
    Checksum string `json:"checksum"`
}

type Mappings struct {
    Performers []NameMapping `json:"performers"`
    Studios    []NameMapping `json:"studios"`
    Galleries  []PathMapping `json:"galleries"`
    Scenes     []PathMapping `json:"scenes"`
}

func LoadMappingsFile(filePath string) (*Mappings, error) {
    var mappings Mappings
    file, err := os.Open(filePath)
    defer file.Close()
    if err != nil {
        return nil, err
    }
    jsonParser := json.NewDecoder(file)
    err = jsonParser.Decode(&mappings)
    if err != nil {
        return nil, err
    }
    return &mappings, nil
}

func SaveMappingsFile(filePath string, mappings *Mappings) error {
    if mappings == nil {
        return fmt.Errorf("mappings must not be nil")
    }
    return marshalToFile(filePath, mappings)
}
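Note: the mappings file is the index the import/export tasks use to tie checksums back to performer/studio names and gallery/scene paths. A round-trip sketch of the load/save helpers; the file path and the sample values are assumptions.

package main

import (
    "fmt"

    "github.com/stashapp/stash/internal/manager/jsonschema"
)

func main() {
    const mappingsFile = "/tmp/mappings.json" // assumed location

    mappings := &jsonschema.Mappings{
        Performers: []jsonschema.NameMapping{{Name: "Example", Checksum: "abc123"}},
        Scenes:     []jsonschema.PathMapping{{Path: "/videos/example.mp4", Checksum: "def456"}},
    }
    if err := jsonschema.SaveMappingsFile(mappingsFile, mappings); err != nil {
        panic(err)
    }

    loaded, err := jsonschema.LoadMappingsFile(mappingsFile)
    if err != nil {
        panic(err)
    }
    fmt.Printf("%d performers, %d scenes\n", len(loaded.Performers), len(loaded.Scenes))
}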
49
internal/manager/jsonschema/performer.go
Normal file
@@ -0,0 +1,49 @@
package jsonschema

import (
    "encoding/json"
    "fmt"
    "os"
)

type Performer struct {
    Name         string `json:"name,omitempty"`
    Url          string `json:"url,omitempty"`
    Twitter      string `json:"twitter,omitempty"`
    Instagram    string `json:"instagram,omitempty"`
    Birthdate    string `json:"birthdate,omitempty"`
    Ethnicity    string `json:"ethnicity,omitempty"`
    Country      string `json:"country,omitempty"`
    EyeColor     string `json:"eye_color,omitempty"`
    Height       string `json:"height,omitempty"`
    Measurements string `json:"measurements,omitempty"`
    FakeTits     string `json:"fake_tits,omitempty"`
    CareerLength string `json:"career_length,omitempty"`
    Tattoos      string `json:"tattoos,omitempty"`
    Piercings    string `json:"piercings,omitempty"`
    Aliases      string `json:"aliases,omitempty"`
    Favorite     bool   `json:"favorite,omitempty"`
    Image        string `json:"image,omitempty"`
}

func LoadPerformerFile(filePath string) (*Performer, error) {
    var performer Performer
    file, err := os.Open(filePath)
    defer file.Close()
    if err != nil {
        return nil, err
    }
    jsonParser := json.NewDecoder(file)
    err = jsonParser.Decode(&performer)
    if err != nil {
        return nil, err
    }
    return &performer, nil
}

func SavePerformerFile(filePath string, performer *Performer) error {
    if performer == nil {
        return fmt.Errorf("performer must not be nil")
    }
    return marshalToFile(filePath, performer)
}
61
internal/manager/jsonschema/scene.go
Normal file
@@ -0,0 +1,61 @@
package jsonschema

import (
    "encoding/json"
    "fmt"
    "os"
)

type SceneMarker struct {
    Title      string   `json:"title,omitempty"`
    Seconds    string   `json:"seconds,omitempty"`
    PrimaryTag string   `json:"primary_tag,omitempty"`
    Tags       []string `json:"tags,omitempty"`
}

type SceneFile struct {
    Size       string `json:"size"`
    Duration   string `json:"duration"`
    VideoCodec string `json:"video_codec"`
    AudioCodec string `json:"audio_codec"`
    Width      int    `json:"width"`
    Height     int    `json:"height"`
    Framerate  string `json:"framerate"`
    Bitrate    int    `json:"bitrate"`
}

type Scene struct {
    Title      string        `json:"title,omitempty"`
    Studio     string        `json:"studio,omitempty"`
    Url        string        `json:"url,omitempty"`
    Date       string        `json:"date,omitempty"`
    Rating     int           `json:"rating,omitempty"`
    Details    string        `json:"details,omitempty"`
    Gallery    string        `json:"gallery,omitempty"`
    Performers []string      `json:"performers,omitempty"`
    Tags       []string      `json:"tags,omitempty"`
    Markers    []SceneMarker `json:"markers,omitempty"`
    File       *SceneFile    `json:"file,omitempty"`
}

func LoadSceneFile(filePath string) (*Scene, error) {
    var scene Scene
    file, err := os.Open(filePath)
    defer file.Close()
    if err != nil {
        return nil, err
    }
    jsonParser := json.NewDecoder(file)
    err = jsonParser.Decode(&scene)
    if err != nil {
        return nil, err
    }
    return &scene, nil
}

func SaveSceneFile(filePath string, scene *Scene) error {
    if scene == nil {
        return fmt.Errorf("scene must not be nil")
    }
    return marshalToFile(filePath, scene)
}
46
internal/manager/jsonschema/scraped.go
Normal file
@@ -0,0 +1,46 @@
package jsonschema

import (
    "encoding/json"
    "fmt"
    "os"
)

type ScrapedItem struct {
    Title           string    `json:"title,omitempty"`
    Description     string    `json:"description,omitempty"`
    Url             string    `json:"url,omitempty"`
    Date            string    `json:"date,omitempty"`
    Rating          string    `json:"rating,omitempty"`
    Tags            string    `json:"tags,omitempty"`
    Models          string    `json:"models,omitempty"`
    Episode         int       `json:"episode,omitempty"`
    GalleryFilename string    `json:"gallery_filename,omitempty"`
    GalleryUrl      string    `json:"gallery_url,omitempty"`
    VideoFilename   string    `json:"video_filename,omitempty"`
    VideoUrl        string    `json:"video_url,omitempty"`
    Studio          string    `json:"studio,omitempty"`
    UpdatedAt       RailsTime `json:"updated_at,omitempty"`
}

func LoadScrapedFile(filePath string) ([]ScrapedItem, error) {
    var scraped []ScrapedItem
    file, err := os.Open(filePath)
    defer file.Close()
    if err != nil {
        return nil, err
    }
    jsonParser := json.NewDecoder(file)
    err = jsonParser.Decode(&scraped)
    if err != nil {
        return nil, err
    }
    return scraped, nil
}

func SaveScrapedFile(filePath string, scrapedItems []ScrapedItem) error {
    if scrapedItems == nil {
        return fmt.Errorf("scraped items must not be nil")
    }
    return marshalToFile(filePath, scrapedItems)
}
35
internal/manager/jsonschema/studio.go
Normal file
@@ -0,0 +1,35 @@
package jsonschema

import (
    "encoding/json"
    "fmt"
    "os"
)

type Studio struct {
    Name  string `json:"name,omitempty"`
    Url   string `json:"url,omitempty"`
    Image string `json:"image,omitempty"`
}

func LoadStudioFile(filePath string) (*Studio, error) {
    var studio Studio
    file, err := os.Open(filePath)
    defer file.Close()
    if err != nil {
        return nil, err
    }
    jsonParser := json.NewDecoder(file)
    err = jsonParser.Decode(&studio)
    if err != nil {
        return nil, err
    }
    return &studio, nil
}

func SaveStudioFile(filePath string, studio *Studio) error {
    if studio == nil {
        return fmt.Errorf("studio must not be nil")
    }
    return marshalToFile(filePath, studio)
}
37
internal/manager/jsonschema/time_rails.go
Normal file
@@ -0,0 +1,37 @@
package jsonschema

import (
    "fmt"
    "strings"
    "time"
)

type RailsTime struct {
    time.Time
}

const railsTimeLayout = "2006-01-02 15:04:05 MST"

func (ct *RailsTime) UnmarshalJSON(b []byte) (err error) {
    s := strings.Trim(string(b), "\"")
    if s == "null" {
        ct.Time = time.Time{}
        return
    }
    ct.Time, err = time.Parse(railsTimeLayout, s)
    if err != nil {
        ct.Time, err = time.Parse(time.RFC3339, s)
    }
    return
}

func (ct *RailsTime) MarshalJSON() ([]byte, error) {
    if ct.Time.UnixNano() == nilTime {
        return []byte("null"), nil
    }
    return []byte(fmt.Sprintf("\"%s\"", ct.Time.Format(time.RFC3339))), nil
}

func (ct *RailsTime) IsSet() bool {
    return ct.UnixNano() != nilTime
}
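Note: RailsTime accepts either the legacy Rails layout ("2006-01-02 15:04:05 MST") or RFC 3339 when decoding, and always re-emits RFC 3339 when encoding. A quick sketch of both accepted inputs; the sample timestamps are illustrative.

package main

import (
    "encoding/json"
    "fmt"

    "github.com/stashapp/stash/internal/manager/jsonschema"
)

func main() {
    var item struct {
        UpdatedAt jsonschema.RailsTime `json:"updated_at"`
    }

    // Rails-style timestamp, as found in exports from the original Rails app.
    _ = json.Unmarshal([]byte(`{"updated_at": "2019-02-09 12:30:00 UTC"}`), &item)
    fmt.Println(item.UpdatedAt.IsSet())

    // RFC 3339 also decodes; marshalling always produces RFC 3339.
    _ = json.Unmarshal([]byte(`{"updated_at": "2019-02-09T12:30:00Z"}`), &item)
    out, _ := json.Marshal(&item)
    fmt.Println(string(out))
}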
39
internal/manager/jsonschema/utils.go
Normal file
@@ -0,0 +1,39 @@
package jsonschema

import (
    "bytes"
    "encoding/json"
    "io/ioutil"
    "time"
)

var nilTime = (time.Time{}).UnixNano()

func CompareJSON(a interface{}, b interface{}) bool {
    aBuf, _ := encode(a)
    bBuf, _ := encode(b)
    return bytes.Compare(aBuf, bBuf) == 0
}

func marshalToFile(filePath string, j interface{}) error {
    data, err := encode(j)
    if err != nil {
        return err
    }
    if err := ioutil.WriteFile(filePath, data, 0755); err != nil {
        return err
    }
    return nil
}

func encode(j interface{}) ([]byte, error) {
    buffer := &bytes.Buffer{}
    encoder := json.NewEncoder(buffer)
    encoder.SetEscapeHTML(false)
    encoder.SetIndent("", " ")
    if err := encoder.Encode(j); err != nil {
        return nil, err
    }
    // Strip the newline at the end of the file
    return bytes.TrimRight(buffer.Bytes(), "\n"), nil
}
94
internal/manager/manager.go
Normal file
@@ -0,0 +1,94 @@
package manager

import (
    "github.com/bmatcuk/doublestar"
    "github.com/stashapp/stash/internal/logger"
    "github.com/stashapp/stash/internal/manager/paths"
    "path/filepath"
    "sync"
)

type singleton struct {
    Status JobStatus
    Paths  *paths.Paths
    JSON   *jsonUtils
}

var instance *singleton
var once sync.Once

func GetInstance() *singleton {
    Initialize()
    return instance
}

func Initialize() *singleton {
    once.Do(func() {
        instance = &singleton{
            Status: Idle,
            Paths:  paths.RefreshPaths(),
            JSON:   &jsonUtils{},
        }
    })

    return instance
}

func (s *singleton) Scan() {
    if s.Status != Idle { return }
    s.Status = Scan

    go func() {
        defer s.returnToIdleState()

        globPath := filepath.Join(s.Paths.Config.Stash, "**/*.{zip,m4v,mp4,mov,wmv}")
        globResults, _ := doublestar.Glob(globPath)
        logger.Infof("Starting scan of %d files", len(globResults))

        var wg sync.WaitGroup
        for _, path := range globResults {
            wg.Add(1)
            task := ScanTask{FilePath: path}
            go task.Start(&wg)
            wg.Wait()
        }
    }()
}

func (s *singleton) Import() {
    if s.Status != Idle { return }
    s.Status = Import

    go func() {
        defer s.returnToIdleState()

        var wg sync.WaitGroup
        wg.Add(1)
        task := ImportTask{}
        go task.Start(&wg)
        wg.Wait()
    }()
}

func (s *singleton) Export() {
    if s.Status != Idle { return }
    s.Status = Export

    go func() {
        defer s.returnToIdleState()

        var wg sync.WaitGroup
        wg.Add(1)
        task := ExportTask{}
        go task.Start(&wg)
        wg.Wait()
    }()
}

func (s *singleton) returnToIdleState() {
    if r := recover(); r != nil {
        logger.Info("recovered from ", r)
    }

    s.Status = Idle
}
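Note: the manager is a sync.Once singleton whose Scan/Import/Export methods refuse to start unless Status is Idle, run their task on a goroutine, and reset Status through the deferred returnToIdleState. The standalone sketch below illustrates that "only when idle" guard pattern with a hypothetical task runner, since exercising the real manager needs a config file and media paths; it adds a mutex around the guard, which the original code does not do.

package main

import (
    "fmt"
    "sync"
    "time"
)

type jobRunner struct {
    mu   sync.Mutex
    busy bool
}

// start mirrors the manager's guard: reject new work while a job is running,
// and return to the idle state when the background task finishes.
func (j *jobRunner) start(name string, task func()) {
    j.mu.Lock()
    if j.busy {
        j.mu.Unlock()
        fmt.Println("rejected:", name)
        return
    }
    j.busy = true
    j.mu.Unlock()

    go func() {
        defer func() { // returnToIdleState equivalent
            j.mu.Lock()
            j.busy = false
            j.mu.Unlock()
        }()
        task()
    }()
}

func main() {
    r := &jobRunner{}
    r.start("scan", func() { time.Sleep(100 * time.Millisecond); fmt.Println("scan done") })
    r.start("import", func() {}) // rejected while the scan is still running
    time.Sleep(200 * time.Millisecond)
}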
64
internal/manager/paths/paths.go
Normal file
@@ -0,0 +1,64 @@
package paths

import (
    "github.com/stashapp/stash/internal/manager/jsonschema"
    "github.com/stashapp/stash/internal/utils"
    "os"
    "os/user"
    "path/filepath"
)

type Paths struct {
    FixedPaths *fixedPaths
    Config     *jsonschema.Config
    Generated  *generatedPaths
    JSON       *jsonPaths

    Gallery      *galleryPaths
    Scene        *scenePaths
    SceneMarkers *sceneMarkerPaths
}

func RefreshPaths() *Paths {
    fp := newFixedPaths()
    ensureConfigFile(fp)
    return newPaths(fp)
}

func newPaths(fp *fixedPaths) *Paths {
    p := Paths{}
    p.FixedPaths = fp
    p.Config = jsonschema.LoadConfigFile(p.FixedPaths.ConfigFile)
    p.Generated = newGeneratedPaths(p)
    p.JSON = newJSONPaths(p)

    p.Gallery = newGalleryPaths(p.Config)
    p.Scene = newScenePaths(p)
    p.SceneMarkers = newSceneMarkerPaths(p)
    return &p
}

func getExecutionDirectory() string {
    ex, err := os.Executable()
    if err != nil {
        panic(err)
    }
    return filepath.Dir(ex)
}

func getHomeDirectory() string {
    currentUser, err := user.Current()
    if err != nil {
        panic(err)
    }
    return currentUser.HomeDir
}

func ensureConfigFile(fp *fixedPaths) {
    configFileExists, _ := utils.FileExists(fp.ConfigFile) // TODO: Verify JSON is correct. Pass verified
    if configFileExists {
        return
    }

    panic("No config file found")
}
66
internal/manager/paths/paths_fixed.go
Normal file
66
internal/manager/paths/paths_fixed.go
Normal file
|
|
@ -0,0 +1,66 @@
|
||||||
|
package paths

import (
	"fmt"
	"github.com/stashapp/stash/internal/utils"
	"path/filepath"
	"runtime"
	"strings"
)

type fixedPaths struct {
	ExecutionDirectory string
	ConfigDirectory string
	ConfigFile string
	DatabaseFile string

	FFMPEG string
	FFProbe string
}

func newFixedPaths() *fixedPaths {
	fp := fixedPaths{}
	fp.ExecutionDirectory = getExecutionDirectory()
	fp.ConfigDirectory = filepath.Join(getHomeDirectory(), ".stash")
	fp.ConfigFile = filepath.Join(fp.ConfigDirectory, "config.json")
	fp.DatabaseFile = filepath.Join(fp.ConfigDirectory, "stash-go.sqlite")

	ffmpegDirectories := []string{fp.ExecutionDirectory, fp.ConfigDirectory}
	ffmpegFileName := func() string {
		if runtime.GOOS == "windows" {
			return "ffmpeg.exe"
		} else {
			return "ffmpeg"
		}
	}()
	ffprobeFileName := func() string {
		if runtime.GOOS == "windows" {
			return "ffprobe.exe"
		} else {
			return "ffprobe"
		}
	}()
	for _, directory := range ffmpegDirectories {
		ffmpegPath := filepath.Join(directory, ffmpegFileName)
		ffprobePath := filepath.Join(directory, ffprobeFileName)
		if exists, _ := utils.FileExists(ffmpegPath); exists {
			fp.FFMPEG = ffmpegPath
		}
		if exists, _ := utils.FileExists(ffprobePath); exists {
			fp.FFProbe = ffprobePath
		}
	}

	errorText := fmt.Sprintf(
		"FFMPEG or FFProbe not found. Place it in one of the following folders:\n\n%s",
		strings.Join(ffmpegDirectories, ","),
	)
	if exists, _ := utils.FileExists(fp.FFMPEG); !exists {
		panic(errorText)
	}
	if exists, _ := utils.FileExists(fp.FFProbe); !exists {
		panic(errorText)
	}

	return &fp
}
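The binary lookup above checks only two directories, appends ".exe" on Windows, and panics when nothing is found. Here is a standalone re-creation of that search written as an error-returning helper instead of a panic; everything in it is illustrative and not part of the repository.

package sketch

import (
	"errors"
	"os"
	"path/filepath"
	"runtime"
	"strings"
)

// findBinary looks for name (plus ".exe" on Windows) in each directory,
// mirroring how newFixedPaths resolves ffmpeg and ffprobe.
func findBinary(name string, dirs []string) (string, error) {
	if runtime.GOOS == "windows" {
		name += ".exe"
	}
	for _, dir := range dirs {
		candidate := filepath.Join(dir, name)
		if info, err := os.Stat(candidate); err == nil && !info.IsDir() {
			return candidate, nil
		}
	}
	return "", errors.New(name + " not found in: " + strings.Join(dirs, ", "))
}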
24
internal/manager/paths/paths_gallery.go
Normal file
@ -0,0 +1,24 @@
package paths

import (
	"github.com/stashapp/stash/internal/manager/jsonschema"
	"path/filepath"
)

type galleryPaths struct {
	config *jsonschema.Config
}

func newGalleryPaths(c *jsonschema.Config) *galleryPaths {
	gp := galleryPaths{}
	gp.config = c
	return &gp
}

func (gp *galleryPaths) GetExtractedPath(checksum string) string {
	return filepath.Join(gp.config.Cache, checksum)
}

func (gp *galleryPaths) GetExtractedFilePath(checksum string, fileName string) string {
	return filepath.Join(gp.config.Cache, checksum, fileName)
}
45
internal/manager/paths/paths_generated.go
Normal file
@ -0,0 +1,45 @@
package paths

import (
	"github.com/stashapp/stash/internal/utils"
	"path/filepath"
)

type generatedPaths struct {
	Screenshots string
	Vtt string
	Markers string
	Transcodes string
	Tmp string
}

func newGeneratedPaths(p Paths) *generatedPaths {
	gp := generatedPaths{}
	gp.Screenshots = filepath.Join(p.Config.Metadata, "screenshots")
	gp.Vtt = filepath.Join(p.Config.Metadata, "vtt")
	gp.Markers = filepath.Join(p.Config.Metadata, "markers")
	gp.Transcodes = filepath.Join(p.Config.Metadata, "transcodes")
	gp.Tmp = filepath.Join(p.Config.Metadata, "tmp")

	_ = utils.EnsureDir(gp.Screenshots)
	_ = utils.EnsureDir(gp.Vtt)
	_ = utils.EnsureDir(gp.Markers)
	_ = utils.EnsureDir(gp.Transcodes)
	return &gp
}

func (gp *generatedPaths) GetTmpPath(fileName string) string {
	return filepath.Join(gp.Tmp, fileName)
}

func (gp *generatedPaths) EnsureTmpDir() {
	_ = utils.EnsureDir(gp.Tmp)
}

func (gp *generatedPaths) EmptyTmpDir() {
	_ = utils.EmptyDir(gp.Tmp)
}

func (gp *generatedPaths) RemoveTmpDir() {
	_ = utils.RemoveDir(gp.Tmp)
}
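Note that newGeneratedPaths ensures every directory except Tmp; the tmp directory is created, emptied, and removed on demand through the helpers above. A hedged sketch of the expected lifecycle from a caller in the same package follows (the wrapper function itself is hypothetical).

// withTmpDir brackets a piece of work with the tmp-dir helpers defined above.
func withTmpDir(gp *generatedPaths, work func(scratch string)) {
	gp.EnsureTmpDir()      // create <metadata>/tmp if missing
	defer gp.EmptyTmpDir() // discard intermediates afterwards
	work(gp.GetTmpPath("scratch.bin"))
}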
44
internal/manager/paths/paths_json.go
Normal file
@ -0,0 +1,44 @@
package paths

import (
	"github.com/stashapp/stash/internal/utils"
	"path/filepath"
)

type jsonPaths struct {
	MappingsFile string
	ScrapedFile string

	Performers string
	Scenes string
	Galleries string
	Studios string
}

func newJSONPaths(p Paths) *jsonPaths {
	jp := jsonPaths{}
	jp.MappingsFile = filepath.Join(p.Config.Metadata, "mappings.json")
	jp.ScrapedFile = filepath.Join(p.Config.Metadata, "scraped.json")
	jp.Performers = filepath.Join(p.Config.Metadata, "performers")
	jp.Scenes = filepath.Join(p.Config.Metadata, "scenes")
	jp.Galleries = filepath.Join(p.Config.Metadata, "galleries")
	jp.Studios = filepath.Join(p.Config.Metadata, "studios")

	_ = utils.EnsureDir(jp.Performers)
	_ = utils.EnsureDir(jp.Scenes)
	_ = utils.EnsureDir(jp.Galleries)
	_ = utils.EnsureDir(jp.Studios)
	return &jp
}

func (jp *jsonPaths) PerformerJSONPath(checksum string) string {
	return filepath.Join(jp.Performers, checksum+".json")
}

func (jp *jsonPaths) SceneJSONPath(checksum string) string {
	return filepath.Join(jp.Scenes, checksum+".json")
}

func (jp *jsonPaths) StudioJSONPath(checksum string) string {
	return filepath.Join(jp.Studios, checksum+".json")
}
24
internal/manager/paths/paths_scene_markers.go
Normal file
@ -0,0 +1,24 @@
package paths

import (
	"path/filepath"
	"strconv"
)

type sceneMarkerPaths struct {
	generated generatedPaths
}

func newSceneMarkerPaths(p Paths) *sceneMarkerPaths {
	sp := sceneMarkerPaths{}
	sp.generated = *p.Generated
	return &sp
}

func (sp *sceneMarkerPaths) GetStreamPath(checksum string, seconds int) string {
	return filepath.Join(sp.generated.Markers, checksum, strconv.Itoa(seconds)+".mp4")
}

func (sp *sceneMarkerPaths) GetStreamPreviewImagePath(checksum string, seconds int) string {
	return filepath.Join(sp.generated.Markers, checksum, strconv.Itoa(seconds)+".webp")
}
54
internal/manager/paths/paths_scenes.go
Normal file
@ -0,0 +1,54 @@
package paths

import (
	"github.com/stashapp/stash/internal/utils"
	"path/filepath"
)

type scenePaths struct {
	generated generatedPaths
}

func newScenePaths(p Paths) *scenePaths {
	sp := scenePaths{}
	sp.generated = *p.Generated
	return &sp
}

func (sp *scenePaths) GetScreenshotPath(checksum string) string {
	return filepath.Join(sp.generated.Screenshots, checksum+".jpg")
}

func (sp *scenePaths) GetThumbnailScreenshotPath(checksum string) string {
	return filepath.Join(sp.generated.Screenshots, checksum+".thumb.jpg")
}

func (sp *scenePaths) GetTranscodePath(checksum string) string {
	return filepath.Join(sp.generated.Transcodes, checksum+".mp4")
}

func (sp *scenePaths) GetStreamPath(scenePath string, checksum string) string {
	transcodePath := sp.GetTranscodePath(checksum)
	transcodeExists, _ := utils.FileExists(transcodePath)
	if transcodeExists {
		return transcodePath
	} else {
		return scenePath
	}
}

func (sp *scenePaths) GetStreamPreviewPath(checksum string) string {
	return filepath.Join(sp.generated.Screenshots, checksum+".mp4")
}

func (sp *scenePaths) GetStreamPreviewImagePath(checksum string) string {
	return filepath.Join(sp.generated.Screenshots, checksum+".webp")
}

func (sp *scenePaths) GetSpriteImageFilePath(checksum string) string {
	return filepath.Join(sp.generated.Vtt, checksum+"_sprite.jpg")
}

func (sp *scenePaths) GetSpriteVttFilePath(checksum string) string {
	return filepath.Join(sp.generated.Vtt, checksum+"_thumbs.vtt")
}
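GetStreamPath is the only method above with a decision in it: prefer an already-generated transcode, otherwise fall back to the original file on disk. A hedged sketch of the consuming side follows; the handler, its signature, and the net/http usage are assumptions about how a server layer might use it, not the repository's actual routes.

// streamScene is hypothetical: it serves <metadata>/transcodes/<checksum>.mp4
// when that file exists, and the raw scene file otherwise.
func streamScene(w http.ResponseWriter, r *http.Request, sp *scenePaths, scenePath string, checksum string) {
	http.ServeFile(w, r, sp.GetStreamPath(scenePath, checksum))
}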
7
internal/manager/task.go
Normal file
@ -0,0 +1,7 @@
package manager

import "sync"

type Task interface {
	Start(wg *sync.WaitGroup)
}
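Every job type in this package (ScanTask, ImportTask, ExportTask) satisfies this one-method interface, and the manager always drives it the same way: add one to a WaitGroup, start the task on a goroutine, wait. A small sketch follows; runTask itself is hypothetical, since the manager inlines this pattern in Scan, Import and Export.

// runTask runs any Task to completion using the WaitGroup contract above.
func runTask(t Task) {
	var wg sync.WaitGroup
	wg.Add(1)
	go t.Start(&wg)
	wg.Wait()
}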
459
internal/manager/task_export.go
Normal file
@ -0,0 +1,459 @@
package manager
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"github.com/stashapp/stash/internal/database"
|
||||||
|
"github.com/stashapp/stash/internal/logger"
|
||||||
|
"github.com/stashapp/stash/internal/manager/jsonschema"
|
||||||
|
"github.com/stashapp/stash/internal/models"
|
||||||
|
"github.com/stashapp/stash/internal/utils"
|
||||||
|
"math"
|
||||||
|
"strconv"
|
||||||
|
"sync"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ExportTask struct {
|
||||||
|
Mappings *jsonschema.Mappings
|
||||||
|
Scraped []jsonschema.ScrapedItem
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *ExportTask) Start(wg *sync.WaitGroup) {
|
||||||
|
// @manager.total = Scene.count + Gallery.count + Performer.count + Studio.count
|
||||||
|
|
||||||
|
t.Mappings = &jsonschema.Mappings{}
|
||||||
|
t.Scraped = []jsonschema.ScrapedItem{}
|
||||||
|
|
||||||
|
ctx := context.TODO()
|
||||||
|
|
||||||
|
t.ExportScenes(ctx)
|
||||||
|
t.ExportGalleries(ctx)
|
||||||
|
t.ExportPerformers(ctx)
|
||||||
|
t.ExportStudios(ctx)
|
||||||
|
|
||||||
|
if err := instance.JSON.saveMappings(t.Mappings); err != nil {
|
||||||
|
logger.Errorf("[mappings] failed to save json: %s", err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
t.ExportScrapedItems(ctx)
|
||||||
|
|
||||||
|
wg.Done()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *ExportTask) ExportScenes(ctx context.Context) {
|
||||||
|
tx := database.DB.MustBeginTx(ctx, nil)
|
||||||
|
qb := models.NewSceneQueryBuilder()
|
||||||
|
studioQB := models.NewStudioQueryBuilder()
|
||||||
|
galleryQB := models.NewGalleryQueryBuilder()
|
||||||
|
performerQB := models.NewPerformerQueryBuilder()
|
||||||
|
tagQB := models.NewTagQueryBuilder()
|
||||||
|
sceneMarkerQB := models.NewSceneMarkerQueryBuilder()
|
||||||
|
scenes, err := qb.All()
|
||||||
|
if err != nil {
|
||||||
|
logger.Errorf("[scenes] failed to fetch all scenes: %s", err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Info("[scenes] exporting")
|
||||||
|
|
||||||
|
for i, scene := range scenes {
|
||||||
|
index := i + 1
|
||||||
|
logger.Progressf("[scenes] %d of %d", index, len(scenes))
|
||||||
|
|
||||||
|
t.Mappings.Scenes = append(t.Mappings.Scenes, jsonschema.PathMapping{ Path: scene.Path, Checksum: scene.Checksum })
|
||||||
|
newSceneJSON := jsonschema.Scene{}
|
||||||
|
|
||||||
|
var studioName string
|
||||||
|
if scene.StudioID.Valid {
|
||||||
|
studio, _ := studioQB.Find(int(scene.StudioID.Int64), tx)
|
||||||
|
if studio != nil {
|
||||||
|
studioName = studio.Name.String
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var galleryChecksum string
|
||||||
|
gallery, _ := galleryQB.FindBySceneID(scene.ID, tx)
|
||||||
|
if gallery != nil {
|
||||||
|
galleryChecksum = gallery.Checksum
|
||||||
|
}
|
||||||
|
|
||||||
|
performers, _ := performerQB.FindBySceneID(scene.ID, tx)
|
||||||
|
tags, _ := tagQB.FindBySceneID(scene.ID, tx)
|
||||||
|
sceneMarkers, _ := sceneMarkerQB.FindBySceneID(scene.ID, tx)
|
||||||
|
|
||||||
|
if scene.Title.Valid {
|
||||||
|
newSceneJSON.Title = scene.Title.String
|
||||||
|
}
|
||||||
|
if studioName != "" {
|
||||||
|
newSceneJSON.Studio = studioName
|
||||||
|
}
|
||||||
|
if scene.Url.Valid {
|
||||||
|
newSceneJSON.Url = scene.Url.String
|
||||||
|
}
|
||||||
|
if scene.Date.Valid {
|
||||||
|
newSceneJSON.Date = utils.GetYMDFromDatabaseDate(scene.Date.String)
|
||||||
|
}
|
||||||
|
if scene.Rating.Valid {
|
||||||
|
newSceneJSON.Rating = int(scene.Rating.Int64)
|
||||||
|
}
|
||||||
|
if scene.Details.Valid {
|
||||||
|
newSceneJSON.Details = scene.Details.String
|
||||||
|
}
|
||||||
|
if galleryChecksum != "" {
|
||||||
|
newSceneJSON.Gallery = galleryChecksum
|
||||||
|
}
|
||||||
|
|
||||||
|
newSceneJSON.Performers = t.getPerformerNames(performers)
|
||||||
|
newSceneJSON.Tags = t.getTagNames(tags)
|
||||||
|
|
||||||
|
for _, sceneMarker := range sceneMarkers {
|
||||||
|
var primaryTagID int
|
||||||
|
if sceneMarker.PrimaryTagID.Valid {
|
||||||
|
primaryTagID = int(sceneMarker.PrimaryTagID.Int64)
|
||||||
|
}
|
||||||
|
primaryTag, err := tagQB.Find(primaryTagID, tx)
|
||||||
|
if err != nil {
|
||||||
|
logger.Errorf("[scenes] <%s> invalid primary tag for scene marker: %s", scene.Checksum, err.Error())
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
sceneMarkerTags, err := tagQB.FindBySceneMarkerID(sceneMarker.ID, tx)
|
||||||
|
if err != nil {
|
||||||
|
logger.Errorf("[scenes] <%s> invalid tags for scene marker: %s", scene.Checksum, err.Error())
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if sceneMarker.Title == "" || sceneMarker.Seconds == 0 || primaryTag.Name == "" {
|
||||||
|
logger.Errorf("[scenes] invalid scene marker: %v", sceneMarker)
|
||||||
|
}
|
||||||
|
|
||||||
|
sceneMarkerJSON := jsonschema.SceneMarker{
|
||||||
|
Title: sceneMarker.Title,
|
||||||
|
Seconds: t.getDecimalString(sceneMarker.Seconds),
|
||||||
|
PrimaryTag: primaryTag.Name,
|
||||||
|
Tags: t.getTagNames(sceneMarkerTags),
|
||||||
|
}
|
||||||
|
|
||||||
|
newSceneJSON.Markers = append(newSceneJSON.Markers, sceneMarkerJSON)
|
||||||
|
}
|
||||||
|
|
||||||
|
newSceneJSON.File = &jsonschema.SceneFile{}
|
||||||
|
if scene.Size.Valid {
|
||||||
|
newSceneJSON.File.Size = scene.Size.String
|
||||||
|
}
|
||||||
|
if scene.Duration.Valid {
|
||||||
|
newSceneJSON.File.Duration = t.getDecimalString(scene.Duration.Float64)
|
||||||
|
}
|
||||||
|
if scene.VideoCodec.Valid {
|
||||||
|
newSceneJSON.File.VideoCodec = scene.VideoCodec.String
|
||||||
|
}
|
||||||
|
if scene.AudioCodec.Valid {
|
||||||
|
newSceneJSON.File.AudioCodec = scene.AudioCodec.String
|
||||||
|
}
|
||||||
|
if scene.Width.Valid {
|
||||||
|
newSceneJSON.File.Width = int(scene.Width.Int64)
|
||||||
|
}
|
||||||
|
if scene.Height.Valid {
|
||||||
|
newSceneJSON.File.Height = int(scene.Height.Int64)
|
||||||
|
}
|
||||||
|
if scene.Framerate.Valid {
|
||||||
|
newSceneJSON.File.Framerate = t.getDecimalString(scene.Framerate.Float64)
|
||||||
|
}
|
||||||
|
if scene.Bitrate.Valid {
|
||||||
|
newSceneJSON.File.Bitrate = int(scene.Bitrate.Int64)
|
||||||
|
}
|
||||||
|
|
||||||
|
sceneJSON, err := instance.JSON.getScene(scene.Checksum)
|
||||||
|
if err != nil {
|
||||||
|
logger.Errorf("[scenes] error reading scene json: %s", err.Error())
|
||||||
|
}
|
||||||
|
if jsonschema.CompareJSON(*sceneJSON, newSceneJSON) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := instance.JSON.saveScene(scene.Checksum, &newSceneJSON); err != nil {
|
||||||
|
logger.Errorf("[scenes] <%s> failed to save json: %s", scene.Checksum, err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Infof("[scenes] export complete")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *ExportTask) ExportGalleries(ctx context.Context) {
|
||||||
|
qb := models.NewGalleryQueryBuilder()
|
||||||
|
galleries, err := qb.All()
|
||||||
|
if err != nil {
|
||||||
|
logger.Errorf("[galleries] failed to fetch all galleries: %s", err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Info("[galleries] exporting")
|
||||||
|
|
||||||
|
for i, gallery := range galleries {
|
||||||
|
index := i + 1
|
||||||
|
logger.Progressf("[galleries] %d of %d", index, len(galleries))
|
||||||
|
t.Mappings.Galleries = append(t.Mappings.Galleries, jsonschema.PathMapping{ Path: gallery.Path, Checksum: gallery.Checksum })
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Infof("[galleries] export complete")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *ExportTask) ExportPerformers(ctx context.Context) {
|
||||||
|
qb := models.NewPerformerQueryBuilder()
|
||||||
|
performers, err := qb.All()
|
||||||
|
if err != nil {
|
||||||
|
logger.Errorf("[performers] failed to fetch all performers: %s", err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Info("[performers] exporting")
|
||||||
|
|
||||||
|
for i, performer := range performers {
|
||||||
|
index := i + 1
|
||||||
|
logger.Progressf("[performers] %d of %d", index, len(performers))
|
||||||
|
|
||||||
|
t.Mappings.Performers = append(t.Mappings.Performers, jsonschema.NameMapping{ Name: performer.Name.String, Checksum: performer.Checksum })
|
||||||
|
|
||||||
|
newPerformerJSON := jsonschema.Performer{}
|
||||||
|
|
||||||
|
if performer.Name.Valid {
|
||||||
|
newPerformerJSON.Name = performer.Name.String
|
||||||
|
}
|
||||||
|
if performer.Url.Valid {
|
||||||
|
newPerformerJSON.Url = performer.Url.String
|
||||||
|
}
|
||||||
|
if performer.Birthdate.Valid {
|
||||||
|
newPerformerJSON.Birthdate = utils.GetYMDFromDatabaseDate(performer.Birthdate.String)
|
||||||
|
}
|
||||||
|
if performer.Ethnicity.Valid {
|
||||||
|
newPerformerJSON.Ethnicity = performer.Ethnicity.String
|
||||||
|
}
|
||||||
|
if performer.Country.Valid {
|
||||||
|
newPerformerJSON.Country = performer.Country.String
|
||||||
|
}
|
||||||
|
if performer.EyeColor.Valid {
|
||||||
|
newPerformerJSON.EyeColor = performer.EyeColor.String
|
||||||
|
}
|
||||||
|
if performer.Height.Valid {
|
||||||
|
newPerformerJSON.Height = performer.Height.String
|
||||||
|
}
|
||||||
|
if performer.Measurements.Valid {
|
||||||
|
newPerformerJSON.Measurements = performer.Measurements.String
|
||||||
|
}
|
||||||
|
if performer.FakeTits.Valid {
|
||||||
|
newPerformerJSON.FakeTits = performer.FakeTits.String
|
||||||
|
}
|
||||||
|
if performer.CareerLength.Valid {
|
||||||
|
newPerformerJSON.CareerLength = performer.CareerLength.String
|
||||||
|
}
|
||||||
|
if performer.Tattoos.Valid {
|
||||||
|
newPerformerJSON.Tattoos = performer.Tattoos.String
|
||||||
|
}
|
||||||
|
if performer.Piercings.Valid {
|
||||||
|
newPerformerJSON.Piercings = performer.Piercings.String
|
||||||
|
}
|
||||||
|
if performer.Aliases.Valid {
|
||||||
|
newPerformerJSON.Aliases = performer.Aliases.String
|
||||||
|
}
|
||||||
|
if performer.Twitter.Valid {
|
||||||
|
newPerformerJSON.Twitter = performer.Twitter.String
|
||||||
|
}
|
||||||
|
if performer.Instagram.Valid {
|
||||||
|
newPerformerJSON.Instagram = performer.Instagram.String
|
||||||
|
}
|
||||||
|
if performer.Favorite.Valid {
|
||||||
|
newPerformerJSON.Favorite = performer.Favorite.Bool
|
||||||
|
}
|
||||||
|
|
||||||
|
newPerformerJSON.Image = utils.GetBase64StringFromData(performer.Image)
|
||||||
|
|
||||||
|
performerJSON, err := instance.JSON.getPerformer(performer.Checksum)
|
||||||
|
if err != nil {
|
||||||
|
logger.Errorf("[performers] error reading performer json: %s", err.Error())
|
||||||
|
}
|
||||||
|
if jsonschema.CompareJSON(*performerJSON, newPerformerJSON) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := instance.JSON.savePerformer(performer.Checksum, &newPerformerJSON); err != nil {
|
||||||
|
logger.Errorf("[performers] <%s> failed to save json: %s", performer.Checksum, err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Infof("[performers] export complete")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *ExportTask) ExportStudios(ctx context.Context) {
|
||||||
|
qb := models.NewStudioQueryBuilder()
|
||||||
|
studios, err := qb.All()
|
||||||
|
if err != nil {
|
||||||
|
logger.Errorf("[studios] failed to fetch all studios: %s", err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Info("[studios] exporting")
|
||||||
|
|
||||||
|
for i, studio := range studios {
|
||||||
|
index := i + 1
|
||||||
|
logger.Progressf("[studios] %d of %d", index, len(studios))
|
||||||
|
|
||||||
|
t.Mappings.Studios = append(t.Mappings.Studios, jsonschema.NameMapping{ Name: studio.Name.String, Checksum: studio.Checksum })
|
||||||
|
|
||||||
|
newStudioJSON := jsonschema.Studio{}
|
||||||
|
|
||||||
|
if studio.Name.Valid {
|
||||||
|
newStudioJSON.Name = studio.Name.String
|
||||||
|
}
|
||||||
|
if studio.Url.Valid {
|
||||||
|
newStudioJSON.Url = studio.Url.String
|
||||||
|
}
|
||||||
|
|
||||||
|
newStudioJSON.Image = utils.GetBase64StringFromData(studio.Image)
|
||||||
|
|
||||||
|
studioJSON, err := instance.JSON.getStudio(studio.Checksum)
|
||||||
|
if err != nil {
|
||||||
|
logger.Errorf("[studios] error reading studio json: %s", err.Error())
|
||||||
|
}
|
||||||
|
if jsonschema.CompareJSON(*studioJSON, newStudioJSON) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := instance.JSON.saveStudio(studio.Checksum, &newStudioJSON); err != nil {
|
||||||
|
logger.Errorf("[studios] <%s> failed to save json: %s", studio.Checksum, err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Infof("[studios] export complete")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *ExportTask) ExportScrapedItems(ctx context.Context) {
|
||||||
|
tx := database.DB.MustBeginTx(ctx, nil)
|
||||||
|
qb := models.NewScrapedItemQueryBuilder()
|
||||||
|
sqb := models.NewStudioQueryBuilder()
|
||||||
|
scrapedItems, err := qb.All()
|
||||||
|
if err != nil {
|
||||||
|
logger.Errorf("[scraped sites] failed to fetch all items: %s", err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Info("[scraped sites] exporting")
|
||||||
|
|
||||||
|
for i, scrapedItem := range scrapedItems {
|
||||||
|
index := i + 1
|
||||||
|
logger.Progressf("[scraped sites] %d of %d", index, len(scrapedItems))
|
||||||
|
|
||||||
|
var studioName string
|
||||||
|
if scrapedItem.StudioID.Valid {
|
||||||
|
studio, _ := sqb.Find(int(scrapedItem.StudioID.Int64), tx)
|
||||||
|
if studio != nil {
|
||||||
|
studioName = studio.Name.String
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
newScrapedItemJSON := jsonschema.ScrapedItem{}
|
||||||
|
|
||||||
|
if scrapedItem.Title.Valid {
|
||||||
|
newScrapedItemJSON.Title = scrapedItem.Title.String
|
||||||
|
}
|
||||||
|
if scrapedItem.Description.Valid {
|
||||||
|
newScrapedItemJSON.Description = scrapedItem.Description.String
|
||||||
|
}
|
||||||
|
if scrapedItem.Url.Valid {
|
||||||
|
newScrapedItemJSON.Url = scrapedItem.Url.String
|
||||||
|
}
|
||||||
|
if scrapedItem.Date.Valid {
|
||||||
|
newScrapedItemJSON.Date = utils.GetYMDFromDatabaseDate(scrapedItem.Date.String)
|
||||||
|
}
|
||||||
|
if scrapedItem.Rating.Valid {
|
||||||
|
newScrapedItemJSON.Rating = scrapedItem.Rating.String
|
||||||
|
}
|
||||||
|
if scrapedItem.Tags.Valid {
|
||||||
|
newScrapedItemJSON.Tags = scrapedItem.Tags.String
|
||||||
|
}
|
||||||
|
if scrapedItem.Models.Valid {
|
||||||
|
newScrapedItemJSON.Models = scrapedItem.Models.String
|
||||||
|
}
|
||||||
|
if scrapedItem.Episode.Valid {
|
||||||
|
newScrapedItemJSON.Episode = int(scrapedItem.Episode.Int64)
|
||||||
|
}
|
||||||
|
if scrapedItem.GalleryFilename.Valid {
|
||||||
|
newScrapedItemJSON.GalleryFilename = scrapedItem.GalleryFilename.String
|
||||||
|
}
|
||||||
|
if scrapedItem.GalleryUrl.Valid {
|
||||||
|
newScrapedItemJSON.GalleryUrl = scrapedItem.GalleryUrl.String
|
||||||
|
}
|
||||||
|
if scrapedItem.VideoFilename.Valid {
|
||||||
|
newScrapedItemJSON.VideoFilename = scrapedItem.VideoFilename.String
|
||||||
|
}
|
||||||
|
if scrapedItem.VideoUrl.Valid {
|
||||||
|
newScrapedItemJSON.VideoUrl = scrapedItem.VideoUrl.String
|
||||||
|
}
|
||||||
|
|
||||||
|
newScrapedItemJSON.Studio = studioName
|
||||||
|
updatedAt := jsonschema.RailsTime{ Time: scrapedItem.UpdatedAt.Timestamp } // TODO keeping ruby format
|
||||||
|
newScrapedItemJSON.UpdatedAt = updatedAt
|
||||||
|
|
||||||
|
t.Scraped = append(t.Scraped, newScrapedItemJSON)
|
||||||
|
}
|
||||||
|
|
||||||
|
scrapedJSON, err := instance.JSON.getScraped()
|
||||||
|
if err != nil {
|
||||||
|
logger.Errorf("[scraped sites] error reading json: %s", err.Error())
|
||||||
|
}
|
||||||
|
if !jsonschema.CompareJSON(scrapedJSON, t.Scraped) {
|
||||||
|
if err := instance.JSON.saveScaped(t.Scraped); err != nil {
|
||||||
|
logger.Errorf("[scraped sites] failed to save json: %s", err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Infof("[scraped sites] export complete")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *ExportTask) getPerformerNames(performers []models.Performer) []string {
|
||||||
|
if len(performers) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var results []string
|
||||||
|
for _, performer := range performers {
|
||||||
|
if performer.Name.Valid {
|
||||||
|
results = append(results, performer.Name.String)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *ExportTask) getTagNames(tags []models.Tag) []string {
|
||||||
|
if len(tags) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var results []string
|
||||||
|
for _, tag := range tags {
|
||||||
|
if tag.Name != "" {
|
||||||
|
results = append(results, tag.Name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *ExportTask) getDecimalString(num float64) string {
|
||||||
|
if num == 0 {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
precision := getPrecision(num)
|
||||||
|
if precision == 0 {
|
||||||
|
precision = 1
|
||||||
|
}
|
||||||
|
return fmt.Sprintf("%."+strconv.Itoa(precision)+"f", num)
|
||||||
|
}
|
||||||
|
|
||||||
|
func getPrecision(num float64) int {
|
||||||
|
if num == 0 {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
e := 1.0
|
||||||
|
p := 0
|
||||||
|
for (math.Round(num * e) / e) != num {
|
||||||
|
e *= 10
|
||||||
|
p += 1
|
||||||
|
}
|
||||||
|
return p
|
||||||
|
}
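getPrecision scales the value by powers of ten until rounding no longer changes it, which yields the number of decimal digits, and getDecimalString then formats with at least one decimal place (zero is treated as "no value"). A few worked examples, assuming exact enough float64 arithmetic:

// Illustrative values only:
//   getPrecision(2.0)    == 0   getDecimalString(2.0)    == "2.0"   (precision bumped to 1)
//   getPrecision(0.5)    == 1   getDecimalString(0.5)    == "0.5"
//   getPrecision(23.976) == 3   getDecimalString(23.976) == "23.976"
//   getDecimalString(0)  == ""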
|
||||||
626
internal/manager/task_import.go
Normal file
@ -0,0 +1,626 @@
package manager
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"github.com/jmoiron/sqlx"
|
||||||
|
"github.com/stashapp/stash/internal/database"
|
||||||
|
"github.com/stashapp/stash/internal/logger"
|
||||||
|
"github.com/stashapp/stash/internal/manager/jsonschema"
|
||||||
|
"github.com/stashapp/stash/internal/models"
|
||||||
|
"github.com/stashapp/stash/internal/utils"
|
||||||
|
"strconv"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ImportTask struct {
|
||||||
|
Mappings *jsonschema.Mappings
|
||||||
|
Scraped []jsonschema.ScrapedItem
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *ImportTask) Start(wg *sync.WaitGroup) {
|
||||||
|
t.Mappings, _ = instance.JSON.getMappings()
|
||||||
|
if t.Mappings == nil {
|
||||||
|
panic("missing mappings json")
|
||||||
|
}
|
||||||
|
scraped, _ := instance.JSON.getScraped()
|
||||||
|
if scraped == nil {
|
||||||
|
logger.Warn("missing scraped json")
|
||||||
|
}
|
||||||
|
t.Scraped = scraped
|
||||||
|
|
||||||
|
database.Reset(instance.Paths.FixedPaths.DatabaseFile)
|
||||||
|
|
||||||
|
ctx := context.TODO()
|
||||||
|
|
||||||
|
t.ImportPerformers(ctx)
|
||||||
|
t.ImportStudios(ctx)
|
||||||
|
t.ImportGalleries(ctx)
|
||||||
|
t.ImportTags(ctx)
|
||||||
|
|
||||||
|
t.ImportScrapedItems(ctx)
|
||||||
|
t.ImportScenes(ctx)
|
||||||
|
|
||||||
|
wg.Done()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *ImportTask) ImportPerformers(ctx context.Context) {
|
||||||
|
tx := database.DB.MustBeginTx(ctx, nil)
|
||||||
|
qb := models.NewPerformerQueryBuilder()
|
||||||
|
|
||||||
|
for i, mappingJSON := range t.Mappings.Performers {
|
||||||
|
index := i + 1
|
||||||
|
performerJSON, err := instance.JSON.getPerformer(mappingJSON.Checksum)
|
||||||
|
if err != nil {
|
||||||
|
logger.Errorf("[performers] failed to read json: %s", err.Error())
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if mappingJSON.Checksum == "" || mappingJSON.Name == "" || performerJSON == nil { return }
|
||||||
|
|
||||||
|
logger.Progressf("[performers] %d of %d", index, len(t.Mappings.Performers))
|
||||||
|
|
||||||
|
// Process the base 64 encoded image string
|
||||||
|
checksum, imageData, err := utils.ProcessBase64Image(performerJSON.Image)
|
||||||
|
if err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
logger.Errorf("[performers] <%s> invalid image: %s", mappingJSON.Checksum, err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Populate a new performer from the input
|
||||||
|
currentTime := time.Now()
|
||||||
|
newPerformer := models.Performer{
|
||||||
|
Image: imageData,
|
||||||
|
Checksum: checksum,
|
||||||
|
Favorite: sql.NullBool{ Bool: performerJSON.Favorite, Valid: true },
|
||||||
|
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||||
|
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||||
|
}
|
||||||
|
|
||||||
|
if performerJSON.Name != "" {
|
||||||
|
newPerformer.Name = sql.NullString{ String: performerJSON.Name, Valid: true }
|
||||||
|
}
|
||||||
|
if performerJSON.Url != "" {
|
||||||
|
newPerformer.Url = sql.NullString{ String: performerJSON.Url, Valid: true }
|
||||||
|
}
|
||||||
|
if performerJSON.Birthdate != "" {
|
||||||
|
newPerformer.Birthdate = sql.NullString{ String: performerJSON.Birthdate, Valid: true }
|
||||||
|
}
|
||||||
|
if performerJSON.Ethnicity != "" {
|
||||||
|
newPerformer.Ethnicity = sql.NullString{ String: performerJSON.Ethnicity, Valid: true }
|
||||||
|
}
|
||||||
|
if performerJSON.Country != "" {
|
||||||
|
newPerformer.Country = sql.NullString{ String: performerJSON.Country, Valid: true }
|
||||||
|
}
|
||||||
|
if performerJSON.EyeColor != "" {
|
||||||
|
newPerformer.EyeColor = sql.NullString{ String: performerJSON.EyeColor, Valid: true }
|
||||||
|
}
|
||||||
|
if performerJSON.Height != "" {
|
||||||
|
newPerformer.Height = sql.NullString{ String: performerJSON.Height, Valid: true }
|
||||||
|
}
|
||||||
|
if performerJSON.Measurements != "" {
|
||||||
|
newPerformer.Measurements = sql.NullString{ String: performerJSON.Measurements, Valid: true }
|
||||||
|
}
|
||||||
|
if performerJSON.FakeTits != "" {
|
||||||
|
newPerformer.FakeTits = sql.NullString{ String: performerJSON.FakeTits, Valid: true }
|
||||||
|
}
|
||||||
|
if performerJSON.CareerLength != "" {
|
||||||
|
newPerformer.CareerLength = sql.NullString{ String: performerJSON.CareerLength, Valid: true }
|
||||||
|
}
|
||||||
|
if performerJSON.Tattoos != "" {
|
||||||
|
newPerformer.Tattoos = sql.NullString{ String: performerJSON.Tattoos, Valid: true }
|
||||||
|
}
|
||||||
|
if performerJSON.Piercings != "" {
|
||||||
|
newPerformer.Piercings = sql.NullString{ String: performerJSON.Piercings, Valid: true }
|
||||||
|
}
|
||||||
|
if performerJSON.Aliases != "" {
|
||||||
|
newPerformer.Aliases = sql.NullString{ String: performerJSON.Aliases, Valid: true }
|
||||||
|
}
|
||||||
|
if performerJSON.Twitter != "" {
|
||||||
|
newPerformer.Twitter = sql.NullString{ String: performerJSON.Twitter, Valid: true }
|
||||||
|
}
|
||||||
|
if performerJSON.Instagram != "" {
|
||||||
|
newPerformer.Instagram = sql.NullString{ String: performerJSON.Instagram, Valid: true }
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = qb.Create(newPerformer, tx)
|
||||||
|
if err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
logger.Errorf("[performers] <%s> failed to create: %s", mappingJSON.Checksum, err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Info("[performers] importing")
|
||||||
|
if err := tx.Commit(); err != nil {
|
||||||
|
logger.Errorf("[performers] import failed to commit: %s", err.Error())
|
||||||
|
}
|
||||||
|
logger.Info("[performers] import complete")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *ImportTask) ImportStudios(ctx context.Context) {
|
||||||
|
tx := database.DB.MustBeginTx(ctx, nil)
|
||||||
|
qb := models.NewStudioQueryBuilder()
|
||||||
|
|
||||||
|
for i, mappingJSON := range t.Mappings.Studios {
|
||||||
|
index := i + 1
|
||||||
|
studioJSON, err := instance.JSON.getStudio(mappingJSON.Checksum)
|
||||||
|
if err != nil {
|
||||||
|
logger.Errorf("[studios] failed to read json: %s", err.Error())
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if mappingJSON.Checksum == "" || mappingJSON.Name == "" || studioJSON == nil { return }
|
||||||
|
|
||||||
|
logger.Progressf("[studios] %d of %d", index, len(t.Mappings.Studios))
|
||||||
|
|
||||||
|
// Process the base 64 encoded image string
|
||||||
|
checksum, imageData, err := utils.ProcessBase64Image(studioJSON.Image)
|
||||||
|
if err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
logger.Errorf("[studios] <%s> invalid image: %s", mappingJSON.Checksum, err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Populate a new studio from the input
|
||||||
|
currentTime := time.Now()
|
||||||
|
newStudio := models.Studio{
|
||||||
|
Image: imageData,
|
||||||
|
Checksum: checksum,
|
||||||
|
Name: sql.NullString{ String: studioJSON.Name, Valid: true },
|
||||||
|
Url: sql.NullString{ String: studioJSON.Url, Valid: true },
|
||||||
|
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||||
|
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = qb.Create(newStudio, tx)
|
||||||
|
if err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
logger.Errorf("[studios] <%s> failed to create: %s", mappingJSON.Checksum, err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Info("[studios] importing")
|
||||||
|
if err := tx.Commit(); err != nil {
|
||||||
|
logger.Errorf("[studios] import failed to commit: %s", err.Error())
|
||||||
|
}
|
||||||
|
logger.Info("[studios] import complete")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *ImportTask) ImportGalleries(ctx context.Context) {
|
||||||
|
tx := database.DB.MustBeginTx(ctx, nil)
|
||||||
|
qb := models.NewGalleryQueryBuilder()
|
||||||
|
|
||||||
|
for i, mappingJSON := range t.Mappings.Galleries {
|
||||||
|
index := i + 1
|
||||||
|
if mappingJSON.Checksum == "" || mappingJSON.Path == "" { return }
|
||||||
|
|
||||||
|
logger.Progressf("[galleries] %d of %d", index, len(t.Mappings.Galleries))
|
||||||
|
|
||||||
|
// Populate a new gallery from the input
|
||||||
|
currentTime := time.Now()
|
||||||
|
newGallery := models.Gallery{
|
||||||
|
Checksum: mappingJSON.Checksum,
|
||||||
|
Path: mappingJSON.Path,
|
||||||
|
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||||
|
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err := qb.Create(newGallery, tx)
|
||||||
|
if err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
logger.Errorf("[galleries] <%s> failed to create: %s", mappingJSON.Checksum, err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Info("[galleries] importing")
|
||||||
|
if err := tx.Commit(); err != nil {
|
||||||
|
logger.Errorf("[galleries] import failed to commit: %s", err.Error())
|
||||||
|
}
|
||||||
|
logger.Info("[galleries] import complete")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *ImportTask) ImportTags(ctx context.Context) {
|
||||||
|
tx := database.DB.MustBeginTx(ctx, nil)
|
||||||
|
qb := models.NewTagQueryBuilder()
|
||||||
|
|
||||||
|
var tagNames []string
|
||||||
|
|
||||||
|
for i, mappingJSON := range t.Mappings.Scenes {
|
||||||
|
index := i + 1
|
||||||
|
if mappingJSON.Checksum == "" || mappingJSON.Path == "" {
|
||||||
|
logger.Warn("[tags] scene mapping without checksum or path: ", mappingJSON)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Progressf("[tags] %d of %d scenes", index, len(t.Mappings.Scenes))
|
||||||
|
|
||||||
|
sceneJSON, err := instance.JSON.getScene(mappingJSON.Checksum)
|
||||||
|
if err != nil {
|
||||||
|
logger.Infof("[tags] <%s> json parse failure: %s", mappingJSON.Checksum, err.Error())
|
||||||
|
}
|
||||||
|
// Return early if we are missing a json file.
|
||||||
|
if sceneJSON == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the tags from the tags json if we have it
|
||||||
|
if len(sceneJSON.Tags) > 0 {
|
||||||
|
tagNames = append(tagNames, sceneJSON.Tags...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the tags from the markers if we have marker json
|
||||||
|
if len(sceneJSON.Markers) == 0 { continue }
|
||||||
|
for _, markerJSON := range sceneJSON.Markers {
|
||||||
|
if markerJSON.PrimaryTag != "" {
|
||||||
|
tagNames = append(tagNames, markerJSON.PrimaryTag)
|
||||||
|
}
|
||||||
|
if len(markerJSON.Tags) > 0 {
|
||||||
|
tagNames = append(tagNames, markerJSON.Tags...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
uniqueTagNames := t.getUnique(tagNames)
|
||||||
|
for _, tagName := range uniqueTagNames {
|
||||||
|
currentTime := time.Now()
|
||||||
|
newTag := models.Tag{
|
||||||
|
Name: tagName,
|
||||||
|
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||||
|
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err := qb.Create(newTag, tx)
|
||||||
|
if err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
logger.Errorf("[tags] <%s> failed to create: %s", tagName, err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Info("[tags] importing")
|
||||||
|
if err := tx.Commit(); err != nil {
|
||||||
|
logger.Errorf("[tags] import failed to commit: %s", err.Error())
|
||||||
|
}
|
||||||
|
logger.Info("[tags] import complete")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *ImportTask) ImportScrapedItems(ctx context.Context) {
|
||||||
|
tx := database.DB.MustBeginTx(ctx, nil)
|
||||||
|
qb := models.NewScrapedItemQueryBuilder()
|
||||||
|
sqb := models.NewStudioQueryBuilder()
|
||||||
|
currentTime := time.Now()
|
||||||
|
|
||||||
|
for i, mappingJSON := range t.Scraped {
|
||||||
|
index := i + 1
|
||||||
|
logger.Progressf("[scraped sites] %d of %d", index, len(t.Scraped))
|
||||||
|
|
||||||
|
var updatedAt time.Time
|
||||||
|
if currentTime.Location() != nil {
|
||||||
|
updatedAt = mappingJSON.UpdatedAt.Time.In(currentTime.Location())
|
||||||
|
} else {
|
||||||
|
updatedAt = mappingJSON.UpdatedAt.Time
|
||||||
|
}
|
||||||
|
newScrapedItem := models.ScrapedItem{
|
||||||
|
Title: sql.NullString{String: mappingJSON.Title, Valid: true},
|
||||||
|
Description: sql.NullString{String: mappingJSON.Description, Valid: true},
|
||||||
|
Url: sql.NullString{String: mappingJSON.Url, Valid: true},
|
||||||
|
Date: sql.NullString{String: mappingJSON.Date, Valid: true},
|
||||||
|
Rating: sql.NullString{String: mappingJSON.Rating, Valid: true},
|
||||||
|
Tags: sql.NullString{String: mappingJSON.Tags, Valid: true},
|
||||||
|
Models: sql.NullString{String: mappingJSON.Models, Valid: true},
|
||||||
|
Episode: sql.NullInt64{Int64: int64(mappingJSON.Episode), Valid: true},
|
||||||
|
GalleryFilename: sql.NullString{String: mappingJSON.GalleryFilename, Valid: true},
|
||||||
|
GalleryUrl: sql.NullString{String: mappingJSON.GalleryUrl, Valid: true},
|
||||||
|
VideoFilename: sql.NullString{String: mappingJSON.VideoFilename, Valid: true},
|
||||||
|
VideoUrl: sql.NullString{String: mappingJSON.VideoUrl, Valid: true},
|
||||||
|
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
|
||||||
|
UpdatedAt: models.SQLiteTimestamp{Timestamp: updatedAt},
|
||||||
|
}
|
||||||
|
|
||||||
|
studio, err := sqb.FindByName(mappingJSON.Studio, tx)
|
||||||
|
if err != nil {
|
||||||
|
logger.Errorf("[scraped sites] failed to fetch studio: %s", err.Error())
|
||||||
|
}
|
||||||
|
if studio != nil {
|
||||||
|
newScrapedItem.StudioID = sql.NullInt64{Int64: int64(studio.ID), Valid: true}
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = qb.Create(newScrapedItem, tx)
|
||||||
|
if err != nil {
|
||||||
|
logger.Errorf("[scraped sites] <%s> failed to create: %s", newScrapedItem.Title, err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Info("[scraped sites] importing")
|
||||||
|
if err := tx.Commit(); err != nil {
|
||||||
|
logger.Errorf("[scraped sites] import failed to commit: %s", err.Error())
|
||||||
|
}
|
||||||
|
logger.Info("[scraped sites] import complete")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *ImportTask) ImportScenes(ctx context.Context) {
|
||||||
|
tx := database.DB.MustBeginTx(ctx, nil)
|
||||||
|
qb := models.NewSceneQueryBuilder()
|
||||||
|
jqb := models.NewJoinsQueryBuilder()
|
||||||
|
currentTime := time.Now()
|
||||||
|
|
||||||
|
for i, mappingJSON := range t.Mappings.Scenes {
|
||||||
|
index := i + 1
|
||||||
|
if mappingJSON.Checksum == "" || mappingJSON.Path == "" {
|
||||||
|
logger.Warn("[scenes] scene mapping without checksum or path: ", mappingJSON)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Progressf("[scenes] %d of %d", index, len(t.Mappings.Scenes))
|
||||||
|
|
||||||
|
newScene := models.Scene{
|
||||||
|
Checksum: mappingJSON.Checksum,
|
||||||
|
Path: mappingJSON.Path,
|
||||||
|
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||||
|
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||||
|
}
|
||||||
|
|
||||||
|
sceneJSON, err := instance.JSON.getScene(mappingJSON.Checksum)
|
||||||
|
if err != nil {
|
||||||
|
logger.Infof("[scenes] <%s> json parse failure: %s", mappingJSON.Checksum, err.Error())
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Populate scene fields
|
||||||
|
if sceneJSON != nil {
|
||||||
|
if sceneJSON.Title != "" {
|
||||||
|
newScene.Title = sql.NullString{String: sceneJSON.Title, Valid: true}
|
||||||
|
}
|
||||||
|
if sceneJSON.Details != "" {
|
||||||
|
newScene.Details = sql.NullString{String: sceneJSON.Details, Valid: true}
|
||||||
|
}
|
||||||
|
if sceneJSON.Url != "" {
|
||||||
|
newScene.Url = sql.NullString{String: sceneJSON.Url, Valid: true}
|
||||||
|
}
|
||||||
|
if sceneJSON.Date != "" {
|
||||||
|
newScene.Date = sql.NullString{String: sceneJSON.Date, Valid: true}
|
||||||
|
}
|
||||||
|
if sceneJSON.Rating != 0 {
|
||||||
|
newScene.Rating = sql.NullInt64{Int64: int64(sceneJSON.Rating), Valid: true}
|
||||||
|
}
|
||||||
|
|
||||||
|
if sceneJSON.File != nil {
|
||||||
|
if sceneJSON.File.Size != "" {
|
||||||
|
newScene.Size = sql.NullString{String: sceneJSON.File.Size, Valid: true}
|
||||||
|
}
|
||||||
|
if sceneJSON.File.Duration != "" {
|
||||||
|
duration, _ := strconv.ParseFloat(sceneJSON.File.Duration, 64)
|
||||||
|
newScene.Duration = sql.NullFloat64{Float64: duration, Valid: true}
|
||||||
|
}
|
||||||
|
if sceneJSON.File.VideoCodec != "" {
|
||||||
|
newScene.VideoCodec = sql.NullString{String: sceneJSON.File.VideoCodec, Valid: true}
|
||||||
|
}
|
||||||
|
if sceneJSON.File.AudioCodec != "" {
|
||||||
|
newScene.AudioCodec = sql.NullString{String: sceneJSON.File.AudioCodec, Valid: true}
|
||||||
|
}
|
||||||
|
if sceneJSON.File.Width != 0 {
|
||||||
|
newScene.Width = sql.NullInt64{Int64: int64(sceneJSON.File.Width), Valid: true}
|
||||||
|
}
|
||||||
|
if sceneJSON.File.Height != 0 {
|
||||||
|
newScene.Height = sql.NullInt64{Int64: int64(sceneJSON.File.Height), Valid: true}
|
||||||
|
}
|
||||||
|
if sceneJSON.File.Framerate != "" {
|
||||||
|
framerate, _ := strconv.ParseFloat(sceneJSON.File.Framerate, 64)
|
||||||
|
newScene.Framerate = sql.NullFloat64{Float64: framerate, Valid: true}
|
||||||
|
}
|
||||||
|
if sceneJSON.File.Bitrate != 0 {
|
||||||
|
newScene.Bitrate = sql.NullInt64{Int64: int64(sceneJSON.File.Bitrate), Valid: true}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// TODO: Get FFMPEG data?
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Populate the studio ID
|
||||||
|
if sceneJSON.Studio != "" {
|
||||||
|
sqb := models.NewStudioQueryBuilder()
|
||||||
|
studio, err := sqb.FindByName(sceneJSON.Studio, tx)
|
||||||
|
if err != nil {
|
||||||
|
logger.Warn("[scenes] studio <%s> does not exist: %s", sceneJSON.Studio, err)
|
||||||
|
} else {
|
||||||
|
newScene.StudioID = sql.NullInt64{ Int64: int64(studio.ID), Valid: true }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create the scene in the DB
|
||||||
|
scene, err := qb.Create(newScene, tx)
|
||||||
|
if err != nil {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
logger.Errorf("[scenes] <%s> failed to create: %s", scene.Checksum, err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if scene.ID == 0 {
|
||||||
|
_ = tx.Rollback()
|
||||||
|
logger.Errorf("[scenes] <%s> invalid id after scene creation", mappingJSON.Checksum)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Relate the scene to the gallery
|
||||||
|
if sceneJSON.Gallery != "" {
|
||||||
|
gqb := models.NewGalleryQueryBuilder()
|
||||||
|
gallery, err := gqb.FindByChecksum(sceneJSON.Gallery, tx)
|
||||||
|
if err != nil {
|
||||||
|
logger.Warn("[scenes] gallery <%s> does not exist: %s", sceneJSON.Gallery, err)
|
||||||
|
} else {
|
||||||
|
gallery.SceneID = sql.NullInt64{ Int64: int64(scene.ID), Valid: true }
|
||||||
|
_, err := gqb.Update(*gallery, tx)
|
||||||
|
if err != nil {
|
||||||
|
logger.Errorf("[scenes] <%s> failed to update gallery: %s", scene.Checksum, err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Relate the scene to the performers
|
||||||
|
if len(sceneJSON.Performers) > 0 {
|
||||||
|
performers, err := t.getPerformers(sceneJSON.Performers, tx)
|
||||||
|
if err != nil {
|
||||||
|
logger.Warn("[scenes] <%s> failed to fetch performers: %s", scene.Checksum, err)
|
||||||
|
} else {
|
||||||
|
var performerJoins []models.PerformersScenes
|
||||||
|
for _, performer := range performers {
|
||||||
|
join := models.PerformersScenes{
|
||||||
|
PerformerID: performer.ID,
|
||||||
|
SceneID: scene.ID,
|
||||||
|
}
|
||||||
|
performerJoins = append(performerJoins, join)
|
||||||
|
}
|
||||||
|
if err := jqb.CreatePerformersScenes(performerJoins, tx); err != nil {
|
||||||
|
logger.Errorf("[scenes] <%s> failed to associate performers: %s", scene.Checksum, err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Relate the scene to the tags
|
||||||
|
if len(sceneJSON.Tags) > 0 {
|
||||||
|
tags, err := t.getTags(scene.Checksum, sceneJSON.Tags, tx)
|
||||||
|
if err != nil {
|
||||||
|
logger.Warn("[scenes] <%s> failed to fetch tags: %s", scene.Checksum, err)
|
||||||
|
} else {
|
||||||
|
var tagJoins []models.ScenesTags
|
||||||
|
for _, tag := range tags {
|
||||||
|
join := models.ScenesTags{
|
||||||
|
SceneID: scene.ID,
|
||||||
|
TagID: tag.ID,
|
||||||
|
}
|
||||||
|
tagJoins = append(tagJoins, join)
|
||||||
|
}
|
||||||
|
if err := jqb.CreateScenesTags(tagJoins, tx); err != nil {
|
||||||
|
logger.Errorf("[scenes] <%s> failed to associate tags: %s", scene.Checksum, err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Relate the scene to the scene markers
|
||||||
|
if len(sceneJSON.Markers) > 0 {
|
||||||
|
smqb := models.NewSceneMarkerQueryBuilder()
|
||||||
|
tqb := models.NewTagQueryBuilder()
|
||||||
|
for _, marker := range sceneJSON.Markers {
|
||||||
|
seconds, _ := strconv.ParseFloat(marker.Seconds, 64)
|
||||||
|
newSceneMarker := models.SceneMarker{
|
||||||
|
Title: marker.Title,
|
||||||
|
Seconds: seconds,
|
||||||
|
SceneID: sql.NullInt64{Int64: int64(scene.ID), Valid: true},
|
||||||
|
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||||
|
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||||
|
}
|
||||||
|
|
||||||
|
primaryTag, err := tqb.FindByName(marker.PrimaryTag, tx)
|
||||||
|
if err != nil {
|
||||||
|
logger.Errorf("[scenes] <%s> failed to find primary tag for marker: %s", scene.Checksum, err.Error())
|
||||||
|
} else {
|
||||||
|
newSceneMarker.PrimaryTagID = sql.NullInt64{Int64: int64(primaryTag.ID), Valid: true}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create the scene marker in the DB
|
||||||
|
sceneMarker, err := smqb.Create(newSceneMarker, tx)
|
||||||
|
if err != nil {
|
||||||
|
logger.Warnf("[scenes] <%s> failed to create scene marker: %s", scene.Checksum, err.Error())
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if sceneMarker.ID == 0 {
|
||||||
|
logger.Warnf("[scenes] <%s> invalid scene marker id after scene marker creation", scene.Checksum)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the scene marker tags and create the joins
|
||||||
|
tags, err := t.getTags(scene.Checksum, marker.Tags, tx)
|
||||||
|
if err != nil {
|
||||||
|
logger.Warn("[scenes] <%s> failed to fetch scene marker tags: %s", scene.Checksum, err)
|
||||||
|
} else {
|
||||||
|
var tagJoins []models.SceneMarkersTags
|
||||||
|
for _, tag := range tags {
|
||||||
|
join := models.SceneMarkersTags{
|
||||||
|
SceneMarkerID: sceneMarker.ID,
|
||||||
|
TagID: tag.ID,
|
||||||
|
}
|
||||||
|
tagJoins = append(tagJoins, join)
|
||||||
|
}
|
||||||
|
if err := jqb.CreateSceneMarkersTags(tagJoins, tx); err != nil {
|
||||||
|
logger.Errorf("[scenes] <%s> failed to associate scene marker tags: %s", scene.Checksum, err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.Info("[scenes] importing")
|
||||||
|
if err := tx.Commit(); err != nil {
|
||||||
|
logger.Errorf("[scenes] import failed to commit: %s", err.Error())
|
||||||
|
}
|
||||||
|
logger.Info("[scenes] import complete")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *ImportTask) getPerformers(names []string, tx *sqlx.Tx) ([]models.Performer, error) {
|
||||||
|
pqb := models.NewPerformerQueryBuilder()
|
||||||
|
performers, err := pqb.FindByNames(names, tx)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var pluckedNames []string
|
||||||
|
for _, performer := range performers {
|
||||||
|
if !performer.Name.Valid {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
pluckedNames = append(pluckedNames, performer.Name.String)
|
||||||
|
}
|
||||||
|
|
||||||
|
missingPerformers := utils.StrFilter(names, func(name string) bool {
|
||||||
|
return !utils.StrInclude(pluckedNames, name)
|
||||||
|
})
|
||||||
|
|
||||||
|
for _, missingPerformer := range missingPerformers {
|
||||||
|
logger.Warnf("[scenes] performer %s does not exist", missingPerformer)
|
||||||
|
}
|
||||||
|
|
||||||
|
return performers, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *ImportTask) getTags(sceneChecksum string, names []string, tx *sqlx.Tx) ([]models.Tag, error) {
|
||||||
|
tqb := models.NewTagQueryBuilder()
|
||||||
|
tags, err := tqb.FindByNames(names, tx)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var pluckedNames []string
|
||||||
|
for _, tag := range tags {
|
||||||
|
if tag.Name == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
pluckedNames = append(pluckedNames, tag.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
missingTags := utils.StrFilter(names, func(name string) bool {
|
||||||
|
return !utils.StrInclude(pluckedNames, name)
|
||||||
|
})
|
||||||
|
|
||||||
|
for _, missingTag := range missingTags {
|
||||||
|
logger.Warnf("[scenes] <%s> tag %s does not exist", sceneChecksum, missingTag)
|
||||||
|
}
|
||||||
|
|
||||||
|
return tags, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// https://www.reddit.com/r/golang/comments/5ia523/idiomatic_way_to_remove_duplicates_in_a_slice/db6qa2e
|
||||||
|
func (t *ImportTask) getUnique(s []string) []string {
|
||||||
|
seen := make(map[string]struct{}, len(s))
|
||||||
|
j := 0
|
||||||
|
for _, v := range s {
|
||||||
|
if _, ok := seen[v]; ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
seen[v] = struct{}{}
|
||||||
|
s[j] = v
|
||||||
|
j++
|
||||||
|
}
|
||||||
|
return s[:j]
|
||||||
|
}
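getUnique deduplicates in place: it keeps the first occurrence of each string, reuses the input slice's backing array, and returns the shortened prefix. A short usage example with made-up values:

// Fragment with illustrative values; note the input slice is mutated.
tags := []string{"red", "blue", "red", "green", "blue"}
unique := t.getUnique(tags) // []string{"red", "blue", "green"}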
|
||||||
164
internal/manager/task_scan.go
Normal file
@ -0,0 +1,164 @@
package manager
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"github.com/stashapp/stash/internal/database"
|
||||||
|
"github.com/stashapp/stash/internal/ffmpeg"
|
||||||
|
"github.com/stashapp/stash/internal/logger"
|
||||||
|
"github.com/stashapp/stash/internal/models"
|
||||||
|
"github.com/stashapp/stash/internal/utils"
|
||||||
|
"path/filepath"
|
||||||
|
"strconv"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ScanTask struct {
|
||||||
|
FilePath string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *ScanTask) Start(wg *sync.WaitGroup) {
|
||||||
|
if filepath.Ext(t.FilePath) == ".zip" {
|
||||||
|
t.scanGallery()
|
||||||
|
} else {
|
||||||
|
t.scanScene()
|
||||||
|
}
|
||||||
|
|
||||||
|
wg.Done()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *ScanTask) scanGallery() {
|
||||||
|
qb := models.NewGalleryQueryBuilder()
|
||||||
|
gallery, _ := qb.FindByPath(t.FilePath)
|
||||||
|
if gallery != nil {
|
||||||
|
// We already have this item in the database, keep going
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
checksum, err := t.calculateChecksum()
|
||||||
|
if err != nil {
|
||||||
|
logger.Error(err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx := context.TODO()
|
||||||
|
tx := database.DB.MustBeginTx(ctx, nil)
|
||||||
|
gallery, _ = qb.FindByChecksum(checksum, tx)
|
||||||
|
if gallery != nil {
|
||||||
|
logger.Infof("%s already exists. Updating path...", t.FilePath)
|
||||||
|
gallery.Path = t.FilePath
|
||||||
|
_, err = qb.Update(*gallery, tx)
|
||||||
|
} else {
|
||||||
|
logger.Infof("%s doesn't exist. Creating new item...", t.FilePath)
|
||||||
|
currentTime := time.Now()
|
||||||
|
newGallery := models.Gallery{
|
||||||
|
Checksum: checksum,
|
||||||
|
Path: t.FilePath,
|
||||||
|
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||||
|
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||||
|
}
|
||||||
|
_, err = qb.Create(newGallery, tx)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
logger.Error(err.Error())
|
||||||
|
_ = tx.Rollback()
|
||||||
|
} else if err := tx.Commit(); err != nil {
|
||||||
|
logger.Error(err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *ScanTask) scanScene() {
|
||||||
|
ffprobe := ffmpeg.NewFFProbe(instance.Paths.FixedPaths.FFProbe)
|
||||||
|
ffprobeResult, err := ffprobe.ProbeVideo(t.FilePath)
|
||||||
|
if err != nil {
|
||||||
|
logger.Error(err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
qb := models.NewSceneQueryBuilder()
|
||||||
|
scene, _ := qb.FindByPath(t.FilePath)
|
||||||
|
if scene != nil {
|
||||||
|
// We already have this item in the database, keep going
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
checksum, err := t.calculateChecksum()
|
||||||
|
if err != nil {
|
||||||
|
logger.Error(err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
t.makeScreenshots(*ffprobeResult, checksum)
|
||||||
|
|
||||||
|
scene, _ = qb.FindByChecksum(checksum)
|
||||||
|
ctx := context.TODO()
|
||||||
|
tx := database.DB.MustBeginTx(ctx, nil)
|
||||||
|
if scene != nil {
|
||||||
|
logger.Infof("%s already exists. Updating path...", t.FilePath)
|
||||||
|
scene.Path = t.FilePath
|
||||||
|
_, err = qb.Update(*scene, tx)
|
||||||
|
} else {
|
||||||
|
logger.Infof("%s doesn't exist. Creating new item...", t.FilePath)
|
||||||
|
currentTime := time.Now()
|
||||||
|
newScene := models.Scene{
|
||||||
|
Checksum: checksum,
|
||||||
|
Path: t.FilePath,
|
||||||
|
Duration: sql.NullFloat64{Float64: ffprobeResult.Duration, Valid: true },
|
||||||
|
VideoCodec: sql.NullString{ String: ffprobeResult.VideoCodec, Valid: true},
|
||||||
|
AudioCodec: sql.NullString{ String: ffprobeResult.AudioCodec, Valid: true},
|
||||||
|
Width: sql.NullInt64{ Int64: int64(ffprobeResult.Width), Valid: true },
|
||||||
|
Height: sql.NullInt64{ Int64: int64(ffprobeResult.Height), Valid: true },
|
||||||
|
Framerate: sql.NullFloat64{ Float64: ffprobeResult.FrameRate, Valid: true },
|
||||||
|
Bitrate: sql.NullInt64{ Int64: ffprobeResult.Bitrate, Valid: true },
|
||||||
|
Size: sql.NullString{ String: strconv.Itoa(int(ffprobeResult.Size)), Valid: true },
|
||||||
|
CreatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||||
|
UpdatedAt: models.SQLiteTimestamp{ Timestamp: currentTime },
|
||||||
|
}
|
||||||
|
_, err = qb.Create(newScene, tx)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
logger.Error(err.Error())
|
||||||
|
_ = tx.Rollback()
|
||||||
|
} else if err := tx.Commit(); err != nil {
|
||||||
|
logger.Error(err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *ScanTask) makeScreenshots(probeResult ffmpeg.FFProbeResult, checksum string) {
|
||||||
|
thumbPath := instance.Paths.Scene.GetThumbnailScreenshotPath(checksum)
|
||||||
|
normalPath := instance.Paths.Scene.GetScreenshotPath(checksum)
|
||||||
|
|
||||||
|
thumbExists, _ := utils.FileExists(thumbPath)
|
||||||
|
normalExists, _ := utils.FileExists(normalPath)
|
||||||
|
if thumbExists && normalExists {
|
||||||
|
logger.Debug("Screenshots already exist for this path... skipping")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
t.makeScreenshot(probeResult, thumbPath, 5, 320)
|
||||||
|
t.makeScreenshot(probeResult, normalPath, 2, probeResult.Width)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *ScanTask) makeScreenshot(probeResult ffmpeg.FFProbeResult, outputPath string, quality int, width int) {
|
||||||
|
encoder := ffmpeg.NewEncoder(instance.Paths.FixedPaths.FFMPEG)
|
||||||
|
options := ffmpeg.ScreenshotOptions{
|
||||||
|
OutputPath: outputPath,
|
||||||
|
Quality: quality,
|
||||||
|
Time: float64(probeResult.Duration) * 0.2,
|
||||||
|
Width: width,
|
||||||
|
}
|
||||||
|
encoder.Screenshot(probeResult, options)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *ScanTask) calculateChecksum() (string, error) {
|
||||||
|
logger.Infof("%s not found. Calculating checksum...", t.FilePath)
|
||||||
|
checksum, err := utils.MD5FromFilePath(t.FilePath)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
logger.Debugf("Checksum calculated: %s", checksum)
|
||||||
|
return checksum, nil
|
||||||
|
}
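The task type above only exposes Start(wg); a minimal sketch of how a scan job might fan these tasks out concurrently follows. The scanPaths helper and its package placement are assumptions for illustration, not part of this commit.

package manager

import "sync"

// scanPaths is a hypothetical driver: it launches one ScanTask per path and
// waits for all of them to finish. Start calls wg.Done itself, so the caller
// only needs wg.Add and wg.Wait.
func scanPaths(paths []string) {
	var wg sync.WaitGroup
	for _, path := range paths {
		wg.Add(1)
		task := ScanTask{FilePath: path}
		go task.Start(&wg)
	}
	wg.Wait()
}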
|
||||||
17
internal/manager/utils.go
Normal file
|
|
@ -0,0 +1,17 @@
|
||||||
|
package manager
|
||||||
|
|
||||||
|
import "github.com/stashapp/stash/internal/utils"
|
||||||
|
|
||||||
|
func IsStreamable(videoPath string, checksum string) (bool, error) {
|
||||||
|
fileType, err := utils.FileType(videoPath)
|
||||||
|
if err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if fileType.MIME.Value == "video/quicktime" || fileType.MIME.Value == "video/mp4" || fileType.MIME.Value == "video/webm" || fileType.MIME.Value == "video/x-m4v" {
|
||||||
|
return true, nil
|
||||||
|
} else {
|
||||||
|
transcodePath := instance.Paths.Scene.GetTranscodePath(checksum)
|
||||||
|
return utils.FileExists(transcodePath)
|
||||||
|
}
|
||||||
|
}
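A hedged sketch of how IsStreamable might be consulted when choosing which file to serve; servePath is a hypothetical helper, not part of the commit.

package manager

import "github.com/stashapp/stash/internal/logger"

// servePath is a hypothetical helper: it returns the original file when the
// container/codec is browser-friendly, otherwise the pre-generated transcode path.
func servePath(videoPath string, checksum string) string {
	streamable, err := IsStreamable(videoPath, checksum)
	if err != nil {
		logger.Error(err.Error())
		return ""
	}
	if streamable {
		return videoPath
	}
	return instance.Paths.Scene.GetTranscodePath(checksum)
}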
|
||||||
25
internal/models/extension_find_filter.go
Normal file
|
|
@ -0,0 +1,25 @@
|
||||||
|
package models
|
||||||
|
|
||||||
|
func (ff FindFilterType) GetSort(defaultSort string) string {
|
||||||
|
var sort string
|
||||||
|
if ff.Sort == nil {
|
||||||
|
sort = defaultSort
|
||||||
|
} else {
|
||||||
|
sort = *ff.Sort
|
||||||
|
}
|
||||||
|
return sort
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ff FindFilterType) GetDirection() string {
|
||||||
|
var direction string
|
||||||
|
if directionFilter := ff.Direction; directionFilter != nil {
|
||||||
|
if dir := directionFilter.String(); directionFilter.IsValid() {
|
||||||
|
direction = dir
|
||||||
|
} else {
|
||||||
|
direction = "ASC"
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
direction = "ASC"
|
||||||
|
}
|
||||||
|
return direction
|
||||||
|
}
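Taken together, the two helpers supply defaults for missing filter fields. A minimal sketch of how a query builder might combine them into an ORDER BY fragment; exampleOrderBy is hypothetical and only for illustration.

package models

import "fmt"

// exampleOrderBy shows the intended use: fall back to a default column and to
// ascending order when the client did not supply sort/direction.
func exampleOrderBy(ff FindFilterType) string {
	sort := ff.GetSort("title")
	direction := ff.GetDirection()
	return fmt.Sprintf(" ORDER BY %s %s ", sort, direction)
}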
|
||||||
11893
internal/models/generated_exec.go
Normal file
File diff suppressed because it is too large
334
internal/models/generated_models.go
Normal file
|
|
@ -0,0 +1,334 @@
|
||||||
|
// Code generated by github.com/99designs/gqlgen, DO NOT EDIT.
|
||||||
|
|
||||||
|
package models
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"strconv"
|
||||||
|
)
|
||||||
|
|
||||||
|
type FindFilterType struct {
|
||||||
|
Q *string `json:"q"`
|
||||||
|
Page *int `json:"page"`
|
||||||
|
PerPage *int `json:"per_page"`
|
||||||
|
Sort *string `json:"sort"`
|
||||||
|
Direction *SortDirectionEnum `json:"direction"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type FindGalleriesResultType struct {
|
||||||
|
Count int `json:"count"`
|
||||||
|
Galleries []Gallery `json:"galleries"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type FindPerformersResultType struct {
|
||||||
|
Count int `json:"count"`
|
||||||
|
Performers []Performer `json:"performers"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type FindSceneMarkersResultType struct {
|
||||||
|
Count int `json:"count"`
|
||||||
|
SceneMarkers []SceneMarker `json:"scene_markers"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type FindScenesResultType struct {
|
||||||
|
Count int `json:"count"`
|
||||||
|
Scenes []Scene `json:"scenes"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type FindStudiosResultType struct {
|
||||||
|
Count int `json:"count"`
|
||||||
|
Studios []Studio `json:"studios"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type GalleryFilesType struct {
|
||||||
|
Index int `json:"index"`
|
||||||
|
Name *string `json:"name"`
|
||||||
|
Path *string `json:"path"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type MarkerStringsResultType struct {
|
||||||
|
Count int `json:"count"`
|
||||||
|
ID string `json:"id"`
|
||||||
|
Title string `json:"title"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type PerformerCreateInput struct {
|
||||||
|
Name *string `json:"name"`
|
||||||
|
URL *string `json:"url"`
|
||||||
|
Birthdate *string `json:"birthdate"`
|
||||||
|
Ethnicity *string `json:"ethnicity"`
|
||||||
|
Country *string `json:"country"`
|
||||||
|
EyeColor *string `json:"eye_color"`
|
||||||
|
Height *string `json:"height"`
|
||||||
|
Measurements *string `json:"measurements"`
|
||||||
|
FakeTits *string `json:"fake_tits"`
|
||||||
|
CareerLength *string `json:"career_length"`
|
||||||
|
Tattoos *string `json:"tattoos"`
|
||||||
|
Piercings *string `json:"piercings"`
|
||||||
|
Aliases *string `json:"aliases"`
|
||||||
|
Twitter *string `json:"twitter"`
|
||||||
|
Instagram *string `json:"instagram"`
|
||||||
|
Favorite *bool `json:"favorite"`
|
||||||
|
// This should be base64 encoded
|
||||||
|
Image string `json:"image"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type PerformerFilterType struct {
|
||||||
|
// Filter by favorite
|
||||||
|
FilterFavorites *bool `json:"filter_favorites"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type PerformerUpdateInput struct {
|
||||||
|
ID string `json:"id"`
|
||||||
|
Name *string `json:"name"`
|
||||||
|
URL *string `json:"url"`
|
||||||
|
Birthdate *string `json:"birthdate"`
|
||||||
|
Ethnicity *string `json:"ethnicity"`
|
||||||
|
Country *string `json:"country"`
|
||||||
|
EyeColor *string `json:"eye_color"`
|
||||||
|
Height *string `json:"height"`
|
||||||
|
Measurements *string `json:"measurements"`
|
||||||
|
FakeTits *string `json:"fake_tits"`
|
||||||
|
CareerLength *string `json:"career_length"`
|
||||||
|
Tattoos *string `json:"tattoos"`
|
||||||
|
Piercings *string `json:"piercings"`
|
||||||
|
Aliases *string `json:"aliases"`
|
||||||
|
Twitter *string `json:"twitter"`
|
||||||
|
Instagram *string `json:"instagram"`
|
||||||
|
Favorite *bool `json:"favorite"`
|
||||||
|
// This should be base64 encoded
|
||||||
|
Image *string `json:"image"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type SceneFileType struct {
|
||||||
|
Size *string `json:"size"`
|
||||||
|
Duration *float64 `json:"duration"`
|
||||||
|
VideoCodec *string `json:"video_codec"`
|
||||||
|
AudioCodec *string `json:"audio_codec"`
|
||||||
|
Width *int `json:"width"`
|
||||||
|
Height *int `json:"height"`
|
||||||
|
Framerate *float64 `json:"framerate"`
|
||||||
|
Bitrate *int `json:"bitrate"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type SceneFilterType struct {
|
||||||
|
// Filter by rating
|
||||||
|
Rating *int `json:"rating"`
|
||||||
|
// Filter by resolution
|
||||||
|
Resolution *ResolutionEnum `json:"resolution"`
|
||||||
|
// Filter to only include scenes which have markers. `true` or `false`
|
||||||
|
HasMarkers *string `json:"has_markers"`
|
||||||
|
// Filter to only include scenes missing this property
|
||||||
|
IsMissing *string `json:"is_missing"`
|
||||||
|
// Filter to only include scenes with this studio
|
||||||
|
StudioID *string `json:"studio_id"`
|
||||||
|
// Filter to only include scenes with these tags
|
||||||
|
Tags []string `json:"tags"`
|
||||||
|
// Filter to only include scenes with this performer
|
||||||
|
PerformerID *string `json:"performer_id"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type SceneMarkerCreateInput struct {
|
||||||
|
Title string `json:"title"`
|
||||||
|
Seconds float64 `json:"seconds"`
|
||||||
|
SceneID string `json:"scene_id"`
|
||||||
|
PrimaryTagID string `json:"primary_tag_id"`
|
||||||
|
TagIds []string `json:"tag_ids"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type SceneMarkerFilterType struct {
|
||||||
|
// Filter to only include scene markers with this tag
|
||||||
|
TagID *string `json:"tag_id"`
|
||||||
|
// Filter to only include scene markers with these tags
|
||||||
|
Tags []string `json:"tags"`
|
||||||
|
// Filter to only include scene markers attached to a scene with these tags
|
||||||
|
SceneTags []string `json:"scene_tags"`
|
||||||
|
// Filter to only include scene markers with these performers
|
||||||
|
Performers []string `json:"performers"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type SceneMarkerTag struct {
|
||||||
|
Tag Tag `json:"tag"`
|
||||||
|
SceneMarkers []SceneMarker `json:"scene_markers"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type SceneMarkerUpdateInput struct {
|
||||||
|
ID string `json:"id"`
|
||||||
|
Title string `json:"title"`
|
||||||
|
Seconds float64 `json:"seconds"`
|
||||||
|
SceneID string `json:"scene_id"`
|
||||||
|
PrimaryTagID string `json:"primary_tag_id"`
|
||||||
|
TagIds []string `json:"tag_ids"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type ScenePathsType struct {
|
||||||
|
Screenshot *string `json:"screenshot"`
|
||||||
|
Preview *string `json:"preview"`
|
||||||
|
Stream *string `json:"stream"`
|
||||||
|
Webp *string `json:"webp"`
|
||||||
|
Vtt *string `json:"vtt"`
|
||||||
|
ChaptersVtt *string `json:"chapters_vtt"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type SceneUpdateInput struct {
|
||||||
|
ClientMutationID *string `json:"clientMutationId"`
|
||||||
|
ID string `json:"id"`
|
||||||
|
Title *string `json:"title"`
|
||||||
|
Details *string `json:"details"`
|
||||||
|
URL *string `json:"url"`
|
||||||
|
Date *string `json:"date"`
|
||||||
|
Rating *int `json:"rating"`
|
||||||
|
StudioID *string `json:"studio_id"`
|
||||||
|
GalleryID *string `json:"gallery_id"`
|
||||||
|
PerformerIds []string `json:"performer_ids"`
|
||||||
|
TagIds []string `json:"tag_ids"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// A performer from a scraping operation...
|
||||||
|
type ScrapedPerformer struct {
|
||||||
|
Name *string `json:"name"`
|
||||||
|
URL *string `json:"url"`
|
||||||
|
Twitter *string `json:"twitter"`
|
||||||
|
Instagram *string `json:"instagram"`
|
||||||
|
Birthdate *string `json:"birthdate"`
|
||||||
|
Ethnicity *string `json:"ethnicity"`
|
||||||
|
Country *string `json:"country"`
|
||||||
|
EyeColor *string `json:"eye_color"`
|
||||||
|
Height *string `json:"height"`
|
||||||
|
Measurements *string `json:"measurements"`
|
||||||
|
FakeTits *string `json:"fake_tits"`
|
||||||
|
CareerLength *string `json:"career_length"`
|
||||||
|
Tattoos *string `json:"tattoos"`
|
||||||
|
Piercings *string `json:"piercings"`
|
||||||
|
Aliases *string `json:"aliases"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type StatsResultType struct {
|
||||||
|
SceneCount int `json:"scene_count"`
|
||||||
|
GalleryCount int `json:"gallery_count"`
|
||||||
|
PerformerCount int `json:"performer_count"`
|
||||||
|
StudioCount int `json:"studio_count"`
|
||||||
|
TagCount int `json:"tag_count"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type StudioCreateInput struct {
|
||||||
|
Name string `json:"name"`
|
||||||
|
URL *string `json:"url"`
|
||||||
|
// This should be base64 encoded
|
||||||
|
Image string `json:"image"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type StudioUpdateInput struct {
|
||||||
|
ID string `json:"id"`
|
||||||
|
Name *string `json:"name"`
|
||||||
|
URL *string `json:"url"`
|
||||||
|
// This should be base64 encoded
|
||||||
|
Image *string `json:"image"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type TagCreateInput struct {
|
||||||
|
Name string `json:"name"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type TagDestroyInput struct {
|
||||||
|
ID string `json:"id"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type TagUpdateInput struct {
|
||||||
|
ID string `json:"id"`
|
||||||
|
Name string `json:"name"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type ResolutionEnum string
|
||||||
|
|
||||||
|
const (
|
||||||
|
// 240p
|
||||||
|
ResolutionEnumLow ResolutionEnum = "LOW"
|
||||||
|
// 480p
|
||||||
|
ResolutionEnumStandard ResolutionEnum = "STANDARD"
|
||||||
|
// 720p
|
||||||
|
ResolutionEnumStandardHd ResolutionEnum = "STANDARD_HD"
|
||||||
|
// 1080p
|
||||||
|
ResolutionEnumFullHd ResolutionEnum = "FULL_HD"
|
||||||
|
// 4k
|
||||||
|
ResolutionEnumFourK ResolutionEnum = "FOUR_K"
|
||||||
|
)
|
||||||
|
|
||||||
|
var AllResolutionEnum = []ResolutionEnum{
|
||||||
|
ResolutionEnumLow,
|
||||||
|
ResolutionEnumStandard,
|
||||||
|
ResolutionEnumStandardHd,
|
||||||
|
ResolutionEnumFullHd,
|
||||||
|
ResolutionEnumFourK,
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e ResolutionEnum) IsValid() bool {
|
||||||
|
switch e {
|
||||||
|
case ResolutionEnumLow, ResolutionEnumStandard, ResolutionEnumStandardHd, ResolutionEnumFullHd, ResolutionEnumFourK:
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e ResolutionEnum) String() string {
|
||||||
|
return string(e)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *ResolutionEnum) UnmarshalGQL(v interface{}) error {
|
||||||
|
str, ok := v.(string)
|
||||||
|
if !ok {
|
||||||
|
return fmt.Errorf("enums must be strings")
|
||||||
|
}
|
||||||
|
|
||||||
|
*e = ResolutionEnum(str)
|
||||||
|
if !e.IsValid() {
|
||||||
|
return fmt.Errorf("%s is not a valid ResolutionEnum", str)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e ResolutionEnum) MarshalGQL(w io.Writer) {
|
||||||
|
fmt.Fprint(w, strconv.Quote(e.String()))
|
||||||
|
}
|
||||||
|
|
||||||
|
type SortDirectionEnum string
|
||||||
|
|
||||||
|
const (
|
||||||
|
SortDirectionEnumAsc SortDirectionEnum = "ASC"
|
||||||
|
SortDirectionEnumDesc SortDirectionEnum = "DESC"
|
||||||
|
)
|
||||||
|
|
||||||
|
var AllSortDirectionEnum = []SortDirectionEnum{
|
||||||
|
SortDirectionEnumAsc,
|
||||||
|
SortDirectionEnumDesc,
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e SortDirectionEnum) IsValid() bool {
|
||||||
|
switch e {
|
||||||
|
case SortDirectionEnumAsc, SortDirectionEnumDesc:
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e SortDirectionEnum) String() string {
|
||||||
|
return string(e)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *SortDirectionEnum) UnmarshalGQL(v interface{}) error {
|
||||||
|
str, ok := v.(string)
|
||||||
|
if !ok {
|
||||||
|
return fmt.Errorf("enums must be strings")
|
||||||
|
}
|
||||||
|
|
||||||
|
*e = SortDirectionEnum(str)
|
||||||
|
if !e.IsValid() {
|
||||||
|
return fmt.Errorf("%s is not a valid SortDirectionEnum", str)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e SortDirectionEnum) MarshalGQL(w io.Writer) {
|
||||||
|
fmt.Fprint(w, strconv.Quote(e.String()))
|
||||||
|
}
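A small sketch of a round trip through the generated enum helpers; exampleEnumRoundTrip is hypothetical and only illustrates the validation in UnmarshalGQL and the quoting in MarshalGQL.

package models

import (
	"bytes"
	"fmt"
)

// exampleEnumRoundTrip parses a GraphQL enum value, rejecting anything that is
// not in AllResolutionEnum, then writes it back out in its quoted wire form.
func exampleEnumRoundTrip() {
	var res ResolutionEnum
	if err := res.UnmarshalGQL("FULL_HD"); err != nil {
		fmt.Println(err)
		return
	}
	var buf bytes.Buffer
	res.MarshalGQL(&buf) // writes "FULL_HD" (including the quotes)
	fmt.Println(buf.String())
}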
|
||||||
116
internal/models/model_gallery.go
Normal file
|
|
@ -0,0 +1,116 @@
|
||||||
|
package models
|
||||||
|
|
||||||
|
import (
|
||||||
|
"archive/zip"
|
||||||
|
"bytes"
|
||||||
|
"database/sql"
|
||||||
|
"github.com/disintegration/imaging"
|
||||||
|
"github.com/stashapp/stash/internal/api/urlbuilders"
|
||||||
|
"github.com/stashapp/stash/internal/logger"
|
||||||
|
"github.com/stashapp/stash/internal/utils"
|
||||||
|
"image"
|
||||||
|
"image/jpeg"
|
||||||
|
"io/ioutil"
|
||||||
|
"path/filepath"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Gallery struct {
|
||||||
|
ID int `db:"id" json:"id"`
|
||||||
|
Path string `db:"path" json:"path"`
|
||||||
|
Checksum string `db:"checksum" json:"checksum"`
|
||||||
|
SceneID sql.NullInt64 `db:"scene_id,omitempty" json:"scene_id"`
|
||||||
|
CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
|
||||||
|
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (g *Gallery) GetFiles(baseUrl string) []GalleryFilesType {
|
||||||
|
var galleryFiles []GalleryFilesType
|
||||||
|
filteredFiles, readCloser, err := g.listZipContents()
|
||||||
|
if err != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
defer readCloser.Close()
|
||||||
|
|
||||||
|
builder := urlbuilders.NewGalleryURLBuilder(baseUrl, g.ID)
|
||||||
|
for i, file := range filteredFiles {
|
||||||
|
galleryURL := builder.GetGalleryImageUrl(i)
|
||||||
|
galleryFile := GalleryFilesType{
|
||||||
|
Index: i,
|
||||||
|
Name: &file.Name,
|
||||||
|
Path: &galleryURL,
|
||||||
|
}
|
||||||
|
galleryFiles = append(galleryFiles, galleryFile)
|
||||||
|
}
|
||||||
|
|
||||||
|
return galleryFiles
|
||||||
|
}
|
||||||
|
|
||||||
|
func (g *Gallery) GetImage(index int) []byte {
|
||||||
|
data, _ := g.readZipFile(index)
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
|
||||||
|
func (g *Gallery) GetThumbnail(index int) []byte {
|
||||||
|
data, _ := g.readZipFile(index)
|
||||||
|
srcImage, _, err := image.Decode(bytes.NewReader(data))
|
||||||
|
if err != nil {
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
resizedImage := imaging.Resize(srcImage, 512, 0, imaging.Lanczos)
|
||||||
|
buf := new(bytes.Buffer)
|
||||||
|
err = jpeg.Encode(buf, resizedImage, nil)
|
||||||
|
if err != nil {
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
return buf.Bytes()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (g *Gallery) readZipFile(index int) ([]byte, error) {
|
||||||
|
filteredFiles, readCloser, err := g.listZipContents()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer readCloser.Close()
|
||||||
|
|
||||||
|
zipFile := filteredFiles[index]
|
||||||
|
zipFileReadCloser, err := zipFile.Open()
|
||||||
|
if err != nil {
|
||||||
|
logger.Warn("failed to read file inside zip file")
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer zipFileReadCloser.Close()
|
||||||
|
|
||||||
|
return ioutil.ReadAll(zipFileReadCloser)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (g *Gallery) listZipContents() ([]*zip.File, *zip.ReadCloser, error) {
|
||||||
|
readCloser, err := zip.OpenReader(g.Path)
|
||||||
|
if err != nil {
|
||||||
|
logger.Warn("failed to read zip file")
|
||||||
|
return nil, nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
filteredFiles := make([]*zip.File, 0)
|
||||||
|
for _, file := range readCloser.File {
|
||||||
|
if file.FileInfo().IsDir() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
ext := filepath.Ext(file.Name)
|
||||||
|
if ext != ".jpg" && ext != ".png" && ext != ".gif" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if strings.Contains(file.Name, "__MACOSX") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
filteredFiles = append(filteredFiles, file)
|
||||||
|
}
|
||||||
|
sort.Slice(filteredFiles, func(i, j int) bool {
|
||||||
|
a := filteredFiles[i]
|
||||||
|
b := filteredFiles[j]
|
||||||
|
return utils.NaturalCompare(a.Name, b.Name)
|
||||||
|
})
|
||||||
|
|
||||||
|
return filteredFiles, readCloser, nil
|
||||||
|
}
|
||||||
16
internal/models/model_joins.go
Normal file
|
|
@ -0,0 +1,16 @@
|
||||||
|
package models
|
||||||
|
|
||||||
|
type PerformersScenes struct {
|
||||||
|
PerformerID int `db:"performer_id" json:"performer_id"`
|
||||||
|
SceneID int `db:"scene_id" json:"scene_id"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type ScenesTags struct {
|
||||||
|
SceneID int `db:"scene_id" json:"scene_id"`
|
||||||
|
TagID int `db:"tag_id" json:"tag_id"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type SceneMarkersTags struct {
|
||||||
|
SceneMarkerID int `db:"scene_marker_id" json:"scene_marker_id"`
|
||||||
|
TagID int `db:"tag_id" json:"tag_id"`
|
||||||
|
}
|
||||||
29
internal/models/model_performer.go
Normal file
|
|
@ -0,0 +1,29 @@
|
||||||
|
package models
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Performer struct {
|
||||||
|
ID int `db:"id" json:"id"`
|
||||||
|
Image []byte `db:"image" json:"image"`
|
||||||
|
Checksum string `db:"checksum" json:"checksum"`
|
||||||
|
Name sql.NullString `db:"name" json:"name"`
|
||||||
|
Url sql.NullString `db:"url" json:"url"`
|
||||||
|
Twitter sql.NullString `db:"twitter" json:"twitter"`
|
||||||
|
Instagram sql.NullString `db:"instagram" json:"instagram"`
|
||||||
|
Birthdate sql.NullString `db:"birthdate" json:"birthdate"` // TODO dates?
|
||||||
|
Ethnicity sql.NullString `db:"ethnicity" json:"ethnicity"`
|
||||||
|
Country sql.NullString `db:"country" json:"country"`
|
||||||
|
EyeColor sql.NullString `db:"eye_color" json:"eye_color"`
|
||||||
|
Height sql.NullString `db:"height" json:"height"`
|
||||||
|
Measurements sql.NullString `db:"measurements" json:"measurements"`
|
||||||
|
FakeTits sql.NullString `db:"fake_tits" json:"fake_tits"`
|
||||||
|
CareerLength sql.NullString `db:"career_length" json:"career_length"`
|
||||||
|
Tattoos sql.NullString `db:"tattoos" json:"tattoos"`
|
||||||
|
Piercings sql.NullString `db:"piercings" json:"piercings"`
|
||||||
|
Aliases sql.NullString `db:"aliases" json:"aliases"`
|
||||||
|
Favorite sql.NullBool `db:"favorite" json:"favorite"`
|
||||||
|
CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
|
||||||
|
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
|
||||||
|
}
|
||||||
27
internal/models/model_scene.go
Normal file
|
|
@ -0,0 +1,27 @@
|
||||||
|
package models
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Scene struct {
|
||||||
|
ID int `db:"id" json:"id"`
|
||||||
|
Checksum string `db:"checksum" json:"checksum"`
|
||||||
|
Path string `db:"path" json:"path"`
|
||||||
|
Title sql.NullString `db:"title" json:"title"`
|
||||||
|
Details sql.NullString `db:"details" json:"details"`
|
||||||
|
Url sql.NullString `db:"url" json:"url"`
|
||||||
|
Date sql.NullString `db:"date" json:"date"` // TODO dates?
|
||||||
|
Rating sql.NullInt64 `db:"rating" json:"rating"`
|
||||||
|
Size sql.NullString `db:"size" json:"size"`
|
||||||
|
Duration sql.NullFloat64 `db:"duration" json:"duration"`
|
||||||
|
VideoCodec sql.NullString `db:"video_codec" json:"video_codec"`
|
||||||
|
AudioCodec sql.NullString `db:"audio_codec" json:"audio_codec"`
|
||||||
|
Width sql.NullInt64 `db:"width" json:"width"`
|
||||||
|
Height sql.NullInt64 `db:"height" json:"height"`
|
||||||
|
Framerate sql.NullFloat64 `db:"framerate" json:"framerate"`
|
||||||
|
Bitrate sql.NullInt64 `db:"bitrate" json:"bitrate"`
|
||||||
|
StudioID sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"`
|
||||||
|
CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
|
||||||
|
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
|
||||||
|
}
|
||||||
15
internal/models/model_scene_marker.go
Normal file
|
|
@ -0,0 +1,15 @@
|
||||||
|
package models
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
)
|
||||||
|
|
||||||
|
type SceneMarker struct {
|
||||||
|
ID int `db:"id" json:"id"`
|
||||||
|
Title string `db:"title" json:"title"`
|
||||||
|
Seconds float64 `db:"seconds" json:"seconds"`
|
||||||
|
PrimaryTagID sql.NullInt64 `db:"primary_tag_id,omitempty" json:"primary_tag_id"`
|
||||||
|
SceneID sql.NullInt64 `db:"scene_id,omitempty" json:"scene_id"`
|
||||||
|
CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
|
||||||
|
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
|
||||||
|
}
|
||||||
24
internal/models/model_scraped_item.go
Normal file
|
|
@ -0,0 +1,24 @@
|
||||||
|
package models
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ScrapedItem struct {
|
||||||
|
ID int `db:"id" json:"id"`
|
||||||
|
Title sql.NullString `db:"title" json:"title"`
|
||||||
|
Description sql.NullString `db:"description" json:"description"`
|
||||||
|
Url sql.NullString `db:"url" json:"url"`
|
||||||
|
Date sql.NullString `db:"date" json:"date"` // TODO dates?
|
||||||
|
Rating sql.NullString `db:"rating" json:"rating"`
|
||||||
|
Tags sql.NullString `db:"tags" json:"tags"`
|
||||||
|
Models sql.NullString `db:"models" json:"models"`
|
||||||
|
Episode sql.NullInt64 `db:"episode" json:"episode"`
|
||||||
|
GalleryFilename sql.NullString `db:"gallery_filename" json:"gallery_filename"`
|
||||||
|
GalleryUrl sql.NullString `db:"gallery_url" json:"gallery_url"`
|
||||||
|
VideoFilename sql.NullString `db:"video_filename" json:"video_filename"`
|
||||||
|
VideoUrl sql.NullString `db:"video_url" json:"video_url"`
|
||||||
|
StudioID sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"`
|
||||||
|
CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
|
||||||
|
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
|
||||||
|
}
|
||||||
15
internal/models/model_studio.go
Normal file
|
|
@ -0,0 +1,15 @@
|
||||||
|
package models
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Studio struct {
|
||||||
|
ID int `db:"id" json:"id"`
|
||||||
|
Image []byte `db:"image" json:"image"`
|
||||||
|
Checksum string `db:"checksum" json:"checksum"`
|
||||||
|
Name sql.NullString `db:"name" json:"name"`
|
||||||
|
Url sql.NullString `db:"url" json:"url"`
|
||||||
|
CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
|
||||||
|
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
|
||||||
|
}
|
||||||
8
internal/models/model_tag.go
Normal file
|
|
@ -0,0 +1,8 @@
|
||||||
|
package models
|
||||||
|
|
||||||
|
type Tag struct {
|
||||||
|
ID int `db:"id" json:"id"`
|
||||||
|
Name string `db:"name" json:"name"` // TODO make schema not null
|
||||||
|
CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
|
||||||
|
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
|
||||||
|
}
|
||||||
163
internal/models/querybuilder_gallery.go
Normal file
|
|
@ -0,0 +1,163 @@
|
||||||
|
package models
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
"github.com/jmoiron/sqlx"
|
||||||
|
"github.com/stashapp/stash/internal/database"
|
||||||
|
"path/filepath"
|
||||||
|
)
|
||||||
|
|
||||||
|
type galleryQueryBuilder struct {}
|
||||||
|
|
||||||
|
func NewGalleryQueryBuilder() galleryQueryBuilder {
|
||||||
|
return galleryQueryBuilder{}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *galleryQueryBuilder) Create(newGallery Gallery, tx *sqlx.Tx) (*Gallery, error) {
|
||||||
|
ensureTx(tx)
|
||||||
|
result, err := tx.NamedExec(
|
||||||
|
`INSERT INTO galleries (path, checksum, scene_id, created_at, updated_at)
|
||||||
|
VALUES (:path, :checksum, :scene_id, :created_at, :updated_at)
|
||||||
|
`,
|
||||||
|
newGallery,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
galleryID, err := result.LastInsertId()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if err := tx.Get(&newGallery, `SELECT * FROM galleries WHERE id = ? LIMIT 1`, galleryID); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &newGallery, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *galleryQueryBuilder) Update(updatedGallery Gallery, tx *sqlx.Tx) (*Gallery, error) {
|
||||||
|
ensureTx(tx)
|
||||||
|
_, err := tx.NamedExec(
|
||||||
|
`UPDATE galleries SET `+SqlGenKeys(updatedGallery)+` WHERE galleries.id = :id`,
|
||||||
|
updatedGallery,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := tx.Get(&updatedGallery, `SELECT * FROM galleries WHERE id = ? LIMIT 1`, updatedGallery.ID); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &updatedGallery, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *galleryQueryBuilder) Find(id int) (*Gallery, error) {
|
||||||
|
query := "SELECT * FROM galleries WHERE id = ? LIMIT 1"
|
||||||
|
args := []interface{}{id}
|
||||||
|
return qb.queryGallery(query, args, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *galleryQueryBuilder) FindByChecksum(checksum string, tx *sqlx.Tx) (*Gallery, error) {
|
||||||
|
query := "SELECT * FROM galleries WHERE checksum = ? LIMIT 1"
|
||||||
|
args := []interface{}{checksum}
|
||||||
|
return qb.queryGallery(query, args, tx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *galleryQueryBuilder) FindByPath(path string) (*Gallery, error) {
|
||||||
|
query := "SELECT * FROM galleries WHERE path = ? LIMIT 1"
|
||||||
|
args := []interface{}{path}
|
||||||
|
return qb.queryGallery(query, args, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *galleryQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) (*Gallery, error) {
|
||||||
|
query := "SELECT galleries.* FROM galleries JOIN scenes ON scenes.id = galleries.scene_id WHERE scenes.id = ? LIMIT 1"
|
||||||
|
args := []interface{}{sceneID}
|
||||||
|
return qb.queryGallery(query, args, tx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *galleryQueryBuilder) ValidGalleriesForScenePath(scenePath string) ([]Gallery, error) {
|
||||||
|
sceneDirPath := filepath.Dir(scenePath)
|
||||||
|
query := "SELECT galleries.* FROM galleries WHERE galleries.scene_id IS NULL AND galleries.path LIKE '" + sceneDirPath + "%' ORDER BY path ASC"
|
||||||
|
return qb.queryGalleries(query, nil, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *galleryQueryBuilder) All() ([]Gallery, error) {
|
||||||
|
return qb.queryGalleries(selectAll("galleries") + qb.getGallerySort(nil), nil, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *galleryQueryBuilder) Query(findFilter *FindFilterType) ([]Gallery, int) {
|
||||||
|
if findFilter == nil {
|
||||||
|
findFilter = &FindFilterType{}
|
||||||
|
}
|
||||||
|
|
||||||
|
whereClauses := []string{}
|
||||||
|
havingClauses := []string{}
|
||||||
|
args := []interface{}{}
|
||||||
|
body := selectDistinctIDs("galleries")
|
||||||
|
|
||||||
|
if q := findFilter.Q; q != nil && *q != "" {
|
||||||
|
searchColumns := []string{"galleries.path", "galleries.checksum"}
|
||||||
|
whereClauses = append(whereClauses, getSearch(searchColumns, *q))
|
||||||
|
}
|
||||||
|
|
||||||
|
sortAndPagination := qb.getGallerySort(findFilter) + getPagination(findFilter)
|
||||||
|
idsResult, countResult := executeFindQuery("galleries", body, args, sortAndPagination, whereClauses, havingClauses)
|
||||||
|
|
||||||
|
var galleries []Gallery
|
||||||
|
for _, id := range idsResult {
|
||||||
|
gallery, _ := qb.Find(id)
|
||||||
|
galleries = append(galleries, *gallery)
|
||||||
|
}
|
||||||
|
|
||||||
|
return galleries, countResult
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *galleryQueryBuilder) getGallerySort(findFilter *FindFilterType) string {
|
||||||
|
var sort string
|
||||||
|
var direction string
|
||||||
|
//if findFilter == nil { // TODO temp until title is removed from schema and UI
|
||||||
|
sort = "path"
|
||||||
|
direction = "ASC"
|
||||||
|
//} else {
|
||||||
|
// sort = findFilter.getSort("path")
|
||||||
|
// direction = findFilter.getDirection()
|
||||||
|
//}
|
||||||
|
return getSort(sort, direction, "galleries")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *galleryQueryBuilder) queryGallery(query string, args []interface{}, tx *sqlx.Tx) (*Gallery, error) {
|
||||||
|
results, err := qb.queryGalleries(query, args, tx)
|
||||||
|
if err != nil || len(results) < 1 {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &results[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *galleryQueryBuilder) queryGalleries(query string, args []interface{}, tx *sqlx.Tx) ([]Gallery, error) {
|
||||||
|
var rows *sqlx.Rows
|
||||||
|
var err error
|
||||||
|
if tx != nil {
|
||||||
|
rows, err = tx.Queryx(query, args...)
|
||||||
|
} else {
|
||||||
|
rows, err = database.DB.Queryx(query, args...)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil && err != sql.ErrNoRows {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
galleries := make([]Gallery, 0)
|
||||||
|
gallery := Gallery{}
|
||||||
|
for rows.Next() {
|
||||||
|
if err := rows.StructScan(&gallery); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
galleries = append(galleries, gallery)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return galleries, nil
|
||||||
|
}
|
||||||
84
internal/models/querybuilder_joins.go
Normal file
|
|
@ -0,0 +1,84 @@
|
||||||
|
package models
|
||||||
|
|
||||||
|
import "github.com/jmoiron/sqlx"
|
||||||
|
|
||||||
|
type joinsQueryBuilder struct {}
|
||||||
|
|
||||||
|
func NewJoinsQueryBuilder() joinsQueryBuilder {
|
||||||
|
return joinsQueryBuilder{}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *joinsQueryBuilder) CreatePerformersScenes(newJoins []PerformersScenes, tx *sqlx.Tx) error {
|
||||||
|
ensureTx(tx)
|
||||||
|
for _, join := range newJoins {
|
||||||
|
_, err := tx.NamedExec(
|
||||||
|
`INSERT INTO performers_scenes (performer_id, scene_id) VALUES (:performer_id, :scene_id)`,
|
||||||
|
join,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *joinsQueryBuilder) UpdatePerformersScenes(sceneID int, updatedJoins []PerformersScenes, tx *sqlx.Tx) error {
|
||||||
|
ensureTx(tx)
|
||||||
|
|
||||||
|
// Delete the existing joins and then create new ones
|
||||||
|
_, err := tx.Exec("DELETE FROM performers_scenes WHERE scene_id = ?", sceneID)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return qb.CreatePerformersScenes(updatedJoins, tx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *joinsQueryBuilder) CreateScenesTags(newJoins []ScenesTags, tx *sqlx.Tx) error {
|
||||||
|
ensureTx(tx)
|
||||||
|
for _, join := range newJoins {
|
||||||
|
_, err := tx.NamedExec(
|
||||||
|
`INSERT INTO scenes_tags (scene_id, tag_id) VALUES (:scene_id, :tag_id)`,
|
||||||
|
join,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *joinsQueryBuilder) UpdateScenesTags(sceneID int, updatedJoins []ScenesTags, tx *sqlx.Tx) error {
|
||||||
|
ensureTx(tx)
|
||||||
|
|
||||||
|
// Delete the existing joins and then create new ones
|
||||||
|
_, err := tx.Exec("DELETE FROM scenes_tags WHERE scene_id = ?", sceneID)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return qb.CreateScenesTags(updatedJoins, tx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *joinsQueryBuilder) CreateSceneMarkersTags(newJoins []SceneMarkersTags, tx *sqlx.Tx) error {
|
||||||
|
ensureTx(tx)
|
||||||
|
for _, join := range newJoins {
|
||||||
|
_, err := tx.NamedExec(
|
||||||
|
`INSERT INTO scene_markers_tags (scene_marker_id, tag_id) VALUES (:scene_marker_id, :tag_id)`,
|
||||||
|
join,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *joinsQueryBuilder) UpdateSceneMarkersTags(sceneMarkerID int, updatedJoins []SceneMarkersTags, tx *sqlx.Tx) error {
|
||||||
|
ensureTx(tx)
|
||||||
|
|
||||||
|
// Delete the existing joins and then create new ones
|
||||||
|
_, err := tx.Exec("DELETE FROM scene_markers_tags WHERE scene_marker_id = ?", sceneMarkerID)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return qb.CreateSceneMarkersTags(updatedJoins, tx)
|
||||||
|
}
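A hedged sketch of how these update helpers are meant to be used inside a single transaction; exampleUpdatePerformerJoins is hypothetical and not part of the commit.

package models

import (
	"context"

	"github.com/stashapp/stash/internal/database"
)

// exampleUpdatePerformerJoins rebuilds the performer joins for one scene: the
// builder deletes the existing rows and recreates them, so the caller only has
// to supply the full desired set and commit or roll back the transaction.
func exampleUpdatePerformerJoins(sceneID int, performerIDs []int) error {
	var joins []PerformersScenes
	for _, performerID := range performerIDs {
		joins = append(joins, PerformersScenes{PerformerID: performerID, SceneID: sceneID})
	}

	tx := database.DB.MustBeginTx(context.TODO(), nil)
	jqb := NewJoinsQueryBuilder()
	if err := jqb.UpdatePerformersScenes(sceneID, joins, tx); err != nil {
		_ = tx.Rollback()
		return err
	}
	return tx.Commit()
}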
|
||||||
175
internal/models/querybuilder_performer.go
Normal file
|
|
@ -0,0 +1,175 @@
|
||||||
|
package models
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
"github.com/jmoiron/sqlx"
|
||||||
|
"github.com/stashapp/stash/internal/database"
|
||||||
|
)
|
||||||
|
|
||||||
|
type performerQueryBuilder struct {}
|
||||||
|
|
||||||
|
func NewPerformerQueryBuilder() performerQueryBuilder {
|
||||||
|
return performerQueryBuilder{}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *performerQueryBuilder) Create(newPerformer Performer, tx *sqlx.Tx) (*Performer, error) {
|
||||||
|
ensureTx(tx)
|
||||||
|
result, err := tx.NamedExec(
|
||||||
|
`INSERT INTO performers (image, checksum, name, url, twitter, instagram, birthdate, ethnicity, country,
|
||||||
|
eye_color, height, measurements, fake_tits, career_length, tattoos, piercings,
|
||||||
|
aliases, favorite, created_at, updated_at)
|
||||||
|
VALUES (:image, :checksum, :name, :url, :twitter, :instagram, :birthdate, :ethnicity, :country,
|
||||||
|
:eye_color, :height, :measurements, :fake_tits, :career_length, :tattoos, :piercings,
|
||||||
|
:aliases, :favorite, :created_at, :updated_at)
|
||||||
|
`,
|
||||||
|
newPerformer,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
performerID, err := result.LastInsertId()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := tx.Get(&newPerformer, `SELECT * FROM performers WHERE id = ? LIMIT 1`, performerID); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &newPerformer, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *performerQueryBuilder) Update(updatedPerformer Performer, tx *sqlx.Tx) (*Performer, error) {
|
||||||
|
ensureTx(tx)
|
||||||
|
_, err := tx.NamedExec(
|
||||||
|
`UPDATE performers SET `+SqlGenKeys(updatedPerformer)+` WHERE performers.id = :id`,
|
||||||
|
updatedPerformer,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := tx.Get(&updatedPerformer, `SELECT * FROM performers WHERE id = ? LIMIT 1`, updatedPerformer.ID); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &updatedPerformer, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *performerQueryBuilder) Find(id int) (*Performer, error) {
|
||||||
|
query := "SELECT * FROM performers WHERE id = ? LIMIT 1"
|
||||||
|
args := []interface{}{id}
|
||||||
|
results, err := qb.queryPerformers(query, args, nil)
|
||||||
|
if err != nil || len(results) < 1 {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &results[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *performerQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) ([]Performer, error) {
|
||||||
|
query := `
|
||||||
|
SELECT performers.* FROM performers
|
||||||
|
LEFT JOIN performers_scenes as scenes_join on scenes_join.performer_id = performers.id
|
||||||
|
LEFT JOIN scenes on scenes_join.scene_id = scenes.id
|
||||||
|
WHERE scenes.id = ?
|
||||||
|
GROUP BY performers.id
|
||||||
|
`
|
||||||
|
args := []interface{}{sceneID}
|
||||||
|
return qb.queryPerformers(query, args, tx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *performerQueryBuilder) FindByNames(names []string, tx *sqlx.Tx) ([]Performer, error) {
|
||||||
|
query := "SELECT * FROM performers WHERE name IN " + getInBinding(len(names))
|
||||||
|
var args []interface{}
|
||||||
|
for _, name := range names {
|
||||||
|
args = append(args, name)
|
||||||
|
}
|
||||||
|
return qb.queryPerformers(query, args, tx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *performerQueryBuilder) All() ([]Performer, error) {
|
||||||
|
return qb.queryPerformers(selectAll("performers") + qb.getPerformerSort(nil), nil, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *performerQueryBuilder) Query(performerFilter *PerformerFilterType, findFilter *FindFilterType) ([]Performer, int) {
|
||||||
|
if performerFilter == nil {
|
||||||
|
performerFilter = &PerformerFilterType{}
|
||||||
|
}
|
||||||
|
if findFilter == nil {
|
||||||
|
findFilter = &FindFilterType{}
|
||||||
|
}
|
||||||
|
|
||||||
|
whereClauses := []string{}
|
||||||
|
havingClauses := []string{}
|
||||||
|
args := []interface{}{}
|
||||||
|
body := selectDistinctIDs("performers")
|
||||||
|
body += `
|
||||||
|
left join performers_scenes as scenes_join on scenes_join.performer_id = performers.id
|
||||||
|
left join scenes on scenes_join.scene_id = scenes.id
|
||||||
|
`
|
||||||
|
|
||||||
|
if q := findFilter.Q; q != nil && *q != "" {
|
||||||
|
searchColumns := []string{"performers.name", "performers.checksum", "performers.birthdate", "performers.ethnicity"}
|
||||||
|
whereClauses = append(whereClauses, getSearch(searchColumns, *q))
|
||||||
|
}
|
||||||
|
|
||||||
|
if favoritesFilter := performerFilter.FilterFavorites; favoritesFilter != nil {
|
||||||
|
if *favoritesFilter {
|
||||||
|
whereClauses = append(whereClauses, "performers.favorite = 1")
|
||||||
|
} else {
|
||||||
|
whereClauses = append(whereClauses, "performers.favorite = 0")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
sortAndPagination := qb.getPerformerSort(findFilter) + getPagination(findFilter)
|
||||||
|
idsResult, countResult := executeFindQuery("performers", body, args, sortAndPagination, whereClauses, havingClauses)
|
||||||
|
|
||||||
|
var performers []Performer
|
||||||
|
for _, id := range idsResult {
|
||||||
|
performer, _ := qb.Find(id)
|
||||||
|
performers = append(performers, *performer)
|
||||||
|
}
|
||||||
|
|
||||||
|
return performers, countResult
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *performerQueryBuilder) getPerformerSort(findFilter *FindFilterType) string {
|
||||||
|
var sort string
|
||||||
|
var direction string
|
||||||
|
if findFilter == nil {
|
||||||
|
sort = "name"
|
||||||
|
direction = "ASC"
|
||||||
|
} else {
|
||||||
|
sort = findFilter.GetSort("name")
|
||||||
|
direction = findFilter.GetDirection()
|
||||||
|
}
|
||||||
|
return getSort(sort, direction, "performers")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *performerQueryBuilder) queryPerformers(query string, args []interface{}, tx *sqlx.Tx) ([]Performer, error) {
|
||||||
|
var rows *sqlx.Rows
|
||||||
|
var err error
|
||||||
|
if tx != nil {
|
||||||
|
rows, err = tx.Queryx(query, args...)
|
||||||
|
} else {
|
||||||
|
rows, err = database.DB.Queryx(query, args...)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil && err != sql.ErrNoRows {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
performers := make([]Performer, 0)
|
||||||
|
performer := Performer{}
|
||||||
|
for rows.Next() {
|
||||||
|
if err := rows.StructScan(&performer); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
performers = append(performers, performer)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return performers, nil
|
||||||
|
}
|
||||||
284
internal/models/querybuilder_scene.go
Normal file
|
|
@ -0,0 +1,284 @@
|
||||||
|
package models
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
"github.com/jmoiron/sqlx"
|
||||||
|
"github.com/stashapp/stash/internal/database"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
const scenesForPerformerQuery = `
|
||||||
|
SELECT scenes.* FROM scenes
|
||||||
|
LEFT JOIN performers_scenes as performers_join on performers_join.scene_id = scenes.id
|
||||||
|
LEFT JOIN performers on performers_join.performer_id = performers.id
|
||||||
|
WHERE performers.id = ?
|
||||||
|
GROUP BY scenes.id
|
||||||
|
`
|
||||||
|
|
||||||
|
const scenesForStudioQuery = `
|
||||||
|
SELECT scenes.* FROM scenes
|
||||||
|
JOIN studios ON studios.id = scenes.studio_id
|
||||||
|
WHERE studios.id = ?
|
||||||
|
GROUP BY scenes.id
|
||||||
|
`
|
||||||
|
|
||||||
|
const scenesForTagQuery = `
|
||||||
|
SELECT scenes.* FROM scenes
|
||||||
|
LEFT JOIN scenes_tags as tags_join on tags_join.scene_id = scenes.id
|
||||||
|
LEFT JOIN tags on tags_join.tag_id = tags.id
|
||||||
|
WHERE tags.id = ?
|
||||||
|
GROUP BY scenes.id
|
||||||
|
`
|
||||||
|
|
||||||
|
type sceneQueryBuilder struct{}
|
||||||
|
|
||||||
|
func NewSceneQueryBuilder() sceneQueryBuilder {
|
||||||
|
return sceneQueryBuilder{}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneQueryBuilder) Create(newScene Scene, tx *sqlx.Tx) (*Scene, error) {
|
||||||
|
ensureTx(tx)
|
||||||
|
result, err := tx.NamedExec(
|
||||||
|
`INSERT INTO scenes (checksum, path, title, details, url, date, rating, size, duration, video_codec,
|
||||||
|
audio_codec, width, height, framerate, bitrate, studio_id, created_at, updated_at)
|
||||||
|
VALUES (:checksum, :path, :title, :details, :url, :date, :rating, :size, :duration, :video_codec,
|
||||||
|
:audio_codec, :width, :height, :framerate, :bitrate, :studio_id, :created_at, :updated_at)
|
||||||
|
`,
|
||||||
|
newScene,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
sceneID, err := result.LastInsertId()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if err := tx.Get(&newScene, `SELECT * FROM scenes WHERE id = ? LIMIT 1`, sceneID); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &newScene, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneQueryBuilder) Update(updatedScene Scene, tx *sqlx.Tx) (*Scene, error) {
|
||||||
|
ensureTx(tx)
|
||||||
|
_, err := tx.NamedExec(
|
||||||
|
`UPDATE scenes SET `+SqlGenKeys(updatedScene)+` WHERE scenes.id = :id`,
|
||||||
|
updatedScene,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := tx.Get(&updatedScene, `SELECT * FROM scenes WHERE id = ? LIMIT 1`, updatedScene.ID); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &updatedScene, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneQueryBuilder) Find(id int) (*Scene, error) {
|
||||||
|
query := "SELECT * FROM scenes WHERE id = ? LIMIT 1"
|
||||||
|
args := []interface{}{id}
|
||||||
|
return qb.queryScene(query, args, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneQueryBuilder) FindByChecksum(checksum string) (*Scene, error) {
|
||||||
|
query := "SELECT * FROM scenes WHERE checksum = ? LIMIT 1"
|
||||||
|
args := []interface{}{checksum}
|
||||||
|
return qb.queryScene(query, args, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneQueryBuilder) FindByPath(path string) (*Scene, error) {
|
||||||
|
query := "SELECT * FROM scenes WHERE path = ? LIMIT 1"
|
||||||
|
args := []interface{}{path}
|
||||||
|
return qb.queryScene(query, args, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneQueryBuilder) FindByPerformerID(performerID int) ([]Scene, error) {
|
||||||
|
args := []interface{}{performerID}
|
||||||
|
return qb.queryScenes(scenesForPerformerQuery, args, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneQueryBuilder) CountByPerformerID(performerID int) (int, error) {
|
||||||
|
args := []interface{}{performerID}
|
||||||
|
return runCountQuery(buildCountQuery(scenesForPerformerQuery), args)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneQueryBuilder) FindByStudioID(studioID int) ([]Scene, error) {
|
||||||
|
args := []interface{}{studioID}
|
||||||
|
return qb.queryScenes(scenesForStudioQuery, args, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneQueryBuilder) CountByStudioID(studioID int) (int, error) {
|
||||||
|
args := []interface{}{studioID}
|
||||||
|
return runCountQuery(buildCountQuery(scenesForStudioQuery), args)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneQueryBuilder) CountByTagID(tagID int) (int, error) {
|
||||||
|
args := []interface{}{tagID}
|
||||||
|
return runCountQuery(buildCountQuery(scenesForTagQuery), args)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneQueryBuilder) Wall(q *string) ([]Scene, error) {
|
||||||
|
s := ""
|
||||||
|
if q != nil {
|
||||||
|
s = *q
|
||||||
|
}
|
||||||
|
query := "SELECT scenes.* FROM scenes WHERE scenes.details LIKE '%" + s + "%' ORDER BY RANDOM() LIMIT 80"
|
||||||
|
return qb.queryScenes(query, nil, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneQueryBuilder) All() ([]Scene, error) {
|
||||||
|
return qb.queryScenes(selectAll("scenes") + qb.getSceneSort(nil), nil, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneQueryBuilder) Query(sceneFilter *SceneFilterType, findFilter *FindFilterType) ([]Scene, int) {
|
||||||
|
if sceneFilter == nil {
|
||||||
|
sceneFilter = &SceneFilterType{}
|
||||||
|
}
|
||||||
|
if findFilter == nil {
|
||||||
|
findFilter = &FindFilterType{}
|
||||||
|
}
|
||||||
|
|
||||||
|
whereClauses := []string{}
|
||||||
|
havingClauses := []string{}
|
||||||
|
args := []interface{}{}
|
||||||
|
body := selectDistinctIDs("scenes")
|
||||||
|
body = body + `
|
||||||
|
left join scene_markers on scene_markers.scene_id = scenes.id
|
||||||
|
left join performers_scenes as performers_join on performers_join.scene_id = scenes.id
|
||||||
|
left join performers on performers_join.performer_id = performers.id
|
||||||
|
left join studios as studio on studio.id = scenes.studio_id
|
||||||
|
left join galleries as gallery on gallery.scene_id = scenes.id
|
||||||
|
left join scenes_tags as tags_join on tags_join.scene_id = scenes.id
|
||||||
|
left join tags on tags_join.tag_id = tags.id
|
||||||
|
`
|
||||||
|
|
||||||
|
if q := findFilter.Q; q != nil && *q != "" {
|
||||||
|
searchColumns := []string{"scenes.title", "scenes.details", "scenes.path", "scenes.checksum", "scene_markers.title"}
|
||||||
|
whereClauses = append(whereClauses, getSearch(searchColumns, *q))
|
||||||
|
}
|
||||||
|
|
||||||
|
if rating := sceneFilter.Rating; rating != nil {
|
||||||
|
whereClauses = append(whereClauses, "rating = ?")
|
||||||
|
args = append(args, *sceneFilter.Rating)
|
||||||
|
}
|
||||||
|
|
||||||
|
if resolutionFilter := sceneFilter.Resolution; resolutionFilter != nil {
|
||||||
|
if resolution := resolutionFilter.String(); resolutionFilter.IsValid() {
|
||||||
|
switch resolution {
|
||||||
|
case "LOW":
|
||||||
|
whereClauses = append(whereClauses, "(scenes.height >= 240 AND scenes.height < 480)")
|
||||||
|
case "STANDARD":
|
||||||
|
whereClauses = append(whereClauses, "(scenes.height >= 480 AND scenes.height < 720)")
|
||||||
|
case "STANDARD_HD":
|
||||||
|
whereClauses = append(whereClauses, "(scenes.height >= 720 AND scenes.height < 1080)")
|
||||||
|
case "FULL_HD":
|
||||||
|
whereClauses = append(whereClauses, "(scenes.height >= 1080 AND scenes.height < 2160)")
|
||||||
|
case "FOUR_K":
|
||||||
|
whereClauses = append(whereClauses, "scenes.height >= 2160")
|
||||||
|
default:
|
||||||
|
whereClauses = append(whereClauses, "scenes.height < 240")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if hasMarkersFilter := sceneFilter.HasMarkers; hasMarkersFilter != nil {
|
||||||
|
if strings.Compare(*hasMarkersFilter, "true") == 0 {
|
||||||
|
havingClauses = append(havingClauses, "count(scene_markers.scene_id) > 0")
|
||||||
|
} else {
|
||||||
|
whereClauses = append(whereClauses, "scene_markers.id IS NULL")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if isMissingFilter := sceneFilter.IsMissing; isMissingFilter != nil && *isMissingFilter != "" {
|
||||||
|
switch *isMissingFilter {
|
||||||
|
case "gallery":
|
||||||
|
whereClauses = append(whereClauses, "gallery.scene_id IS NULL")
|
||||||
|
case "studio":
|
||||||
|
whereClauses = append(whereClauses, "scenes.studio_id IS NULL")
|
||||||
|
case "performers":
|
||||||
|
whereClauses = append(whereClauses, "performers_join.scene_id IS NULL")
|
||||||
|
default:
|
||||||
|
whereClauses = append(whereClauses, "scenes."+*isMissingFilter+" IS NULL")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if tagsFilter := sceneFilter.Tags; len(tagsFilter) > 0 {
|
||||||
|
for _, tagId := range tagsFilter {
|
||||||
|
args = append(args, tagId)
|
||||||
|
}
|
||||||
|
|
||||||
|
whereClauses = append(whereClauses, "tags.id IN "+getInBinding(len(tagsFilter)))
|
||||||
|
havingClauses = append(havingClauses, "count(distinct tags.id) IS "+strconv.Itoa(len(tagsFilter)))
|
||||||
|
}
|
||||||
|
|
||||||
|
if performerID := sceneFilter.PerformerID; performerID != nil {
|
||||||
|
whereClauses = append(whereClauses, "performers.id = ?")
|
||||||
|
args = append(args, *performerID)
|
||||||
|
}
|
||||||
|
|
||||||
|
if studioID := sceneFilter.StudioID; studioID != nil {
|
||||||
|
whereClauses = append(whereClauses, "studio.id = ?")
|
||||||
|
args = append(args, *studioID)
|
||||||
|
}
|
||||||
|
|
||||||
|
sortAndPagination := qb.getSceneSort(findFilter) + getPagination(findFilter)
|
||||||
|
idsResult, countResult := executeFindQuery("scenes", body, args, sortAndPagination, whereClauses, havingClauses)
|
||||||
|
|
||||||
|
var scenes []Scene
|
||||||
|
for _, id := range idsResult {
|
||||||
|
scene, _ := qb.Find(id)
|
||||||
|
scenes = append(scenes, *scene)
|
||||||
|
}
|
||||||
|
|
||||||
|
return scenes, countResult
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneQueryBuilder) getSceneSort(findFilter *FindFilterType) string {
|
||||||
|
if findFilter == nil {
|
||||||
|
return " ORDER BY scenes.path, scenes.date ASC "
|
||||||
|
} else {
|
||||||
|
sort := findFilter.GetSort("title")
|
||||||
|
direction := findFilter.GetDirection()
|
||||||
|
return getSort(sort, direction, "scenes")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneQueryBuilder) queryScene(query string, args []interface{}, tx *sqlx.Tx) (*Scene, error) {
|
||||||
|
results, err := qb.queryScenes(query, args, tx)
|
||||||
|
if err != nil || len(results) < 1 {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &results[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneQueryBuilder) queryScenes(query string, args []interface{}, tx *sqlx.Tx) ([]Scene, error) {
|
||||||
|
var rows *sqlx.Rows
|
||||||
|
var err error
|
||||||
|
if tx != nil {
|
||||||
|
rows, err = tx.Queryx(query, args...)
|
||||||
|
} else {
|
||||||
|
rows, err = database.DB.Queryx(query, args...)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil && err != sql.ErrNoRows {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
scenes := make([]Scene, 0)
|
||||||
|
scene := Scene{}
|
||||||
|
for rows.Next() {
|
||||||
|
if err := rows.StructScan(&scene); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
scenes = append(scenes, scene)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return scenes, nil
|
||||||
|
}
|
||||||
255
internal/models/querybuilder_scene_marker.go
Normal file
|
|
@ -0,0 +1,255 @@
|
||||||
|
package models
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
"github.com/jmoiron/sqlx"
|
||||||
|
"github.com/stashapp/stash/internal/database"
|
||||||
|
"strconv"
|
||||||
|
)
|
||||||
|
|
||||||
|
const sceneMarkersForTagQuery = `
|
||||||
|
SELECT scene_markers.* FROM scene_markers
|
||||||
|
LEFT JOIN scene_markers_tags as tags_join on tags_join.scene_marker_id = scene_markers.id
|
||||||
|
LEFT JOIN tags on tags_join.tag_id = tags.id
|
||||||
|
WHERE tags.id = ?
|
||||||
|
GROUP BY scene_markers.id
|
||||||
|
`
|
||||||
|
|
||||||
|
type sceneMarkerQueryBuilder struct {}
|
||||||
|
|
||||||
|
func NewSceneMarkerQueryBuilder() sceneMarkerQueryBuilder {
|
||||||
|
return sceneMarkerQueryBuilder{}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneMarkerQueryBuilder) Create(newSceneMarker SceneMarker, tx *sqlx.Tx) (*SceneMarker, error) {
|
||||||
|
ensureTx(tx)
|
||||||
|
result, err := tx.NamedExec(
|
||||||
|
`INSERT INTO scene_markers (title, seconds, primary_tag_id, scene_id, created_at, updated_at)
|
||||||
|
VALUES (:title, :seconds, :primary_tag_id, :scene_id, :created_at, :updated_at)
|
||||||
|
`,
|
||||||
|
newSceneMarker,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
sceneMarkerID, err := result.LastInsertId()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := tx.Get(&newSceneMarker, `SELECT * FROM scene_markers WHERE id = ? LIMIT 1`, sceneMarkerID); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &newSceneMarker, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneMarkerQueryBuilder) Update(updatedSceneMarker SceneMarker, tx *sqlx.Tx) (*SceneMarker, error) {
|
||||||
|
ensureTx(tx)
|
||||||
|
_, err := tx.NamedExec(
|
||||||
|
`UPDATE scene_markers SET `+SqlGenKeys(updatedSceneMarker)+` WHERE scene_markers.id = :id`,
|
||||||
|
updatedSceneMarker,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := tx.Get(&updatedSceneMarker, `SELECT * FROM scene_markers WHERE id = ? LIMIT 1`, updatedSceneMarker.ID); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &updatedSceneMarker, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneMarkerQueryBuilder) Destroy(id string, tx *sqlx.Tx) error {
|
||||||
|
return executeDeleteQuery("scene_markers", id, tx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneMarkerQueryBuilder) Find(id int) (*SceneMarker, error) {
|
||||||
|
query := "SELECT * FROM scene_markers WHERE id = ? LIMIT 1"
|
||||||
|
args := []interface{}{id}
|
||||||
|
results, err := qb.querySceneMarkers(query, args, nil)
|
||||||
|
if err != nil || len(results) < 1 {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &results[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneMarkerQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) ([]SceneMarker, error) {
|
||||||
|
query := `
|
||||||
|
SELECT scene_markers.* FROM scene_markers
|
||||||
|
JOIN scenes ON scenes.id = scene_markers.scene_id
|
||||||
|
WHERE scenes.id = ?
|
||||||
|
GROUP BY scene_markers.id
|
||||||
|
ORDER BY scene_markers.seconds ASC
|
||||||
|
`
|
||||||
|
args := []interface{}{sceneID}
|
||||||
|
return qb.querySceneMarkers(query, args, tx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneMarkerQueryBuilder) CountByTagID(tagID int) (int, error) {
|
||||||
|
args := []interface{}{tagID}
|
||||||
|
return runCountQuery(buildCountQuery(sceneMarkersForTagQuery), args)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneMarkerQueryBuilder) GetMarkerStrings(q *string, sort *string) ([]*MarkerStringsResultType, error) {
|
||||||
|
query := "SELECT count(*) as `count`, scene_markers.id as id, scene_markers.title as title FROM scene_markers"
|
||||||
|
if q != nil {
|
||||||
|
query = query + " WHERE title LIKE '%" + *q + "%'"
|
||||||
|
}
|
||||||
|
query = query + " GROUP BY title"
|
||||||
|
if sort != nil && *sort == "count" {
|
||||||
|
query = query + " ORDER BY `count` DESC"
|
||||||
|
} else {
|
||||||
|
query = query + " ORDER BY title ASC"
|
||||||
|
}
|
||||||
|
args := []interface{}{}
|
||||||
|
return qb.queryMarkerStringsResultType(query, args)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneMarkerQueryBuilder) Wall(q *string) ([]SceneMarker, error) {
|
||||||
|
s := ""
|
||||||
|
if q != nil {
|
||||||
|
s = *q
|
||||||
|
}
|
||||||
|
query := "SELECT scene_markers.* FROM scene_markers WHERE scene_markers.title LIKE '%" + s + "%' ORDER BY RANDOM() LIMIT 80"
|
||||||
|
return qb.querySceneMarkers(query, nil, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneMarkerQueryBuilder) Query(sceneMarkerFilter *SceneMarkerFilterType, findFilter *FindFilterType) ([]SceneMarker, int) {
|
||||||
|
if sceneMarkerFilter == nil {
|
||||||
|
sceneMarkerFilter = &SceneMarkerFilterType{}
|
||||||
|
}
|
||||||
|
if findFilter == nil {
|
||||||
|
findFilter = &FindFilterType{}
|
||||||
|
}
|
||||||
|
|
||||||
|
whereClauses := []string{}
|
||||||
|
havingClauses := []string{}
|
||||||
|
args := []interface{}{}
|
||||||
|
body := selectDistinctIDs("scene_markers")
|
||||||
|
body = body + `
|
||||||
|
left join tags as primary_tag on primary_tag.id = scene_markers.primary_tag_id
|
||||||
|
left join scenes as scene on scene.id = scene_markers.scene_id
|
||||||
|
left join scene_markers_tags as tags_join on tags_join.scene_marker_id = scene_markers.id
|
||||||
|
left join tags on tags_join.tag_id = tags.id
|
||||||
|
`
|
||||||
|
|
||||||
|
if tagIDs := sceneMarkerFilter.Tags; tagIDs != nil {
|
||||||
|
//select `scene_markers`.* from `scene_markers`
|
||||||
|
//left join `tags` as `primary_tags_join`
|
||||||
|
// on `primary_tags_join`.`id` = `scene_markers`.`primary_tag_id`
|
||||||
|
// and `primary_tags_join`.`id` in ('3', '37', '9', '89')
|
||||||
|
//left join `scene_markers_tags` as `tags_join`
|
||||||
|
// on `tags_join`.`scene_marker_id` = `scene_markers`.`id`
|
||||||
|
// and `tags_join`.`tag_id` in ('3', '37', '9', '89')
|
||||||
|
//group by `scene_markers`.`id`
|
||||||
|
//having ((count(distinct `primary_tags_join`.`id`) + count(distinct `tags_join`.`tag_id`)) = 4)
|
||||||
|
|
||||||
|
length := len(tagIDs)
|
||||||
|
body += " LEFT JOIN tags AS ptj ON ptj.id = scene_markers.primary_tag_id AND ptj.id IN " + getInBinding(length)
|
||||||
|
body += " LEFT JOIN scene_markers_tags AS tj ON tj.scene_marker_id = scene_markers.id AND tj.tag_id IN " + getInBinding(length)
|
||||||
|
havingClauses = append(havingClauses, "((COUNT(DISTINCT ptj.id) + COUNT(DISTINCT tj.tag_id)) = " + strconv.Itoa(length) +")")
|
||||||
|
for _, tagID := range tagIDs {
|
||||||
|
args = append(args, tagID)
|
||||||
|
}
|
||||||
|
for _, tagID := range tagIDs {
|
||||||
|
args = append(args, tagID)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if sceneTagIDs := sceneMarkerFilter.SceneTags; sceneTagIDs != nil {
|
||||||
|
length := len(sceneTagIDs)
|
||||||
|
body += " LEFT JOIN scenes_tags AS scene_tags_join ON scene_tags_join.scene_id = scene.id AND scene_tags_join.tag_id IN " + getInBinding(length)
|
||||||
|
havingClauses = append(havingClauses, "COUNT(DISTINCT scene_tags_join.tag_id) = " + strconv.Itoa(length))
|
||||||
|
for _, tagID := range sceneTagIDs {
|
||||||
|
args = append(args, tagID)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if performerIDs := sceneMarkerFilter.Performers; performerIDs != nil {
|
||||||
|
length := len(performerIDs)
|
||||||
|
body += " LEFT JOIN performers_scenes as scene_performers ON scene.id = scene_performers.scene_id"
|
||||||
|
whereClauses = append(whereClauses, "scene_performers.performer_id IN " + getInBinding(length))
|
||||||
|
for _, performerID := range performerIDs {
|
||||||
|
args = append(args, performerID)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if q := findFilter.Q; q != nil && *q != "" {
|
||||||
|
searchColumns := []string{"scene_markers.title", "scene.title"}
|
||||||
|
whereClauses = append(whereClauses, getSearch(searchColumns, *q))
|
||||||
|
}
|
||||||
|
|
||||||
|
if tagID := sceneMarkerFilter.TagID; tagID != nil {
|
||||||
|
whereClauses = append(whereClauses, "(scene_markers.primary_tag_id = "+*tagID+" OR tags.id = "+*tagID+")")
|
||||||
|
}
|
||||||
|
|
||||||
|
sortAndPagination := qb.getSceneMarkerSort(findFilter) + getPagination(findFilter)
|
||||||
|
idsResult, countResult := executeFindQuery("scene_markers", body, args, sortAndPagination, whereClauses, havingClauses)
|
||||||
|
|
||||||
|
var sceneMarkers []SceneMarker
|
||||||
|
for _, id := range idsResult {
|
||||||
|
sceneMarker, _ := qb.Find(id)
|
||||||
|
sceneMarkers = append(sceneMarkers, *sceneMarker)
|
||||||
|
}
|
||||||
|
|
||||||
|
return sceneMarkers, countResult
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneMarkerQueryBuilder) getSceneMarkerSort(findFilter *FindFilterType) string {
|
||||||
|
sort := findFilter.GetSort("title")
|
||||||
|
direction := findFilter.GetDirection()
|
||||||
|
return getSort(sort, direction, "scene_markers")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneMarkerQueryBuilder) querySceneMarkers(query string, args []interface{}, tx *sqlx.Tx) ([]SceneMarker, error) {
|
||||||
|
var rows *sqlx.Rows
|
||||||
|
var err error
|
||||||
|
if tx != nil {
|
||||||
|
rows, err = tx.Queryx(query, args...)
|
||||||
|
} else {
|
||||||
|
rows, err = database.DB.Queryx(query, args...)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil && err != sql.ErrNoRows {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
sceneMarkers := make([]SceneMarker, 0)
|
||||||
|
sceneMarker := SceneMarker{}
|
||||||
|
for rows.Next() {
|
||||||
|
if err := rows.StructScan(&sceneMarker); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
sceneMarkers = append(sceneMarkers, sceneMarker)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return sceneMarkers, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (qb *sceneMarkerQueryBuilder) queryMarkerStringsResultType(query string, args []interface{}) ([]*MarkerStringsResultType, error) {
|
||||||
|
rows, err := database.DB.Queryx(query, args...)
|
||||||
|
if err != nil && err != sql.ErrNoRows {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
markerStrings := make([]*MarkerStringsResultType, 0)
|
||||||
|
for rows.Next() {
|
||||||
|
markerString := MarkerStringsResultType{}
|
||||||
|
if err := rows.StructScan(&markerString); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
markerStrings = append(markerStrings, &markerString)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return markerStrings, nil
|
||||||
|
}
|
||||||
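One subtlety in the tag branch of Query above: the same tag IDs are appended to args twice because getInBinding(length) is interpolated into two separate JOIN conditions, and the '?' placeholders bind strictly in the order they appear in the SQL. A minimal sketch of that ordering, assuming the same models package; the IDs are hypothetical.

package models

import "fmt"

// exampleTagBindingOrder is illustrative only.
func exampleTagBindingOrder() {
	tagIDs := []string{"3", "37", "9"}

	fmt.Println(getInBinding(len(tagIDs))) // (?, ?, ?)

	var args []interface{}
	for _, id := range tagIDs {
		args = append(args, id) // binds the primary-tag join's IN clause
	}
	for _, id := range tagIDs {
		args = append(args, id) // binds the scene_markers_tags join's IN clause
	}
	fmt.Println(len(args)) // 6 placeholders bound in total
}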
113
internal/models/querybuilder_scraped_item.go
Normal file
@ -0,0 +1,113 @@
package models

import (
	"database/sql"
	"github.com/jmoiron/sqlx"
	"github.com/stashapp/stash/internal/database"
)

type scrapedItemQueryBuilder struct{}

func NewScrapedItemQueryBuilder() scrapedItemQueryBuilder {
	return scrapedItemQueryBuilder{}
}

func (qb *scrapedItemQueryBuilder) Create(newScrapedItem ScrapedItem, tx *sqlx.Tx) (*ScrapedItem, error) {
	ensureTx(tx)
	result, err := tx.NamedExec(
		`INSERT INTO scraped_items (title, description, url, date, rating, tags, models, episode, gallery_filename,
			gallery_url, video_filename, video_url, studio_id, created_at, updated_at)
			VALUES (:title, :description, :url, :date, :rating, :tags, :models, :episode, :gallery_filename,
			:gallery_url, :video_filename, :video_url, :studio_id, :created_at, :updated_at)
		`,
		newScrapedItem,
	)
	if err != nil {
		return nil, err
	}
	scrapedItemID, err := result.LastInsertId()
	if err != nil {
		return nil, err
	}
	if err := tx.Get(&newScrapedItem, `SELECT * FROM scraped_items WHERE id = ? LIMIT 1`, scrapedItemID); err != nil {
		return nil, err
	}
	return &newScrapedItem, nil
}

func (qb *scrapedItemQueryBuilder) Update(updatedScrapedItem ScrapedItem, tx *sqlx.Tx) (*ScrapedItem, error) {
	ensureTx(tx)
	_, err := tx.NamedExec(
		`UPDATE scraped_items SET `+SqlGenKeys(updatedScrapedItem)+` WHERE scraped_items.id = :id`,
		updatedScrapedItem,
	)
	if err != nil {
		return nil, err
	}

	if err := tx.Get(&updatedScrapedItem, `SELECT * FROM scraped_items WHERE id = ? LIMIT 1`, updatedScrapedItem.ID); err != nil {
		return nil, err
	}
	return &updatedScrapedItem, nil
}

func (qb *scrapedItemQueryBuilder) Find(id int) (*ScrapedItem, error) {
	query := "SELECT * FROM scraped_items WHERE id = ? LIMIT 1"
	args := []interface{}{id}
	return qb.queryScrapedItem(query, args, nil)
}

func (qb *scrapedItemQueryBuilder) All() ([]ScrapedItem, error) {
	return qb.queryScrapedItems(selectAll("scraped_items") + qb.getScrapedItemsSort(nil), nil, nil)
}

func (qb *scrapedItemQueryBuilder) getScrapedItemsSort(findFilter *FindFilterType) string {
	var sort string
	var direction string
	if findFilter == nil {
		sort = "id" // TODO studio_id and title
		direction = "ASC"
	} else {
		sort = findFilter.GetSort("id")
		direction = findFilter.GetDirection()
	}
	return getSort(sort, direction, "scraped_items")
}

func (qb *scrapedItemQueryBuilder) queryScrapedItem(query string, args []interface{}, tx *sqlx.Tx) (*ScrapedItem, error) {
	results, err := qb.queryScrapedItems(query, args, tx)
	if err != nil || len(results) < 1 {
		return nil, err
	}
	return &results[0], nil
}

func (qb *scrapedItemQueryBuilder) queryScrapedItems(query string, args []interface{}, tx *sqlx.Tx) ([]ScrapedItem, error) {
	var rows *sqlx.Rows
	var err error
	if tx != nil {
		rows, err = tx.Queryx(query, args...)
	} else {
		rows, err = database.DB.Queryx(query, args...)
	}

	if err != nil && err != sql.ErrNoRows {
		return nil, err
	}
	defer rows.Close()

	scrapedItems := make([]ScrapedItem, 0)
	scrapedItem := ScrapedItem{}
	for rows.Next() {
		if err := rows.StructScan(&scrapedItem); err != nil {
			return nil, err
		}
		scrapedItems = append(scrapedItems, scrapedItem)
	}

	if err := rows.Err(); err != nil {
		return nil, err
	}

	return scrapedItems, nil
}
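Create and Update above insist on an open transaction via ensureTx, which panics on a nil *sqlx.Tx. A hedged sketch of the calling pattern follows; it assumes database.DB is an initialized *sqlx.DB (as the builders above already rely on), and MustBegin/Commit/Rollback are standard sqlx/database-sql calls rather than anything defined in this commit.

package models

import "github.com/stashapp/stash/internal/database"

// exampleCreateScrapedItem is illustrative only.
func exampleCreateScrapedItem(item ScrapedItem) (*ScrapedItem, error) {
	tx := database.DB.MustBegin() // ensureTx would panic if this were nil
	qb := NewScrapedItemQueryBuilder()

	created, err := qb.Create(item, tx)
	if err != nil {
		_ = tx.Rollback() // best effort; the insert already failed
		return nil, err
	}
	return created, tx.Commit()
}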
234
internal/models/querybuilder_sql.go
Normal file
@ -0,0 +1,234 @@
package models

import (
	"database/sql"
	"fmt"
	"github.com/jmoiron/sqlx"
	"github.com/stashapp/stash/internal/database"
	"reflect"
	"strconv"
	"strings"
)

func selectAll(tableName string) string {
	idColumn := getColumn(tableName, "*")
	return "SELECT " + idColumn + " FROM " + tableName + " "
}

func selectDistinctIDs(tableName string) string {
	idColumn := getColumn(tableName, "id")
	return "SELECT DISTINCT " + idColumn + " FROM " + tableName + " "
}

func buildCountQuery(query string) string {
	return "SELECT COUNT(*) as count FROM (" + query + ") as temp"
}

func getColumn(tableName string, columnName string) string {
	return tableName + "." + columnName
}

func getPagination(findFilter *FindFilterType) string {
	if findFilter == nil {
		panic("nil find filter for pagination")
	}

	var page int
	if findFilter.Page == nil || *findFilter.Page < 1 {
		page = 1
	} else {
		page = *findFilter.Page
	}

	var perPage int
	if findFilter.PerPage == nil {
		perPage = 25
	} else {
		perPage = *findFilter.PerPage
	}
	if perPage > 120 {
		perPage = 120
	} else if perPage < 1 {
		perPage = 1
	}

	page = (page - 1) * perPage
	return " LIMIT " + strconv.Itoa(perPage) + " OFFSET " + strconv.Itoa(page) + " "
}

func getSort(sort string, direction string, tableName string) string {
	if direction != "ASC" && direction != "DESC" {
		direction = "ASC"
	}

	if strings.Contains(sort, "_count") {
		var relationTableName = strings.Split(sort, "_")[0] // TODO: pluralize?
		colName := getColumn(relationTableName, "id")
		return " ORDER BY COUNT(distinct " + colName + ") " + direction
	} else if strings.Compare(sort, "filesize") == 0 {
		colName := getColumn(tableName, "size")
		return " ORDER BY cast(" + colName + " as integer) " + direction
	} else if strings.Compare(sort, "random") == 0 {
		return " ORDER BY RANDOM() "
	} else {
		colName := getColumn(tableName, sort)
		return " ORDER BY " + colName + " " + direction
	}
}

func getSearch(columns []string, q string) string {
	var likeClauses []string
	queryWords := strings.Split(q, " ")
	trimmedQuery := strings.Trim(q, "\"")
	if trimmedQuery == q {
		// Search for any word
		for _, word := range queryWords {
			for _, column := range columns {
				likeClauses = append(likeClauses, column+" LIKE '%"+word+"%'")
			}
		}
	} else {
		// Search the exact query
		for _, column := range columns {
			likeClauses = append(likeClauses, column+" LIKE '%"+trimmedQuery+"%'")
		}
	}
	likes := strings.Join(likeClauses, " OR ")

	return "(" + likes + ")"
}

func getInBinding(length int) string {
	bindings := strings.Repeat("?, ", length)
	bindings = strings.TrimRight(bindings, ", ")
	return "(" + bindings + ")"
}

func runIdsQuery(query string, args []interface{}) ([]int, error) {
	var result []struct {
		Int int `db:"id"`
	}
	if err := database.DB.Select(&result, query, args...); err != nil && err != sql.ErrNoRows {
		return []int{}, err
	}

	vsm := make([]int, len(result))
	for i, v := range result {
		vsm[i] = v.Int
	}
	return vsm, nil
}

func runCountQuery(query string, args []interface{}) (int, error) {
	// Perform query and fetch result
	result := struct {
		Int int `db:"count"`
	}{0}
	if err := database.DB.Get(&result, query, args...); err != nil && err != sql.ErrNoRows {
		return 0, err
	}

	return result.Int, nil
}

func executeFindQuery(tableName string, body string, args []interface{}, sortAndPagination string, whereClauses []string, havingClauses []string) ([]int, int) {
	if len(whereClauses) > 0 {
		body = body + " WHERE " + strings.Join(whereClauses, " AND ") // TODO handle AND or OR
	}
	body = body + " GROUP BY " + tableName + ".id "
	if len(havingClauses) > 0 {
		body = body + " HAVING " + strings.Join(havingClauses, " AND ") // TODO handle AND or OR
	}

	countQuery := buildCountQuery(body)
	countResult, countErr := runCountQuery(countQuery, args)

	idsQuery := body + sortAndPagination
	idsResult, idsErr := runIdsQuery(idsQuery, args)

	if countErr != nil {
		panic(countErr)
	}
	if idsErr != nil {
		panic(idsErr)
	}

	return idsResult, countResult
}

func executeDeleteQuery(tableName string, id string, tx *sqlx.Tx) error {
	if tx == nil {
		panic("must use a transaction")
	}
	idColumnName := getColumn(tableName, "id")
	_, err := tx.Exec(
		`DELETE FROM ` + tableName + ` WHERE ` + idColumnName + ` = ?`,
		id,
	)
	return err
}

func ensureTx(tx *sqlx.Tx) {
	if tx == nil {
		panic("must use a transaction")
	}
}

// https://github.com/jmoiron/sqlx/issues/410
// sqlGenKeys is used for passing a struct and returning a string
// of keys for non empty key:values. These keys are formated
// keyname=:keyname with a comma seperating them
func SqlGenKeys(i interface{}) string {
	var query []string
	v := reflect.ValueOf(i)
	for i := 0; i < v.NumField(); i++ {
		//get key for struct tag
		rawKey := v.Type().Field(i).Tag.Get("db")
		key := strings.Split(rawKey, ",")[0]
		if key == "id" {
			continue
		}
		switch t := v.Field(i).Interface().(type) {
		case string:
			if t != "" {
				query = append(query, fmt.Sprintf("%s=:%s", key, key))
			}
		case int:
			if t != 0 {
				query = append(query, fmt.Sprintf("%s=:%s", key, key))
			}
		case float64:
			if t != 0 {
				query = append(query, fmt.Sprintf("%s=:%s", key, key))
			}
		case SQLiteTimestamp:
			if !t.Timestamp.IsZero() {
				query = append(query, fmt.Sprintf("%s=:%s", key, key))
			}
		case sql.NullString:
			if t.Valid {
				query = append(query, fmt.Sprintf("%s=:%s", key, key))
			}
		case sql.NullBool:
			if t.Valid {
				query = append(query, fmt.Sprintf("%s=:%s", key, key))
			}
		case sql.NullInt64:
			if t.Valid {
				query = append(query, fmt.Sprintf("%s=:%s", key, key))
			}
		case sql.NullFloat64:
			if t.Valid {
				query = append(query, fmt.Sprintf("%s=:%s", key, key))
			}
		default:
			reflectValue := reflect.ValueOf(t)
			kind := reflectValue.Kind()
			isNil := reflectValue.IsNil()
			if kind != reflect.Ptr && !isNil {
				query = append(query, fmt.Sprintf("%s=:%s", key, key))
			}
		}
	}
	return strings.Join(query, ", ")
}
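To make the shared helpers above concrete, a small hedged illustration of the strings they return. It assumes the same models package; the column names and search text are hypothetical, but the outputs follow directly from the code above.

package models

import "fmt"

// exampleSQLHelpers is illustrative only.
func exampleSQLHelpers() {
	// "filesize" and "random" are special-cased by getSort.
	fmt.Println(getSort("filesize", "DESC", "scenes")) // " ORDER BY cast(scenes.size as integer) DESC"
	fmt.Println(getSort("random", "ASC", "scenes"))    // " ORDER BY RANDOM() "

	// An unquoted search term becomes one LIKE per word per column, ORed together.
	fmt.Println(getSearch([]string{"scenes.title", "scenes.path"}, "red car"))

	// getInBinding emits the placeholder list used by the IN clauses above.
	fmt.Println(getInBinding(3)) // (?, ?, ?)
}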
151
internal/models/querybuilder_studio.go
Normal file
@ -0,0 +1,151 @@
package models

import (
	"database/sql"
	"github.com/jmoiron/sqlx"
	"github.com/stashapp/stash/internal/database"
)

type studioQueryBuilder struct {}

func NewStudioQueryBuilder() studioQueryBuilder {
	return studioQueryBuilder{}
}

func (qb *studioQueryBuilder) Create(newStudio Studio, tx *sqlx.Tx) (*Studio, error) {
	ensureTx(tx)
	result, err := tx.NamedExec(
		`INSERT INTO studios (image, checksum, name, url, created_at, updated_at)
			VALUES (:image, :checksum, :name, :url, :created_at, :updated_at)
		`,
		newStudio,
	)
	if err != nil {
		return nil, err
	}
	studioID, err := result.LastInsertId()
	if err != nil {
		return nil, err
	}

	if err := tx.Get(&newStudio, `SELECT * FROM studios WHERE id = ? LIMIT 1`, studioID); err != nil {
		return nil, err
	}
	return &newStudio, nil
}

func (qb *studioQueryBuilder) Update(updatedStudio Studio, tx *sqlx.Tx) (*Studio, error) {
	ensureTx(tx)
	_, err := tx.NamedExec(
		`UPDATE studios SET `+SqlGenKeys(updatedStudio)+` WHERE studios.id = :id`,
		updatedStudio,
	)
	if err != nil {
		return nil, err
	}

	if err := tx.Get(&updatedStudio, `SELECT * FROM studios WHERE id = ? LIMIT 1`, updatedStudio.ID); err != nil {
		return nil, err
	}
	return &updatedStudio, nil
}

func (qb *studioQueryBuilder) Find(id int, tx *sqlx.Tx) (*Studio, error) {
	query := "SELECT * FROM studios WHERE id = ? LIMIT 1"
	args := []interface{}{id}
	return qb.queryStudio(query, args, tx)
}

func (qb *studioQueryBuilder) FindBySceneID(sceneID int) (*Studio, error) {
	query := "SELECT studios.* FROM studios JOIN scenes ON studios.id = scenes.studio_id WHERE scenes.id = ? LIMIT 1"
	args := []interface{}{sceneID}
	return qb.queryStudio(query, args, nil)
}

func (qb *studioQueryBuilder) FindByName(name string, tx *sqlx.Tx) (*Studio, error) {
	query := "SELECT * FROM studios WHERE name = ? LIMIT 1"
	args := []interface{}{name}
	return qb.queryStudio(query, args, tx)
}

func (qb *studioQueryBuilder) All() ([]Studio, error) {
	return qb.queryStudios(selectAll("studios") + qb.getStudioSort(nil), nil, nil)
}

func (qb *studioQueryBuilder) Query(findFilter *FindFilterType) ([]Studio, int) {
	if findFilter == nil {
		findFilter = &FindFilterType{}
	}

	whereClauses := []string{}
	havingClauses := []string{}
	args := []interface{}{}
	body := selectDistinctIDs("studios")

	if q := findFilter.Q; q != nil && *q != "" {
		searchColumns := []string{"studios.name"}
		whereClauses = append(whereClauses, getSearch(searchColumns, *q))
	}

	sortAndPagination := qb.getStudioSort(findFilter) + getPagination(findFilter)
	idsResult, countResult := executeFindQuery("studios", body, args, sortAndPagination, whereClauses, havingClauses)

	var studios []Studio
	for _, id := range idsResult {
		studio, _ := qb.Find(id, nil)
		studios = append(studios, *studio)
	}

	return studios, countResult
}

func (qb *studioQueryBuilder) getStudioSort(findFilter *FindFilterType) string {
	var sort string
	var direction string
	if findFilter == nil {
		sort = "name"
		direction = "ASC"
	} else {
		sort = findFilter.GetSort("name")
		direction = findFilter.GetDirection()
	}
	return getSort(sort, direction, "studios")
}

func (qb *studioQueryBuilder) queryStudio(query string, args []interface{}, tx *sqlx.Tx) (*Studio, error) {
	results, err := qb.queryStudios(query, args, tx)
	if err != nil || len(results) < 1 {
		return nil, err
	}
	return &results[0], nil
}

func (qb *studioQueryBuilder) queryStudios(query string, args []interface{}, tx *sqlx.Tx) ([]Studio, error) {
	var rows *sqlx.Rows
	var err error
	if tx != nil {
		rows, err = tx.Queryx(query, args...)
	} else {
		rows, err = database.DB.Queryx(query, args...)
	}

	if err != nil && err != sql.ErrNoRows {
		return nil, err
	}
	defer rows.Close()

	studios := make([]Studio, 0)
	studio := Studio{}
	for rows.Next() {
		if err := rows.StructScan(&studio); err != nil {
			return nil, err
		}
		studios = append(studios, studio)
	}

	if err := rows.Err(); err != nil {
		return nil, err
	}

	return studios, nil
}
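A hedged sketch of how these finders compose, for example resolving the studio attached to a scene outside a transaction. It assumes the same models package and that the Studio struct exposes the ID field used by Update above; the scene ID is hypothetical.

package models

import "fmt"

// exampleSceneStudio is illustrative only.
func exampleSceneStudio(sceneID int) {
	qb := NewStudioQueryBuilder()
	studio, err := qb.FindBySceneID(sceneID)
	if err != nil || studio == nil {
		fmt.Println("scene has no studio or the lookup failed")
		return
	}
	fmt.Println("studio id:", studio.ID)
}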
158
internal/models/querybuilder_tag.go
Normal file
@ -0,0 +1,158 @@
package models

import (
	"database/sql"
	"github.com/jmoiron/sqlx"
	"github.com/stashapp/stash/internal/database"
)

type tagQueryBuilder struct {}

func NewTagQueryBuilder() tagQueryBuilder {
	return tagQueryBuilder{}
}

func (qb *tagQueryBuilder) Create(newTag Tag, tx *sqlx.Tx) (*Tag, error) {
	ensureTx(tx)
	result, err := tx.NamedExec(
		`INSERT INTO tags (name, created_at, updated_at)
			VALUES (:name, :created_at, :updated_at)
		`,
		newTag,
	)
	if err != nil {
		return nil, err
	}
	studioID, err := result.LastInsertId()
	if err != nil {
		return nil, err
	}

	if err := tx.Get(&newTag, `SELECT * FROM tags WHERE id = ? LIMIT 1`, studioID); err != nil {
		return nil, err
	}
	return &newTag, nil
}

func (qb *tagQueryBuilder) Update(updatedTag Tag, tx *sqlx.Tx) (*Tag, error) {
	ensureTx(tx)
	query := `UPDATE tags SET `+SqlGenKeys(updatedTag)+` WHERE tags.id = :id`
	_, err := tx.NamedExec(
		query,
		updatedTag,
	)
	if err != nil {
		return nil, err
	}

	if err := tx.Get(&updatedTag, `SELECT * FROM tags WHERE id = ? LIMIT 1`, updatedTag.ID); err != nil {
		return nil, err
	}
	return &updatedTag, nil
}

func (qb *tagQueryBuilder) Destroy(id string, tx *sqlx.Tx) error {
	return executeDeleteQuery("tags", id, tx)
}

func (qb *tagQueryBuilder) Find(id int, tx *sqlx.Tx) (*Tag, error) {
	query := "SELECT * FROM tags WHERE id = ? LIMIT 1"
	args := []interface{}{id}
	return qb.queryTag(query, args, tx)
}

func (qb *tagQueryBuilder) FindBySceneID(sceneID int, tx *sqlx.Tx) ([]Tag, error) {
	query := `
		SELECT tags.* FROM tags
		LEFT JOIN scenes_tags as scenes_join on scenes_join.tag_id = tags.id
		LEFT JOIN scenes on scenes_join.scene_id = scenes.id
		WHERE scenes.id = ?
		GROUP BY tags.id
	`
	query += qb.getTagSort(nil)
	args := []interface{}{sceneID}
	return qb.queryTags(query, args, tx)
}

func (qb *tagQueryBuilder) FindBySceneMarkerID(sceneMarkerID int, tx *sqlx.Tx) ([]Tag, error) {
	query := `
		SELECT tags.* FROM tags
		LEFT JOIN scene_markers_tags as scene_markers_join on scene_markers_join.tag_id = tags.id
		LEFT JOIN scene_markers on scene_markers_join.scene_marker_id = scene_markers.id
		WHERE scene_markers.id = ?
		GROUP BY tags.id
	`
	query += qb.getTagSort(nil)
	args := []interface{}{sceneMarkerID}
	return qb.queryTags(query, args, tx)
}

func (qb *tagQueryBuilder) FindByName(name string, tx *sqlx.Tx) (*Tag, error) {
	query := "SELECT * FROM tags WHERE name = ? LIMIT 1"
	args := []interface{}{name}
	return qb.queryTag(query, args, tx)
}

func (qb *tagQueryBuilder) FindByNames(names []string, tx *sqlx.Tx) ([]Tag, error) {
	query := "SELECT * FROM tags WHERE name IN " + getInBinding(len(names))
	var args []interface{}
	for _, name := range names {
		args = append(args, name)
	}
	return qb.queryTags(query, args, tx)
}

func (qb *tagQueryBuilder) All() ([]Tag, error) {
	return qb.queryTags(selectAll("tags") + qb.getTagSort(nil), nil, nil)
}

func (qb *tagQueryBuilder) getTagSort(findFilter *FindFilterType) string {
	var sort string
	var direction string
	if findFilter == nil {
		sort = "name"
		direction = "ASC"
	} else {
		sort = findFilter.GetSort("name")
		direction = findFilter.GetDirection()
	}
	return getSort(sort, direction, "tags")
}

func (qb *tagQueryBuilder) queryTag(query string, args []interface{}, tx *sqlx.Tx) (*Tag, error) {
	results, err := qb.queryTags(query, args, tx)
	if err != nil || len(results) < 1 {
		return nil, err
	}
	return &results[0], nil
}

func (qb *tagQueryBuilder) queryTags(query string, args []interface{}, tx *sqlx.Tx) ([]Tag, error) {
	var rows *sqlx.Rows
	var err error
	if tx != nil {
		rows, err = tx.Queryx(query, args...)
	} else {
		rows, err = database.DB.Queryx(query, args...)
	}

	if err != nil && err != sql.ErrNoRows {
		return nil, err
	}
	defer rows.Close()

	tags := make([]Tag, 0)
	tag := Tag{}
	for rows.Next() {
		if err := rows.StructScan(&tag); err != nil {
			return nil, err
		}
		tags = append(tags, tag)
	}

	if err := rows.Err(); err != nil {
		return nil, err
	}

	return tags, nil
}
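FindByNames above pairs a single IN binding with one argument per name. A hedged usage sketch, assuming the same models package; the tag names are hypothetical, and passing nil for the transaction falls back to database.DB exactly as queryTags does above.

package models

import "fmt"

// exampleFindTags is illustrative only.
func exampleFindTags() {
	qb := NewTagQueryBuilder()
	tags, err := qb.FindByNames([]string{"outdoor", "night"}, nil)
	if err != nil {
		fmt.Println("tag lookup failed:", err)
		return
	}
	fmt.Println("matched", len(tags), "tags")
}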
21
internal/models/sqlite_timestamp.go
Normal file
@ -0,0 +1,21 @@
package models

import (
	"database/sql/driver"
	"time"
)

type SQLiteTimestamp struct {
	Timestamp time.Time
}

// Scan implements the Scanner interface.
func (t *SQLiteTimestamp) Scan(value interface{}) error {
	t.Timestamp = value.(time.Time)
	return nil
}

// Value implements the driver Valuer interface.
func (t SQLiteTimestamp) Value() (driver.Value, error) {
	return t.Timestamp.Format(time.RFC3339), nil
}
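SQLiteTimestamp writes RFC 3339 text on the way out and expects the driver to hand back a time.Time on the way in. A minimal hedged sketch of that round trip, assuming the same models package; the date is illustrative only.

package models

import (
	"fmt"
	"time"
)

// exampleTimestampRoundTrip is illustrative only.
func exampleTimestampRoundTrip() {
	ts := SQLiteTimestamp{Timestamp: time.Date(2019, 2, 9, 12, 30, 0, 0, time.UTC)}

	v, _ := ts.Value() // what gets written: "2019-02-09T12:30:00Z"
	fmt.Println(v)

	var back SQLiteTimestamp
	_ = back.Scan(time.Date(2019, 2, 9, 12, 30, 0, 0, time.UTC)) // Scan asserts a time.Time from the driver
	fmt.Println(back.Timestamp.Equal(ts.Timestamp))              // true
}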
Some files were not shown because too many files have changed in this diff.