stash/pkg/scraper/query_url.go

package scraper

import (
	"path/filepath"
	"strings"

	"github.com/stashapp/stash/pkg/models"
)

// queryURLReplacements maps parameter names to the regex configurations that
// are applied to a parameter value before it is substituted into the query URL.
type queryURLReplacements map[string]mappedRegexConfigs

// queryURLParameters maps placeholder names to the values substituted for the
// corresponding {name} tokens in a query URL template.
type queryURLParameters map[string]string
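
// For example (illustrative values only, not taken from a real scraper
// config), a scene typically produces a map such as
//   {"checksum": "...", "oshash": "...", "filename": "scene.mp4", "title": "Some Scene"}
// which is matched against {checksum}, {oshash}, {filename} and {title}
// placeholders in the scraper's query URL template.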

// queryURLParametersFromScene returns the query URL parameters for a scene:
// its checksum, oshash and filename, plus title and url when set.
func queryURLParametersFromScene(scene *models.Scene) queryURLParameters {
	ret := make(queryURLParameters)
	ret["checksum"] = scene.Checksum()
	ret["oshash"] = scene.OSHash()
	ret["filename"] = filepath.Base(scene.Path())

	if scene.Title != "" {
		ret["title"] = scene.Title
	}
	if scene.URL != "" {
		ret["url"] = scene.URL
	}
	return ret
}

// queryURLParametersFromScrapedScene returns the query URL parameters for a
// scraped scene input, setting only the fields that are non-nil.
func queryURLParametersFromScrapedScene(scene ScrapedSceneInput) queryURLParameters {
	ret := make(queryURLParameters)

	setField := func(field string, value *string) {
		if value != nil {
			ret[field] = *value
		}
	}

	setField("title", scene.Title)
	setField("url", scene.URL)
	setField("date", scene.Date)
	setField("details", scene.Details)
	setField("remote_site_id", scene.RemoteSiteID)
	return ret
}

// queryURLParameterFromURL returns query URL parameters containing only the
// given url.
func queryURLParameterFromURL(url string) queryURLParameters {
	ret := make(queryURLParameters)
	ret["url"] = url
	return ret
}

// queryURLParametersFromGallery returns the query URL parameters for a
// gallery: its checksum, plus filename, title and url when set.
func queryURLParametersFromGallery(gallery *models.Gallery) queryURLParameters {
	ret := make(queryURLParameters)
	ret["checksum"] = gallery.Checksum()

	if gallery.Path() != "" {
		ret["filename"] = filepath.Base(gallery.Path())
	}
	if gallery.Title != "" {
		ret["title"] = gallery.Title
	}
	if gallery.URL != "" {
		ret["url"] = gallery.URL
	}
	return ret
}

// applyReplacements applies the configured regex replacements to the values of
// any parameters that have a matching entry in r.
func (p queryURLParameters) applyReplacements(r queryURLReplacements) {
	for k, v := range p {
		rpl, found := r[k]
		if found {
			p[k] = rpl.apply(v)
		}
	}
}

// constructURL replaces each {name} placeholder in the url template with the
// corresponding parameter value.
func (p queryURLParameters) constructURL(url string) string {
	ret := url
	for k, v := range p {
		ret = strings.ReplaceAll(ret, "{"+k+"}", v)
	}
	return ret
}
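
// As a hypothetical illustration (the URL and title below are invented, not
// from an actual scraper config): with parameters {"title": "Some Scene"} and
// the template "https://example.org/search?q={title}", constructURL returns
// "https://example.org/search?q=Some Scene". Values are substituted verbatim
// (no URL encoding is applied here), and placeholders with no matching
// parameter are left in the URL unchanged.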

// replaceURL does a partial URL replacement (only the url parameter is used).
func replaceURL(url string, scraperConfig scraperTypeConfig) string {
	u := url
	queryURL := queryURLParameterFromURL(u)
	if scraperConfig.QueryURLReplacements != nil {
		queryURL.applyReplacements(scraperConfig.QueryURLReplacements)
		u = queryURL.constructURL(scraperConfig.QueryURL)
	}
	return u
}
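
// Usage sketch (hedged; the template below is invented for illustration): given
// a scraperTypeConfig whose QueryURL is "https://example.org/lookup?page={url}"
// and a QueryURLReplacements entry for "url", replaceURL applies the configured
// regex replacements to the scraped page URL and substitutes the result into
// the template. When QueryURLReplacements is nil, the original url is returned
// unchanged.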