Mirror of https://github.com/stashapp/stash.git, synced 2025-12-06 16:34:02 +01:00
* Move stashbox package under pkg
* Remove StashBox from method names
* Add fingerprint conversion methods to Fingerprint; refactor Fingerprints methods
* Make FindSceneByFingerprints accept fingerprints not scene ids
* Refactor SubmitSceneDraft to not require readers
* Have SubmitFingerprints accept scenes; remove SceneReader dependency
* Move ScrapedScene to models package
* Move ScrapedImage into models package
* Move ScrapedGallery into models package
* Move Scene relationship matching out of stashbox package; this is now expected to be done in the client code
* Remove TagFinder dependency from stashbox.Client
* Make stashbox scene find full hierarchy of studios
* Move studio resolution into separate method
* Move studio matching out of stashbox package; this is now client code responsibility
* Move performer matching out of FindPerformerByID and FindPerformerByName
* Refactor performer querying logic and remove unused stashbox models; renames FindStashBoxPerformersByPerformerNames to QueryPerformers and accepts names instead of performer ids
* Refactor SubmitPerformerDraft to not load relationships; this will be the responsibility of the calling code
* Remove repository references
121 lines · 2.6 KiB · Go
package scraper

import (
	"path/filepath"
	"strings"

	"github.com/stashapp/stash/pkg/models"
)

type queryURLReplacements map[string]mappedRegexConfigs

type queryURLParameters map[string]string

func queryURLParametersFromScene(scene *models.Scene) queryURLParameters {
	ret := make(queryURLParameters)
	ret["checksum"] = scene.Checksum
	ret["oshash"] = scene.OSHash
	ret["filename"] = filepath.Base(scene.Path)

	if scene.Title != "" {
		ret["title"] = scene.Title
	}
	if len(scene.URLs.List()) > 0 {
		ret["url"] = scene.URLs.List()[0]
	}
	return ret
}

func queryURLParametersFromScrapedScene(scene models.ScrapedSceneInput) queryURLParameters {
	ret := make(queryURLParameters)

	setField := func(field string, value *string) {
		if value != nil {
			ret[field] = *value
		}
	}

	setField("title", scene.Title)
	setField("code", scene.Code)
	if len(scene.URLs) > 0 {
		setField("url", &scene.URLs[0])
	} else {
		setField("url", scene.URL)
	}
	setField("date", scene.Date)
	setField("details", scene.Details)
	setField("director", scene.Director)
	setField("remote_site_id", scene.RemoteSiteID)
	return ret
}

func queryURLParameterFromURL(url string) queryURLParameters {
	ret := make(queryURLParameters)
	ret["url"] = url
	return ret
}

func queryURLParametersFromGallery(gallery *models.Gallery) queryURLParameters {
	ret := make(queryURLParameters)
	ret["checksum"] = gallery.PrimaryChecksum()

	if gallery.Path != "" {
		ret["filename"] = filepath.Base(gallery.Path)
	}
	if gallery.Title != "" {
		ret["title"] = gallery.Title
	}

	if len(gallery.URLs.List()) > 0 {
		ret["url"] = gallery.URLs.List()[0]
	}

	return ret
}

func queryURLParametersFromImage(image *models.Image) queryURLParameters {
	ret := make(queryURLParameters)
	ret["checksum"] = image.Checksum

	if image.Path != "" {
		ret["filename"] = filepath.Base(image.Path)
	}
	if image.Title != "" {
		ret["title"] = image.Title
	}

	if len(image.URLs.List()) > 0 {
		ret["url"] = image.URLs.List()[0]
	}

	return ret
}

func (p queryURLParameters) applyReplacements(r queryURLReplacements) {
	for k, v := range p {
		rpl, found := r[k]
		if found {
			p[k] = rpl.apply(v)
		}
	}
}

func (p queryURLParameters) constructURL(url string) string {
	ret := url
	for k, v := range p {
		ret = strings.ReplaceAll(ret, "{"+k+"}", v)
	}

	return ret
}
// replaceURL does a partial URL replace (only the url parameter is used)
func replaceURL(url string, scraperConfig scraperTypeConfig) string {
	u := url
	queryURL := queryURLParameterFromURL(u)
	if scraperConfig.QueryURLReplacements != nil {
		queryURL.applyReplacements(scraperConfig.QueryURLReplacements)
		u = queryURL.constructURL(scraperConfig.QueryURL)
	}
	return u
}
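For illustration, a minimal usage sketch (not part of the file above) of how a queryURLParameters map expands a query URL template via constructURL. The function name, template URL, and values here are hypothetical, and the sketch assumes it sits alongside this file in package scraper:

package scraper

import "fmt"

// exampleConstructQueryURL is a hypothetical sketch demonstrating how
// constructURL substitutes "{key}" placeholders in a query URL template
// with the values from a queryURLParameters map.
func exampleConstructQueryURL() {
	params := queryURLParameters{
		"title":    "Some Title",
		"checksum": "abcdef0123456789",
	}
	// Each "{key}" placeholder in the template is replaced with its value.
	url := params.constructURL("https://example.org/search?q={title}&hash={checksum}")
	fmt.Println(url)
	// Output: https://example.org/search?q=Some Title&hash=abcdef0123456789
}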