Merge branch 'develop' into Stash4DeoVR-Main

This commit is contained in:
Philip Wang 2026-02-20 11:27:35 -05:00 committed by GitHub
commit e5292de21f
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
387 changed files with 13674 additions and 4728 deletions

View file

@ -5,20 +5,39 @@ import (
"fmt"
"os"
"os/exec"
"path/filepath"
flag "github.com/spf13/pflag"
"github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/hash/imagephash"
"github.com/stashapp/stash/pkg/hash/videophash"
"github.com/stashapp/stash/pkg/models"
)
// customUsage prints the tool's usage banner to stderr, followed by the
// registered flag defaults. Installed as flag.Usage by main.
//
// Fix: the stripped diff left both the stale "VIDEOFILE" usage line and
// its "FILE" replacement, so the banner printed twice; keep only the
// current "FILE" form (the tool now accepts images as well as videos).
func customUsage() {
	fmt.Fprintf(os.Stderr, "Usage:\n")
	fmt.Fprintf(os.Stderr, "%s [OPTIONS] FILE...\n\nOptions:\n", os.Args[0])
	flag.PrintDefaults()
}
// printPhash dispatches phash generation based on the input file's
// extension: known image extensions are routed to printImagePhash,
// everything else is treated as a video and routed to printVideoPhash.
func printPhash(ff *ffmpeg.FFMpeg, ffp *ffmpeg.FFProbe, inputfile string, quiet *bool) error {
	// Determine if this is a video or image file based on extension.
	// filepath.Ext returns "" when the path has no extension; guard the
	// slice so paths like "moviefile" don't panic on ext[1:].
	ext := filepath.Ext(inputfile)
	if ext != "" {
		ext = ext[1:] // remove the leading dot
	}

	// Common image extensions.
	// NOTE(review): matching is case-sensitive, so "photo.JPG" falls
	// through to the video path — confirm whether that is intended.
	imageExts := map[string]bool{
		"jpg": true, "jpeg": true, "png": true, "gif": true, "webp": true, "bmp": true, "avif": true,
	}

	if imageExts[ext] {
		return printImagePhash(ff, inputfile, quiet)
	}

	return printVideoPhash(ff, ffp, inputfile, quiet)
}
func printVideoPhash(ff *ffmpeg.FFMpeg, ffp *ffmpeg.FFProbe, inputfile string, quiet *bool) error {
ffvideoFile, err := ffp.NewVideoFile(inputfile)
if err != nil {
return err
@ -46,6 +65,24 @@ func printPhash(ff *ffmpeg.FFMpeg, ffp *ffmpeg.FFProbe, inputfile string, quiet
return nil
}
// printImagePhash generates a perceptual hash for the image at inputfile
// and prints it to stdout in hex. When quiet is true only the hash is
// printed; otherwise the path follows the hash on the same line.
func printImagePhash(ff *ffmpeg.FFMpeg, inputfile string, quiet *bool) error {
	imgFile := &models.ImageFile{
		BaseFile: &models.BaseFile{Path: inputfile},
	}

	phash, err := imagephash.Generate(ff, imgFile)
	if err != nil {
		return err
	}

	if !*quiet {
		fmt.Printf("%x %v\n", *phash, imgFile.Path)
		return nil
	}
	fmt.Printf("%x\n", *phash)
	return nil
}
func getPaths() (string, string) {
ffmpegPath, _ := exec.LookPath("ffmpeg")
ffprobePath, _ := exec.LookPath("ffprobe")
@ -67,7 +104,7 @@ func main() {
args := flag.Args()
if len(args) < 1 {
fmt.Fprintf(os.Stderr, "Missing VIDEOFILE argument.\n")
fmt.Fprintf(os.Stderr, "Missing FILE argument.\n")
flag.Usage()
os.Exit(2)
}
@ -87,4 +124,5 @@ func main() {
fmt.Fprintln(os.Stderr, err)
}
}
}

View file

@ -140,4 +140,8 @@ models:
fields:
plugins:
resolver: true
Performer:
fields:
career_length:
resolver: true

View file

@ -422,6 +422,8 @@ type Mutation {
"""
moveFiles(input: MoveFilesInput!): Boolean!
deleteFiles(ids: [ID!]!): Boolean!
"Deletes file entries from the database without deleting the files from the filesystem"
destroyFiles(ids: [ID!]!): Boolean!
fileSetFingerprints(input: FileSetFingerprintsInput!): Boolean!

View file

@ -184,6 +184,18 @@ input ConfigGeneralInput {
scraperPackageSources: [PackageSourceInput!]
"Source of plugin packages"
pluginPackageSources: [PackageSourceInput!]
"Size of the longest dimension for each sprite in pixels"
spriteScreenshotSize: Int
"True if sprite generation should use the sprite interval and min/max sprites settings instead of the default"
useCustomSpriteInterval: Boolean
"Time between two different scrubber sprites in seconds - only used if useCustomSpriteInterval is true"
spriteInterval: Float
"Minimum number of sprites to be generated - only used if useCustomSpriteInterval is true"
minimumSprites: Int
"Minimum number of sprites to be generated - only used if useCustomSpriteInterval is true"
maximumSprites: Int
}
type ConfigGeneralResult {
@ -287,6 +299,16 @@ type ConfigGeneralResult {
logAccess: Boolean!
"Maximum log size"
logFileMaxSize: Int!
"True if sprite generation should use the sprite interval and min/max sprites settings instead of the default"
useCustomSpriteInterval: Boolean!
"Time between two different scrubber sprites in seconds - only used if useCustomSpriteInterval is true"
spriteInterval: Float!
"Minimum number of sprites to be generated - only used if useCustomSpriteInterval is true"
minimumSprites: Int!
"Maximum number of sprites to be generated - only used if useCustomSpriteInterval is true"
maximumSprites: Int!
"Size of the longest dimension for each sprite in pixels"
spriteScreenshotSize: Int!
"Array of video file extensions"
videoExtensions: [String!]!
"Array of image file extensions"
@ -395,6 +417,9 @@ input ConfigInterfaceInput {
customLocales: String
customLocalesEnabled: Boolean
"When true, disables all customizations (plugins, CSS, JavaScript, locales) for troubleshooting"
disableCustomizations: Boolean
"Interface language"
language: String
@ -472,6 +497,9 @@ type ConfigInterfaceResult {
customLocales: String
customLocalesEnabled: Boolean
"When true, disables all customizations (plugins, CSS, JavaScript, locales) for troubleshooting"
disableCustomizations: Boolean
"Interface language"
language: String

View file

@ -75,10 +75,26 @@ input OrientationCriterionInput {
value: [OrientationEnum!]!
}
input PHashDuplicationCriterionInput {
duplicated: Boolean
"Currently unimplemented"
input DuplicationCriterionInput {
duplicated: Boolean @deprecated(reason: "Use phash field instead")
"Currently unimplemented. Intended for phash distance matching."
distance: Int
"Filter by phash duplication"
phash: Boolean
"Filter by URL duplication"
url: Boolean
"Filter by Stash ID duplication"
stash_id: Boolean
"Filter by title duplication"
title: Boolean
}
input FileDuplicationCriterionInput {
duplicated: Boolean @deprecated(reason: "Use phash field instead")
"Currently unimplemented. Intended for phash distance matching."
distance: Int
"Filter by phash duplication"
phash: Boolean
}
input StashIDCriterionInput {
@ -138,8 +154,13 @@ input PerformerFilterType {
penis_length: FloatCriterionInput
"Filter by ciricumcision"
circumcised: CircumcisionCriterionInput
"Filter by career length"
"Deprecated: use career_start and career_end. This filter is non-functional."
career_length: StringCriterionInput
@deprecated(reason: "Use career_start and career_end")
"Filter by career start year"
career_start: IntCriterionInput
"Filter by career end year"
career_end: IntCriterionInput
"Filter by tattoos"
tattoos: StringCriterionInput
"Filter by piercings"
@ -261,8 +282,8 @@ input SceneFilterType {
organized: Boolean
"Filter by o-counter"
o_counter: IntCriterionInput
"Filter Scenes that have an exact phash match available"
duplicated: PHashDuplicationCriterionInput
"Filter Scenes by duplication criteria"
duplicated: DuplicationCriterionInput
"Filter by resolution"
resolution: ResolutionCriterionInput
"Filter by orientation"
@ -308,6 +329,8 @@ input SceneFilterType {
@deprecated(reason: "use stash_ids_endpoint instead")
"Filter by StashIDs"
stash_ids_endpoint: StashIDsCriterionInput
"Filter by StashID count"
stash_id_count: IntCriterionInput
"Filter by url"
url: StringCriterionInput
"Filter by interactive"
@ -348,6 +371,8 @@ input SceneFilterType {
markers_filter: SceneMarkerFilterType
"Filter by related files that meet this criteria"
files_filter: FileFilterType
custom_fields: [CustomFieldCriterionInput!]
}
input MovieFilterType {
@ -430,6 +455,8 @@ input GroupFilterType {
containing_group_count: IntCriterionInput
"Filter by number of sub-groups the group has"
sub_group_count: IntCriterionInput
"Filter by number of scenes the group has"
scene_count: IntCriterionInput
"Filter by related scenes that meet this criteria"
scenes_filter: SceneFilterType
@ -465,6 +492,8 @@ input StudioFilterType {
image_count: IntCriterionInput
"Filter by gallery count"
gallery_count: IntCriterionInput
"Filter by group count"
group_count: IntCriterionInput
"Filter by tag count"
tag_count: IntCriterionInput
"Filter by url"
@ -475,16 +504,22 @@ input StudioFilterType {
child_count: IntCriterionInput
"Filter by autotag ignore value"
ignore_auto_tag: Boolean
"Filter by organized"
organized: Boolean
"Filter by related scenes that meet this criteria"
scenes_filter: SceneFilterType
"Filter by related images that meet this criteria"
images_filter: ImageFilterType
"Filter by related galleries that meet this criteria"
galleries_filter: GalleryFilterType
"Filter by related groups that meet this criteria"
groups_filter: GroupFilterType
"Filter by creation time"
created_at: TimestampCriterionInput
"Filter by last update time"
updated_at: TimestampCriterionInput
custom_fields: [CustomFieldCriterionInput!]
}
input GalleryFilterType {
@ -638,12 +673,20 @@ input TagFilterType {
images_filter: ImageFilterType
"Filter by related galleries that meet this criteria"
galleries_filter: GalleryFilterType
"Filter by related groups that meet this criteria"
groups_filter: GroupFilterType
"Filter by related performers that meet this criteria"
performers_filter: PerformerFilterType
"Filter by related studios that meet this criteria"
studios_filter: StudioFilterType
"Filter by creation time"
created_at: TimestampCriterionInput
"Filter by last update time"
updated_at: TimestampCriterionInput
custom_fields: [CustomFieldCriterionInput!]
}
input ImageFilterType {
@ -658,6 +701,8 @@ input ImageFilterType {
id: IntCriterionInput
"Filter by file checksum"
checksum: StringCriterionInput
"Filter by file phash distance"
phash_distance: PhashDistanceCriterionInput
"Filter by path"
path: StringCriterionInput
"Filter by file count"
@ -732,8 +777,8 @@ input FileFilterType {
"Filter by modification time"
mod_time: TimestampCriterionInput
"Filter files that have an exact match available"
duplicated: PHashDuplicationCriterionInput
"Filter files by duplication criteria (only phash applies to files)"
duplicated: FileDuplicationCriterionInput
"find files based on hash"
hashes: [FingerprintFilterInput!]

View file

@ -100,6 +100,8 @@ input GalleryDestroyInput {
"""
delete_file: Boolean
delete_generated: Boolean
"If true, delete the file entry from the database if the file is not assigned to any other objects"
destroy_file_entry: Boolean
}
type FindGalleriesResultType {

View file

@ -82,12 +82,16 @@ input ImageDestroyInput {
id: ID!
delete_file: Boolean
delete_generated: Boolean
"If true, delete the file entry from the database if the file is not assigned to any other objects"
destroy_file_entry: Boolean
}
input ImagesDestroyInput {
ids: [ID!]!
delete_file: Boolean
delete_generated: Boolean
"If true, delete the file entry from the database if the file is not assigned to any other objects"
destroy_file_entry: Boolean
}
type FindImagesResultType {

View file

@ -10,8 +10,11 @@ input GenerateMetadataInput {
transcodes: Boolean
"Generate transcodes even if not required"
forceTranscodes: Boolean
"Generate video phashes during scan"
phashes: Boolean
interactiveHeatmapsSpeeds: Boolean
"Generate image phashes during scan"
imagePhashes: Boolean
imageThumbnails: Boolean
clipPreviews: Boolean
@ -19,6 +22,10 @@ input GenerateMetadataInput {
sceneIDs: [ID!]
"marker ids to generate for"
markerIDs: [ID!]
"image ids to generate for"
imageIDs: [ID!]
"gallery ids to generate for"
galleryIDs: [ID!]
"overwrite existing media"
overwrite: Boolean
@ -85,8 +92,10 @@ input ScanMetadataInput {
scanGenerateImagePreviews: Boolean
"Generate sprites during scan"
scanGenerateSprites: Boolean
"Generate phashes during scan"
"Generate video phashes during scan"
scanGeneratePhashes: Boolean
"Generate image phashes during scan"
scanGenerateImagePhashes: Boolean
"Generate image thumbnails during scan"
scanGenerateThumbnails: Boolean
"Generate image clip previews during scan"
@ -107,8 +116,10 @@ type ScanMetadataOptions {
scanGenerateImagePreviews: Boolean!
"Generate sprites during scan"
scanGenerateSprites: Boolean!
"Generate phashes during scan"
"Generate video phashes during scan"
scanGeneratePhashes: Boolean!
"Generate image phashes during scan"
scanGenerateImagePhashes: Boolean
"Generate image thumbnails during scan"
scanGenerateThumbnails: Boolean!
"Generate image clip previews during scan"
@ -204,7 +215,9 @@ input IdentifyMetadataOptionsInput {
setCoverImage: Boolean
setOrganized: Boolean
"defaults to true if not provided"
includeMalePerformers: Boolean
includeMalePerformers: Boolean @deprecated(reason: "Use performerGenders")
"Filter to only include performers with these genders. If not provided, all genders are included."
performerGenders: [GenderEnum!]
"defaults to true if not provided"
skipMultipleMatches: Boolean
"tag to tag skipped multiple matches with"
@ -249,7 +262,9 @@ type IdentifyMetadataOptions {
setCoverImage: Boolean
setOrganized: Boolean
"defaults to true if not provided"
includeMalePerformers: Boolean
includeMalePerformers: Boolean @deprecated(reason: "Use performerGenders")
"Filter to only include performers with these genders. If not provided, all genders are included."
performerGenders: [GenderEnum!]
"defaults to true if not provided"
skipMultipleMatches: Boolean
"tag to tag skipped multiple matches with"
@ -310,6 +325,8 @@ input ImportObjectsInput {
input BackupDatabaseInput {
download: Boolean
"If true, blob files will be included in the backup. This can significantly increase the size of the backup and the time it takes to create it, but allows for a complete backup of the system that can be restored without needing access to the original media files."
includeBlobs: Boolean
}
input AnonymiseDatabaseInput {

View file

@ -30,7 +30,9 @@ type Performer {
fake_tits: String
penis_length: Float
circumcised: CircumisedEnum
career_length: String
career_length: String @deprecated(reason: "Use career_start and career_end")
career_start: Int
career_end: Int
tattoos: String
piercings: String
alias_list: [String!]!
@ -77,9 +79,12 @@ input PerformerCreateInput {
fake_tits: String
penis_length: Float
circumcised: CircumisedEnum
career_length: String
career_length: String @deprecated(reason: "Use career_start and career_end")
career_start: Int
career_end: Int
tattoos: String
piercings: String
"Duplicate aliases and those equal to name will be ignored (case-insensitive)"
alias_list: [String!]
twitter: String @deprecated(reason: "Use urls")
instagram: String @deprecated(reason: "Use urls")
@ -115,9 +120,12 @@ input PerformerUpdateInput {
fake_tits: String
penis_length: Float
circumcised: CircumisedEnum
career_length: String
career_length: String @deprecated(reason: "Use career_start and career_end")
career_start: Int
career_end: Int
tattoos: String
piercings: String
"Duplicate aliases and those equal to name will be ignored (case-insensitive)"
alias_list: [String!]
twitter: String @deprecated(reason: "Use urls")
instagram: String @deprecated(reason: "Use urls")
@ -158,9 +166,12 @@ input BulkPerformerUpdateInput {
fake_tits: String
penis_length: Float
circumcised: CircumisedEnum
career_length: String
career_length: String @deprecated(reason: "Use career_start and career_end")
career_start: Int
career_end: Int
tattoos: String
piercings: String
"Duplicate aliases and those equal to name will result in an error (case-insensitive)"
alias_list: BulkUpdateStrings
twitter: String @deprecated(reason: "Use urls")
instagram: String @deprecated(reason: "Use urls")

View file

@ -79,6 +79,8 @@ type Scene {
performers: [Performer!]!
stash_ids: [StashID!]!
custom_fields: Map!
"Return valid stream paths"
sceneStreams: [SceneStreamEndpoint!]!
}
@ -120,6 +122,8 @@ input SceneCreateInput {
Files must not already be primary for another scene.
"""
file_ids: [ID!]
custom_fields: Map
}
input SceneUpdateInput {
@ -158,6 +162,8 @@ input SceneUpdateInput {
)
primary_file_id: ID
custom_fields: CustomFieldsInput
}
enum BulkUpdateIdMode {
@ -190,18 +196,24 @@ input BulkSceneUpdateInput {
tag_ids: BulkUpdateIds
group_ids: BulkUpdateIds
movie_ids: BulkUpdateIds @deprecated(reason: "Use group_ids")
custom_fields: CustomFieldsInput
}
input SceneDestroyInput {
id: ID!
delete_file: Boolean
delete_generated: Boolean
"If true, delete the file entry from the database if the file is not assigned to any other objects"
destroy_file_entry: Boolean
}
input ScenesDestroyInput {
ids: [ID!]!
delete_file: Boolean
delete_generated: Boolean
"If true, delete the file entry from the database if the file is not assigned to any other objects"
destroy_file_entry: Boolean
}
type FindScenesResultType {

View file

@ -18,7 +18,9 @@ type ScrapedPerformer {
fake_tits: String
penis_length: String
circumcised: String
career_length: String
career_length: String @deprecated(reason: "Use career_start and career_end")
career_start: Int
career_end: Int
tattoos: String
piercings: String
# aliases must be comma-delimited to be parsed correctly
@ -54,7 +56,9 @@ input ScrapedPerformerInput {
fake_tits: String
penis_length: String
circumcised: String
career_length: String
career_length: String @deprecated(reason: "Use career_start and career_end")
career_start: Int
career_end: Int
tattoos: String
piercings: String
aliases: String

View file

@ -8,6 +8,7 @@ type Studio {
aliases: [String!]!
tags: [Tag!]!
ignore_auto_tag: Boolean!
organized: Boolean!
image_path: String # Resolver
scene_count(depth: Int): Int! # Resolver
@ -26,6 +27,8 @@ type Studio {
groups: [Group!]!
movies: [Movie!]! @deprecated(reason: "use groups instead")
o_counter: Int
custom_fields: Map!
}
input StudioCreateInput {
@ -40,9 +43,13 @@ input StudioCreateInput {
rating100: Int
favorite: Boolean
details: String
"Duplicate aliases and those equal to name will be ignored (case-insensitive)"
aliases: [String!]
tag_ids: [ID!]
ignore_auto_tag: Boolean
organized: Boolean
custom_fields: Map
}
input StudioUpdateInput {
@ -58,9 +65,13 @@ input StudioUpdateInput {
rating100: Int
favorite: Boolean
details: String
"Duplicate aliases and those equal to name will be ignored (case-insensitive)"
aliases: [String!]
tag_ids: [ID!]
ignore_auto_tag: Boolean
organized: Boolean
custom_fields: CustomFieldsInput
}
input BulkStudioUpdateInput {
@ -74,6 +85,7 @@ input BulkStudioUpdateInput {
details: String
tag_ids: BulkUpdateIds
ignore_auto_tag: Boolean
organized: Boolean
}
input StudioDestroyInput {

View file

@ -24,6 +24,7 @@ type Tag {
parent_count: Int! # Resolver
child_count: Int! # Resolver
custom_fields: Map!
}
input TagCreateInput {
@ -31,6 +32,7 @@ input TagCreateInput {
"Value that does not appear in the UI but overrides name for sorting"
sort_name: String
description: String
"Duplicate aliases and those equal to name will be ignored (case-insensitive)"
aliases: [String!]
ignore_auto_tag: Boolean
favorite: Boolean
@ -40,6 +42,8 @@ input TagCreateInput {
parent_ids: [ID!]
child_ids: [ID!]
custom_fields: Map
}
input TagUpdateInput {
@ -48,6 +52,7 @@ input TagUpdateInput {
"Value that does not appear in the UI but overrides name for sorting"
sort_name: String
description: String
"Duplicate aliases and those equal to name will be ignored (case-insensitive)"
aliases: [String!]
ignore_auto_tag: Boolean
favorite: Boolean
@ -57,6 +62,8 @@ input TagUpdateInput {
parent_ids: [ID!]
child_ids: [ID!]
custom_fields: CustomFieldsInput
}
input TagDestroyInput {
@ -71,11 +78,14 @@ type FindTagsResultType {
input TagsMergeInput {
source: [ID!]!
destination: ID!
# values defined here will override values in the destination
values: TagUpdateInput
}
input BulkTagUpdateInput {
ids: [ID!]
description: String
"Duplicate aliases and those equal to name will result in an error (case-insensitive)"
aliases: BulkUpdateStrings
ignore_auto_tag: Boolean
favorite: Boolean

View file

@ -120,18 +120,6 @@ fragment SceneFragment on Scene {
}
}
query FindSceneByFingerprint($fingerprint: FingerprintQueryInput!) {
findSceneByFingerprint(fingerprint: $fingerprint) {
...SceneFragment
}
}
query FindScenesByFullFingerprints($fingerprints: [FingerprintQueryInput!]!) {
findScenesByFullFingerprints(fingerprints: $fingerprints) {
...SceneFragment
}
}
query FindScenesBySceneFingerprints(
$fingerprints: [[FingerprintQueryInput!]!]!
) {

View file

@ -42,13 +42,14 @@ const (
)
type Loaders struct {
SceneByID *SceneLoader
SceneFiles *SceneFileIDsLoader
ScenePlayCount *ScenePlayCountLoader
SceneOCount *SceneOCountLoader
ScenePlayHistory *ScenePlayHistoryLoader
SceneOHistory *SceneOHistoryLoader
SceneLastPlayed *SceneLastPlayedLoader
SceneByID *SceneLoader
SceneFiles *SceneFileIDsLoader
ScenePlayCount *ScenePlayCountLoader
SceneOCount *SceneOCountLoader
ScenePlayHistory *ScenePlayHistoryLoader
SceneOHistory *SceneOHistoryLoader
SceneLastPlayed *SceneLastPlayedLoader
SceneCustomFields *CustomFieldsLoader
ImageFiles *ImageFileIDsLoader
GalleryFiles *GalleryFileIDsLoader
@ -59,11 +60,14 @@ type Loaders struct {
PerformerByID *PerformerLoader
PerformerCustomFields *CustomFieldsLoader
StudioByID *StudioLoader
TagByID *TagLoader
GroupByID *GroupLoader
FileByID *FileLoader
FolderByID *FolderLoader
StudioByID *StudioLoader
StudioCustomFields *CustomFieldsLoader
TagByID *TagLoader
TagCustomFields *CustomFieldsLoader
GroupByID *GroupLoader
FileByID *FileLoader
FolderByID *FolderLoader
}
type Middleware struct {
@ -99,6 +103,16 @@ func (m Middleware) Middleware(next http.Handler) http.Handler {
maxBatch: maxBatch,
fetch: m.fetchPerformerCustomFields(ctx),
},
StudioCustomFields: &CustomFieldsLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchStudioCustomFields(ctx),
},
SceneCustomFields: &CustomFieldsLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchSceneCustomFields(ctx),
},
StudioByID: &StudioLoader{
wait: wait,
maxBatch: maxBatch,
@ -109,6 +123,11 @@ func (m Middleware) Middleware(next http.Handler) http.Handler {
maxBatch: maxBatch,
fetch: m.fetchTags(ctx),
},
TagCustomFields: &CustomFieldsLoader{
wait: wait,
maxBatch: maxBatch,
fetch: m.fetchTagCustomFields(ctx),
},
GroupByID: &GroupLoader{
wait: wait,
maxBatch: maxBatch,
@ -194,6 +213,18 @@ func (m Middleware) fetchScenes(ctx context.Context) func(keys []int) ([]*models
}
}
// fetchSceneCustomFields builds the batch-fetch function for the scene
// custom-fields dataloader: it loads the custom field maps for a batch
// of scene IDs inside a single database transaction.
func (m Middleware) fetchSceneCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) {
	return func(keys []int) ([]models.CustomFieldMap, []error) {
		var result []models.CustomFieldMap
		txnErr := m.Repository.WithDB(ctx, func(ctx context.Context) error {
			var fetchErr error
			result, fetchErr = m.Repository.Scene.GetCustomFieldsBulk(ctx, keys)
			return fetchErr
		})
		return result, toErrorSlice(txnErr)
	}
}
func (m Middleware) fetchImages(ctx context.Context) func(keys []int) ([]*models.Image, []error) {
return func(keys []int) (ret []*models.Image, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
@ -253,6 +284,18 @@ func (m Middleware) fetchStudios(ctx context.Context) func(keys []int) ([]*model
}
}
// fetchStudioCustomFields builds the batch-fetch function for the studio
// custom-fields dataloader: it loads the custom field maps for a batch
// of studio IDs inside a single database transaction.
func (m Middleware) fetchStudioCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) {
	return func(keys []int) ([]models.CustomFieldMap, []error) {
		var result []models.CustomFieldMap
		txnErr := m.Repository.WithDB(ctx, func(ctx context.Context) error {
			var fetchErr error
			result, fetchErr = m.Repository.Studio.GetCustomFieldsBulk(ctx, keys)
			return fetchErr
		})
		return result, toErrorSlice(txnErr)
	}
}
func (m Middleware) fetchTags(ctx context.Context) func(keys []int) ([]*models.Tag, []error) {
return func(keys []int) (ret []*models.Tag, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {
@ -264,6 +307,18 @@ func (m Middleware) fetchTags(ctx context.Context) func(keys []int) ([]*models.T
}
}
// fetchTagCustomFields builds the batch-fetch function for the tag
// custom-fields dataloader: it loads the custom field maps for a batch
// of tag IDs inside a single database transaction.
func (m Middleware) fetchTagCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) {
	return func(keys []int) ([]models.CustomFieldMap, []error) {
		var result []models.CustomFieldMap
		txnErr := m.Repository.WithDB(ctx, func(ctx context.Context) error {
			var fetchErr error
			result, fetchErr = m.Repository.Tag.GetCustomFieldsBulk(ctx, keys)
			return fetchErr
		})
		return result, toErrorSlice(txnErr)
	}
}
func (m Middleware) fetchGroups(ctx context.Context) func(keys []int) ([]*models.Group, []error) {
return func(keys []int) (ret []*models.Group, errs []error) {
err := m.Repository.WithDB(ctx, func(ctx context.Context) error {

View file

@ -10,6 +10,7 @@ import (
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/performer"
"github.com/stashapp/stash/pkg/utils"
)
func (r *performerResolver) AliasList(ctx context.Context, obj *models.Performer) ([]string, error) {
@ -109,6 +110,15 @@ func (r *performerResolver) HeightCm(ctx context.Context, obj *models.Performer)
return obj.Height, nil
}
// CareerLength resolves the deprecated career_length field by formatting
// the performer's career_start/career_end years as a single range
// string. Returns nil when neither year is set.
func (r *performerResolver) CareerLength(ctx context.Context, obj *models.Performer) (*string, error) {
	hasRange := obj.CareerStart != nil || obj.CareerEnd != nil
	if !hasRange {
		return nil, nil
	}
	formatted := utils.FormatYearRange(obj.CareerStart, obj.CareerEnd)
	return &formatted, nil
}
func (r *performerResolver) Birthdate(ctx context.Context, obj *models.Performer) (*string, error) {
if obj.Birthdate != nil {
ret := obj.Birthdate.String()

View file

@ -410,3 +410,16 @@ func (r *sceneResolver) OHistory(ctx context.Context, obj *models.Scene) ([]*tim
return ptrRet, nil
}
// CustomFields resolves the scene's custom_fields map via the
// per-request dataloader. A missing entry is normalized to an empty
// (non-nil) map so the GraphQL Map! field never serializes as null.
func (r *sceneResolver) CustomFields(ctx context.Context, obj *models.Scene) (map[string]interface{}, error) {
	fields, loadErr := loaders.From(ctx).SceneCustomFields.Load(obj.ID)
	if loadErr != nil {
		return nil, loadErr
	}
	if fields != nil {
		return fields, nil
	}
	return map[string]interface{}{}, nil
}

View file

@ -207,6 +207,19 @@ func (r *studioResolver) Groups(ctx context.Context, obj *models.Studio) (ret []
return ret, nil
}
// CustomFields resolves the studio's custom_fields map via the
// per-request dataloader. A missing entry is normalized to an empty
// (non-nil) map so the GraphQL Map! field never serializes as null.
func (r *studioResolver) CustomFields(ctx context.Context, obj *models.Studio) (map[string]interface{}, error) {
	fields, loadErr := loaders.From(ctx).StudioCustomFields.Load(obj.ID)
	if loadErr != nil {
		return nil, loadErr
	}
	if fields != nil {
		return fields, nil
	}
	return map[string]interface{}{}, nil
}
// deprecated
func (r *studioResolver) Movies(ctx context.Context, obj *models.Studio) (ret []*models.Group, err error) {
return r.Groups(ctx, obj)

View file

@ -181,3 +181,16 @@ func (r *tagResolver) ChildCount(ctx context.Context, obj *models.Tag) (ret int,
return ret, nil
}
// CustomFields resolves the tag's custom_fields map via the per-request
// dataloader. A missing entry is normalized to an empty (non-nil) map
// so the GraphQL Map! field never serializes as null.
func (r *tagResolver) CustomFields(ctx context.Context, obj *models.Tag) (map[string]interface{}, error) {
	fields, loadErr := loaders.From(ctx).TagCustomFields.Load(obj.ID)
	if loadErr != nil {
		return nil, loadErr
	}
	if fields != nil {
		return fields, nil
	}
	return map[string]interface{}{}, nil
}

View file

@ -287,6 +287,11 @@ func (r *mutationResolver) ConfigureGeneral(ctx context.Context, input ConfigGen
if input.PreviewPreset != nil {
c.SetString(config.PreviewPreset, input.PreviewPreset.String())
}
r.setConfigBool(config.UseCustomSpriteInterval, input.UseCustomSpriteInterval)
r.setConfigFloat(config.SpriteInterval, input.SpriteInterval)
r.setConfigInt(config.MinimumSprites, input.MinimumSprites)
r.setConfigInt(config.MaximumSprites, input.MaximumSprites)
r.setConfigInt(config.SpriteScreenshotSize, input.SpriteScreenshotSize)
r.setConfigBool(config.TranscodeHardwareAcceleration, input.TranscodeHardwareAcceleration)
if input.MaxTranscodeSize != nil {
@ -515,6 +520,8 @@ func (r *mutationResolver) ConfigureInterface(ctx context.Context, input ConfigI
r.setConfigBool(config.CustomLocalesEnabled, input.CustomLocalesEnabled)
r.setConfigBool(config.DisableCustomizations, input.DisableCustomizations)
if input.DisableDropdownCreate != nil {
ddc := input.DisableDropdownCreate
r.setConfigBool(config.DisableDropdownCreatePerformer, ddc.Performer)

View file

@ -210,6 +210,58 @@ func (r *mutationResolver) DeleteFiles(ctx context.Context, ids []string) (ret b
return true, nil
}
// DestroyFiles removes the given file entries from the database WITHOUT
// deleting the underlying files from the filesystem (contrast with
// DeleteFiles). All entries are destroyed inside a single transaction;
// if any entry is missing or is a primary file, the whole batch is
// rolled back and an error is returned.
func (r *mutationResolver) DestroyFiles(ctx context.Context, ids []string) (ret bool, err error) {
	fileIDs, err := stringslice.StringSliceToIntSlice(ids)
	if err != nil {
		return false, fmt.Errorf("converting ids: %w", err)
	}

	// ZipDestroyer handles zip-aware destruction: it can remove both the
	// file row and any folder rows contained within a zip file.
	destroyer := &file.ZipDestroyer{
		FileDestroyer:   r.repository.File,
		FolderDestroyer: r.repository.Folder,
	}

	if err := r.withTxn(ctx, func(ctx context.Context) error {
		qb := r.repository.File

		for _, fileIDInt := range fileIDs {
			fileID := models.FileID(fileIDInt)
			// Find returns a slice; only the first element is used below.
			f, err := qb.Find(ctx, fileID)
			if err != nil {
				return err
			}

			if len(f) == 0 {
				return fmt.Errorf("file with id %d not found", fileID)
			}

			path := f[0].Base().Path

			// ensure not a primary file: destroying a primary file entry
			// would orphan the object (scene/image/gallery) that owns it.
			isPrimary, err := qb.IsPrimary(ctx, fileID)
			if err != nil {
				return fmt.Errorf("checking if file %s is primary: %w", path, err)
			}

			if isPrimary {
				return fmt.Errorf("cannot destroy primary file entry %s", path)
			}

			// destroy DB entries only (no filesystem deletion)
			const deleteFile = false
			if err := destroyer.DestroyZip(ctx, f[0], nil, deleteFile); err != nil {
				return fmt.Errorf("destroying file entry %s: %w", path, err)
			}
		}

		return nil
	}); err != nil {
		return false, err
	}

	return true, nil
}
func (r *mutationResolver) FileSetFingerprints(ctx context.Context, input FileSetFingerprintsInput) (bool, error) {
fileIDInt, err := strconv.Atoi(input.ID)
if err != nil {

View file

@ -346,6 +346,7 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
deleteGenerated := utils.IsTrue(input.DeleteGenerated)
deleteFile := utils.IsTrue(input.DeleteFile)
destroyFileEntry := utils.IsTrue(input.DestroyFileEntry)
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Gallery
@ -366,7 +367,7 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
galleries = append(galleries, gallery)
imgsDestroyed, err = r.galleryService.Destroy(ctx, gallery, fileDeleter, deleteGenerated, deleteFile)
imgsDestroyed, err = r.galleryService.Destroy(ctx, gallery, fileDeleter, deleteGenerated, deleteFile, destroyFileEntry)
if err != nil {
return err
}

View file

@ -325,7 +325,7 @@ func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageD
return fmt.Errorf("image with id %d not found", imageID)
}
return r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile))
return r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile), utils.IsTrue(input.DestroyFileEntry))
}); err != nil {
fileDeleter.Rollback()
return false, err
@ -372,7 +372,7 @@ func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.Image
images = append(images, i)
if err := r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile)); err != nil {
if err := r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile), utils.IsTrue(input.DestroyFileEntry)); err != nil {
return err
}
}

View file

@ -122,9 +122,10 @@ func (r *mutationResolver) MigrateHashNaming(ctx context.Context) (string, error
func (r *mutationResolver) BackupDatabase(ctx context.Context, input BackupDatabaseInput) (*string, error) {
// if download is true, then backup to temporary file and return a link
download := input.Download != nil && *input.Download
includeBlobs := input.IncludeBlobs != nil && *input.IncludeBlobs
mgr := manager.GetInstance()
backupPath, backupName, err := mgr.BackupDatabase(download)
backupPath, backupName, err := mgr.BackupDatabase(download, includeBlobs)
if err != nil {
logger.Errorf("Error backing up database: %v", err)
return nil, err

View file

@ -43,7 +43,7 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
newPerformer.Name = strings.TrimSpace(input.Name)
newPerformer.Disambiguation = translator.string(input.Disambiguation)
newPerformer.Aliases = models.NewRelatedStrings(stringslice.TrimSpace(input.AliasList))
newPerformer.Aliases = models.NewRelatedStrings(stringslice.UniqueExcludeFold(stringslice.TrimSpace(input.AliasList), newPerformer.Name))
newPerformer.Gender = input.Gender
newPerformer.Ethnicity = translator.string(input.Ethnicity)
newPerformer.Country = translator.string(input.Country)
@ -52,7 +52,17 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per
newPerformer.FakeTits = translator.string(input.FakeTits)
newPerformer.PenisLength = input.PenisLength
newPerformer.Circumcised = input.Circumcised
newPerformer.CareerLength = translator.string(input.CareerLength)
newPerformer.CareerStart = input.CareerStart
newPerformer.CareerEnd = input.CareerEnd
// if career_start/career_end not provided, parse deprecated career_length
if newPerformer.CareerStart == nil && newPerformer.CareerEnd == nil && input.CareerLength != nil {
start, end, err := utils.ParseYearRangeString(*input.CareerLength)
if err != nil {
return nil, fmt.Errorf("could not parse career_length %q: %w", *input.CareerLength, err)
}
newPerformer.CareerStart = start
newPerformer.CareerEnd = end
}
newPerformer.Tattoos = translator.string(input.Tattoos)
newPerformer.Piercings = translator.string(input.Piercings)
newPerformer.Favorite = translator.bool(input.Favorite)
@ -261,7 +271,22 @@ func performerPartialFromInput(input models.PerformerUpdateInput, translator cha
updatedPerformer.FakeTits = translator.optionalString(input.FakeTits, "fake_tits")
updatedPerformer.PenisLength = translator.optionalFloat64(input.PenisLength, "penis_length")
updatedPerformer.Circumcised = translator.optionalString((*string)(input.Circumcised), "circumcised")
updatedPerformer.CareerLength = translator.optionalString(input.CareerLength, "career_length")
// prefer career_start/career_end over deprecated career_length
if translator.hasField("career_start") || translator.hasField("career_end") {
updatedPerformer.CareerStart = translator.optionalInt(input.CareerStart, "career_start")
updatedPerformer.CareerEnd = translator.optionalInt(input.CareerEnd, "career_end")
} else if translator.hasField("career_length") && input.CareerLength != nil {
start, end, err := utils.ParseYearRangeString(*input.CareerLength)
if err != nil {
return nil, fmt.Errorf("could not parse career_length %q: %w", *input.CareerLength, err)
}
if start != nil {
updatedPerformer.CareerStart = models.NewOptionalInt(*start)
}
if end != nil {
updatedPerformer.CareerEnd = models.NewOptionalInt(*end)
}
}
updatedPerformer.Tattoos = translator.optionalString(input.Tattoos, "tattoos")
updatedPerformer.Piercings = translator.optionalString(input.Piercings, "piercings")
updatedPerformer.Favorite = translator.optionalBool(input.Favorite, "favorite")
@ -348,6 +373,27 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per
}
}
if updatedPerformer.Aliases != nil {
p, err := qb.Find(ctx, performerID)
if err != nil {
return err
}
if p != nil {
if err := p.LoadAliases(ctx, qb); err != nil {
return err
}
effectiveAliases := updatedPerformer.Aliases.Apply(p.Aliases.List())
name := p.Name
if updatedPerformer.Name.Set {
name = updatedPerformer.Name.Value
}
sanitized := stringslice.UniqueExcludeFold(effectiveAliases, name)
updatedPerformer.Aliases.Values = sanitized
updatedPerformer.Aliases.Mode = models.RelationshipUpdateModeSet
}
}
if err := performer.ValidateUpdate(ctx, performerID, *updatedPerformer, qb); err != nil {
return err
}
@ -396,7 +442,22 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe
updatedPerformer.FakeTits = translator.optionalString(input.FakeTits, "fake_tits")
updatedPerformer.PenisLength = translator.optionalFloat64(input.PenisLength, "penis_length")
updatedPerformer.Circumcised = translator.optionalString((*string)(input.Circumcised), "circumcised")
updatedPerformer.CareerLength = translator.optionalString(input.CareerLength, "career_length")
// prefer career_start/career_end over deprecated career_length
if translator.hasField("career_start") || translator.hasField("career_end") {
updatedPerformer.CareerStart = translator.optionalInt(input.CareerStart, "career_start")
updatedPerformer.CareerEnd = translator.optionalInt(input.CareerEnd, "career_end")
} else if translator.hasField("career_length") && input.CareerLength != nil {
start, end, err := utils.ParseYearRangeString(*input.CareerLength)
if err != nil {
return nil, fmt.Errorf("could not parse career_length %q: %w", *input.CareerLength, err)
}
if start != nil {
updatedPerformer.CareerStart = models.NewOptionalInt(*start)
}
if end != nil {
updatedPerformer.CareerEnd = models.NewOptionalInt(*end)
}
}
updatedPerformer.Tattoos = translator.optionalString(input.Tattoos, "tattoos")
updatedPerformer.Piercings = translator.optionalString(input.Piercings, "piercings")

View file

@ -103,8 +103,15 @@ func (r *mutationResolver) SceneCreate(ctx context.Context, input models.SceneCr
}
}
customFields := convertMapJSONNumbers(input.CustomFields)
if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = r.Resolver.sceneService.Create(ctx, &newScene, fileIDs, coverImageData)
ret, err = r.Resolver.sceneService.Create(ctx, models.CreateSceneInput{
Scene: &newScene,
FileIDs: fileIDs,
CoverImage: coverImageData,
CustomFields: customFields,
})
return err
}); err != nil {
return nil, err
@ -306,6 +313,15 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp
}
}
var customFields *models.CustomFieldsInput
if input.CustomFields != nil {
cfCopy := *input.CustomFields
customFields = &cfCopy
// convert json.Numbers to int/float
customFields.Full = convertMapJSONNumbers(customFields.Full)
customFields.Partial = convertMapJSONNumbers(customFields.Partial)
}
scene, err := qb.UpdatePartial(ctx, sceneID, *updatedScene)
if err != nil {
return nil, err
@ -317,6 +333,12 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp
}
}
if customFields != nil {
if err := qb.SetCustomFields(ctx, scene.ID, *customFields); err != nil {
return nil, err
}
}
return scene, nil
}
@ -387,6 +409,12 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneU
}
}
var customFields *models.CustomFieldsInput
if input.CustomFields != nil {
cf := handleUpdateCustomFields(*input.CustomFields)
customFields = &cf
}
ret := []*models.Scene{}
// Start the transaction and save the scenes
@ -399,6 +427,12 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneU
return err
}
if customFields != nil {
if err := qb.SetCustomFields(ctx, scene.ID, *customFields); err != nil {
return err
}
}
ret = append(ret, scene)
}
@ -441,6 +475,7 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
deleteGenerated := utils.IsTrue(input.DeleteGenerated)
deleteFile := utils.IsTrue(input.DeleteFile)
destroyFileEntry := utils.IsTrue(input.DestroyFileEntry)
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Scene
@ -457,7 +492,7 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
// kill any running encoders
manager.KillRunningStreams(s, fileNamingAlgo)
return r.sceneService.Destroy(ctx, s, fileDeleter, deleteGenerated, deleteFile)
return r.sceneService.Destroy(ctx, s, fileDeleter, deleteGenerated, deleteFile, destroyFileEntry)
}); err != nil {
fileDeleter.Rollback()
return false, err
@ -495,6 +530,7 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
deleteGenerated := utils.IsTrue(input.DeleteGenerated)
deleteFile := utils.IsTrue(input.DeleteFile)
destroyFileEntry := utils.IsTrue(input.DestroyFileEntry)
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Scene
@ -513,7 +549,7 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
// kill any running encoders
manager.KillRunningStreams(scene, fileNamingAlgo)
if err := r.sceneService.Destroy(ctx, scene, fileDeleter, deleteGenerated, deleteFile); err != nil {
if err := r.sceneService.Destroy(ctx, scene, fileDeleter, deleteGenerated, deleteFile, destroyFileEntry); err != nil {
return err
}
}
@ -573,6 +609,7 @@ func (r *mutationResolver) SceneMerge(ctx context.Context, input SceneMergeInput
var values *models.ScenePartial
var coverImageData []byte
var customFields *models.CustomFieldsInput
if input.Values != nil {
translator := changesetTranslator{
@ -591,6 +628,11 @@ func (r *mutationResolver) SceneMerge(ctx context.Context, input SceneMergeInput
return nil, fmt.Errorf("processing cover image: %w", err)
}
}
if input.Values.CustomFields != nil {
cf := handleUpdateCustomFields(*input.Values.CustomFields)
customFields = &cf
}
} else {
v := models.NewScenePartial()
values = &v
@ -622,7 +664,20 @@ func (r *mutationResolver) SceneMerge(ctx context.Context, input SceneMergeInput
return fmt.Errorf("scene with id %d not found", destID)
}
return r.sceneUpdateCoverImage(ctx, ret, coverImageData)
// only update cover image if one was provided
if len(coverImageData) > 0 {
if err := r.sceneUpdateCoverImage(ctx, ret, coverImageData); err != nil {
return err
}
}
if customFields != nil {
if err := r.Resolver.repository.Scene.SetCustomFields(ctx, ret.ID, *customFields); err != nil {
return err
}
}
return nil
}); err != nil {
return nil, err
}

View file

@ -31,14 +31,15 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
}
// Populate a new studio from the input
newStudio := models.NewStudio()
newStudio := models.NewCreateStudioInput()
newStudio.Name = strings.TrimSpace(input.Name)
newStudio.Rating = input.Rating100
newStudio.Favorite = translator.bool(input.Favorite)
newStudio.Details = translator.string(input.Details)
newStudio.IgnoreAutoTag = translator.bool(input.IgnoreAutoTag)
newStudio.Aliases = models.NewRelatedStrings(stringslice.TrimSpace(input.Aliases))
newStudio.Organized = translator.bool(input.Organized)
newStudio.Aliases = models.NewRelatedStrings(stringslice.UniqueExcludeFold(stringslice.TrimSpace(input.Aliases), newStudio.Name))
newStudio.StashIDs = models.NewRelatedStashIDs(models.StashIDInputs(input.StashIds).ToStashIDs())
var err error
@ -61,6 +62,7 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
}
newStudio.CustomFields = convertMapJSONNumbers(input.CustomFields)
// Process the base 64 encoded image string
var imageData []byte
@ -119,6 +121,7 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
updatedStudio.Rating = translator.optionalInt(input.Rating100, "rating100")
updatedStudio.Favorite = translator.optionalBool(input.Favorite, "favorite")
updatedStudio.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag")
updatedStudio.Organized = translator.optionalBool(input.Organized, "organized")
updatedStudio.Aliases = translator.updateStrings(input.Aliases, "aliases")
updatedStudio.StashIDs = translator.updateStashIDs(input.StashIds, "stash_ids")
@ -152,6 +155,11 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
}
}
updatedStudio.CustomFields = input.CustomFields
// convert json.Numbers to int/float
updatedStudio.CustomFields.Full = convertMapJSONNumbers(updatedStudio.CustomFields.Full)
updatedStudio.CustomFields.Partial = convertMapJSONNumbers(updatedStudio.CustomFields.Partial)
// Process the base 64 encoded image string
var imageData []byte
imageIncluded := translator.hasField("image")
@ -167,6 +175,28 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Studio
if updatedStudio.Aliases != nil {
s, err := qb.Find(ctx, studioID)
if err != nil {
return err
}
if s != nil {
if err := s.LoadAliases(ctx, qb); err != nil {
return err
}
effectiveAliases := updatedStudio.Aliases.Apply(s.Aliases.List())
name := s.Name
if updatedStudio.Name.Set {
name = updatedStudio.Name.Value
}
sanitized := stringslice.UniqueExcludeFold(effectiveAliases, name)
updatedStudio.Aliases.Values = sanitized
updatedStudio.Aliases.Mode = models.RelationshipUpdateModeSet
}
}
if err := studio.ValidateModify(ctx, updatedStudio, qb); err != nil {
return err
}
@ -233,6 +263,7 @@ func (r *mutationResolver) BulkStudioUpdate(ctx context.Context, input BulkStudi
partial.Rating = translator.optionalInt(input.Rating100, "rating100")
partial.Details = translator.optionalString(input.Details, "details")
partial.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag")
partial.Organized = translator.optionalBool(input.Organized, "organized")
partial.TagIDs, err = translator.updateIdsBulk(input.TagIds, "tag_ids")
if err != nil {

View file

@ -6,7 +6,6 @@ import (
"strconv"
"strings"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin/hook"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
@ -31,11 +30,14 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput)
}
// Populate a new tag from the input
newTag := models.NewTag()
newTag := models.CreateTagInput{
Tag: &models.Tag{},
}
*newTag.Tag = models.NewTag()
newTag.Name = strings.TrimSpace(input.Name)
newTag.SortName = translator.string(input.SortName)
newTag.Aliases = models.NewRelatedStrings(stringslice.TrimSpace(input.Aliases))
newTag.Aliases = models.NewRelatedStrings(stringslice.UniqueExcludeFold(stringslice.TrimSpace(input.Aliases), newTag.Name))
newTag.Favorite = translator.bool(input.Favorite)
newTag.Description = translator.string(input.Description)
newTag.IgnoreAutoTag = translator.bool(input.IgnoreAutoTag)
@ -60,6 +62,8 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput)
return nil, fmt.Errorf("converting child tag ids: %w", err)
}
newTag.CustomFields = convertMapJSONNumbers(input.CustomFields)
// Process the base 64 encoded image string
var imageData []byte
if input.Image != nil {
@ -73,7 +77,7 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput)
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Tag
if err := tag.ValidateCreate(ctx, newTag, qb); err != nil {
if err := tag.ValidateCreate(ctx, *newTag.Tag, qb); err != nil {
return err
}
@ -98,17 +102,7 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput)
return r.getTag(ctx, newTag.ID)
}
func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) (*models.Tag, error) {
tagID, err := strconv.Atoi(input.ID)
if err != nil {
return nil, fmt.Errorf("converting id: %w", err)
}
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
// Populate tag from the input
func tagPartialFromInput(input TagUpdateInput, translator changesetTranslator) (*models.TagPartial, error) {
updatedTag := models.NewTagPartial()
updatedTag.Name = translator.optionalString(input.Name, "name")
@ -127,6 +121,7 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput)
}
updatedTag.StashIDs = translator.updateStashIDs(updateStashIDInputs, "stash_ids")
var err error
updatedTag.ParentIDs, err = translator.updateIds(input.ParentIds, "parent_ids")
if err != nil {
return nil, fmt.Errorf("converting parent tag ids: %w", err)
@ -137,6 +132,32 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput)
return nil, fmt.Errorf("converting child tag ids: %w", err)
}
if input.CustomFields != nil {
updatedTag.CustomFields = *input.CustomFields
// convert json.Numbers to int/float
updatedTag.CustomFields.Full = convertMapJSONNumbers(updatedTag.CustomFields.Full)
updatedTag.CustomFields.Partial = convertMapJSONNumbers(updatedTag.CustomFields.Partial)
}
return &updatedTag, nil
}
func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) (*models.Tag, error) {
tagID, err := strconv.Atoi(input.ID)
if err != nil {
return nil, fmt.Errorf("converting id: %w", err)
}
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
// Populate tag from the input
updatedTag, err := tagPartialFromInput(input, translator)
if err != nil {
return nil, err
}
var imageData []byte
imageIncluded := translator.hasField("image")
if input.Image != nil {
@ -151,11 +172,33 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput)
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Tag
if err := tag.ValidateUpdate(ctx, tagID, updatedTag, qb); err != nil {
if updatedTag.Aliases != nil {
t, err := qb.Find(ctx, tagID)
if err != nil {
return err
}
if t != nil {
if err := t.LoadAliases(ctx, qb); err != nil {
return err
}
newAliases := updatedTag.Aliases.Apply(t.Aliases.List())
name := t.Name
if updatedTag.Name.Set {
name = updatedTag.Name.Value
}
sanitized := stringslice.UniqueExcludeFold(newAliases, name)
updatedTag.Aliases.Values = sanitized
updatedTag.Aliases.Mode = models.RelationshipUpdateModeSet
}
}
if err := tag.ValidateUpdate(ctx, tagID, *updatedTag, qb); err != nil {
return err
}
t, err = qb.UpdatePartial(ctx, tagID, updatedTag)
t, err = qb.UpdatePartial(ctx, tagID, *updatedTag)
if err != nil {
return err
}
@ -303,6 +346,31 @@ func (r *mutationResolver) TagsMerge(ctx context.Context, input TagsMergeInput)
return nil, nil
}
var values *models.TagPartial
var imageData []byte
if input.Values != nil {
translator := changesetTranslator{
inputMap: getNamedUpdateInputMap(ctx, "input.values"),
}
values, err = tagPartialFromInput(*input.Values, translator)
if err != nil {
return nil, err
}
if input.Values.Image != nil {
var err error
imageData, err = utils.ProcessImageInput(ctx, *input.Values.Image)
if err != nil {
return nil, fmt.Errorf("processing cover image: %w", err)
}
}
} else {
v := models.NewTagPartial()
values = &v
}
var t *models.Tag
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Tag
@ -317,28 +385,22 @@ func (r *mutationResolver) TagsMerge(ctx context.Context, input TagsMergeInput)
return fmt.Errorf("tag with id %d not found", destination)
}
parents, children, err := tag.MergeHierarchy(ctx, destination, source, qb)
if err != nil {
return err
}
if err = qb.Merge(ctx, source, destination); err != nil {
return err
}
err = qb.UpdateParentTags(ctx, destination, parents)
if err != nil {
return err
}
err = qb.UpdateChildTags(ctx, destination, children)
if err != nil {
if err := tag.ValidateUpdate(ctx, destination, *values, qb); err != nil {
return err
}
err = tag.ValidateHierarchyExisting(ctx, t, parents, children, qb)
if err != nil {
logger.Errorf("Error merging tag: %s", err)
return err
if _, err := qb.UpdatePartial(ctx, destination, *values); err != nil {
return fmt.Errorf("updating tag: %w", err)
}
if len(imageData) > 0 {
if err := qb.UpdateImage(ctx, destination, imageData); err != nil {
return err
}
}
return nil

View file

@ -96,6 +96,11 @@ func makeConfigGeneralResult() *ConfigGeneralResult {
CalculateMd5: config.IsCalculateMD5(),
VideoFileNamingAlgorithm: config.GetVideoFileNamingAlgorithm(),
ParallelTasks: config.GetParallelTasks(),
UseCustomSpriteInterval: config.GetUseCustomSpriteInterval(),
SpriteInterval: config.GetSpriteInterval(),
SpriteScreenshotSize: config.GetSpriteScreenshotSize(),
MinimumSprites: config.GetMinimumSprites(),
MaximumSprites: config.GetMaximumSprites(),
PreviewAudio: config.GetPreviewAudio(),
PreviewSegments: config.GetPreviewSegments(),
PreviewSegmentDuration: config.GetPreviewSegmentDuration(),
@ -156,6 +161,7 @@ func makeConfigInterfaceResult() *ConfigInterfaceResult {
javascriptEnabled := config.GetJavascriptEnabled()
customLocales := config.GetCustomLocales()
customLocalesEnabled := config.GetCustomLocalesEnabled()
disableCustomizations := config.GetDisableCustomizations()
language := config.GetLanguage()
handyKey := config.GetHandyKey()
scriptOffset := config.GetFunscriptOffset()
@ -183,6 +189,7 @@ func makeConfigInterfaceResult() *ConfigInterfaceResult {
JavascriptEnabled: &javascriptEnabled,
CustomLocales: &customLocales,
CustomLocalesEnabled: &customLocalesEnabled,
DisableCustomizations: &disableCustomizations,
Language: &language,
ImageLightbox: &imageLightboxOptions,

View file

@ -450,7 +450,7 @@ func cssHandler(c *config.Config) func(w http.ResponseWriter, r *http.Request) {
return func(w http.ResponseWriter, r *http.Request) {
var paths []string
if c.GetCSSEnabled() {
if c.GetCSSEnabled() && !c.GetDisableCustomizations() {
// search for custom.css in current directory, then $HOME/.stash
fn := c.GetCSSPath()
exists, _ := fsutil.FileExists(fn)
@ -468,7 +468,7 @@ func javascriptHandler(c *config.Config) func(w http.ResponseWriter, r *http.Req
return func(w http.ResponseWriter, r *http.Request) {
var paths []string
if c.GetJavascriptEnabled() {
if c.GetJavascriptEnabled() && !c.GetDisableCustomizations() {
// search for custom.js in current directory, then $HOME/.stash
fn := c.GetJavascriptPath()
exists, _ := fsutil.FileExists(fn)
@ -486,7 +486,7 @@ func customLocalesHandler(c *config.Config) func(w http.ResponseWriter, r *http.
return func(w http.ResponseWriter, r *http.Request) {
buffer := bytes.Buffer{}
if c.GetCustomLocalesEnabled() {
if c.GetCustomLocalesEnabled() && !c.GetDisableCustomizations() {
// search for custom-locales.json in current directory, then $HOME/.stash
path := c.GetCustomLocalesPath()
exists, _ := fsutil.FileExists(path)

View file

@ -101,16 +101,15 @@ func createPerformer(ctx context.Context, pqb models.PerformerWriter) error {
func createStudio(ctx context.Context, qb models.StudioWriter, name string) (*models.Studio, error) {
// create the studio
studio := models.Studio{
Name: name,
}
studio := models.NewCreateStudioInput()
studio.Name = name
err := qb.Create(ctx, &studio)
if err != nil {
return nil, err
}
return &studio, nil
return studio.Studio, nil
}
func createTag(ctx context.Context, qb models.TagWriter) error {
@ -119,7 +118,7 @@ func createTag(ctx context.Context, qb models.TagWriter) error {
Name: testName,
}
err := qb.Create(ctx, &tag)
err := qb.Create(ctx, &models.CreateTagInput{Tag: &tag})
if err != nil {
return err
}

View file

@ -147,6 +147,9 @@ func (t *SceneIdentifier) getOptions(source ScraperSource) MetadataOptions {
if source.Options.IncludeMalePerformers != nil {
options.IncludeMalePerformers = source.Options.IncludeMalePerformers
}
if source.Options.PerformerGenders != nil {
options.PerformerGenders = source.Options.PerformerGenders
}
if source.Options.SkipMultipleMatches != nil {
options.SkipMultipleMatches = source.Options.SkipMultipleMatches
}
@ -204,13 +207,23 @@ func (t *SceneIdentifier) getSceneUpdater(ctx context.Context, s *models.Scene,
ret.Partial.StudioID = models.NewOptionalInt(*studioID)
}
includeMalePerformers := true
if options.IncludeMalePerformers != nil {
includeMalePerformers = *options.IncludeMalePerformers
// Determine allowed genders for performer filtering
var allowedGenders []models.GenderEnum
if options.PerformerGenders != nil {
// New field takes precedence
allowedGenders = options.PerformerGenders
} else if options.IncludeMalePerformers != nil && !*options.IncludeMalePerformers {
// Legacy: if includeMalePerformers is false, include all genders except male
for _, g := range models.AllGenderEnum {
if g != models.GenderEnumMale {
allowedGenders = append(allowedGenders, g)
}
}
}
// nil allowedGenders means include all performers
addSkipSingleNamePerformerTag := false
performerIDs, err := rel.performers(ctx, !includeMalePerformers)
performerIDs, err := rel.performers(ctx, allowedGenders)
if err != nil {
if errors.Is(err, ErrSkipSingleNamePerformer) {
addSkipSingleNamePerformerTag = true

View file

@ -60,9 +60,15 @@ func TestSceneIdentifier_Identify(t *testing.T) {
)
defaultOptions := &MetadataOptions{
SetOrganized: &boolFalse,
SetCoverImage: &boolFalse,
IncludeMalePerformers: &boolFalse,
SetOrganized: &boolFalse,
SetCoverImage: &boolFalse,
PerformerGenders: []models.GenderEnum{
models.GenderEnumFemale,
models.GenderEnumTransgenderFemale,
models.GenderEnumTransgenderMale,
models.GenderEnumIntersex,
models.GenderEnumNonBinary,
},
SkipSingleNamePerformers: &boolFalse,
}
sources := []ScraperSource{
@ -216,9 +222,15 @@ func TestSceneIdentifier_modifyScene(t *testing.T) {
boolFalse := false
defaultOptions := &MetadataOptions{
SetOrganized: &boolFalse,
SetCoverImage: &boolFalse,
IncludeMalePerformers: &boolFalse,
SetOrganized: &boolFalse,
SetCoverImage: &boolFalse,
PerformerGenders: []models.GenderEnum{
models.GenderEnumFemale,
models.GenderEnumTransgenderFemale,
models.GenderEnumTransgenderMale,
models.GenderEnumIntersex,
models.GenderEnumNonBinary,
},
SkipSingleNamePerformers: &boolFalse,
}
tr := &SceneIdentifier{

View file

@ -5,6 +5,7 @@ import (
"io"
"strconv"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scraper"
)
@ -32,7 +33,10 @@ type MetadataOptions struct {
SetCoverImage *bool `json:"setCoverImage"`
SetOrganized *bool `json:"setOrganized"`
// defaults to true if not provided
// Deprecated: use PerformerGenders instead
IncludeMalePerformers *bool `json:"includeMalePerformers"`
// Filter to only include performers with these genders. If not provided, all genders are included.
PerformerGenders []models.GenderEnum `json:"performerGenders"`
// defaults to true if not provided
SkipMultipleMatches *bool `json:"skipMultipleMatches"`
// ID of tag to tag skipped multiple matches with

View file

@ -5,6 +5,7 @@ import (
"context"
"errors"
"fmt"
"slices"
"strconv"
"strings"
"time"
@ -69,7 +70,7 @@ func (g sceneRelationships) studio(ctx context.Context) (*int, error) {
return nil, nil
}
func (g sceneRelationships) performers(ctx context.Context, ignoreMale bool) ([]int, error) {
func (g sceneRelationships) performers(ctx context.Context, allowedGenders []models.GenderEnum) ([]int, error) {
fieldStrategy := g.fieldOptions["performers"]
scraped := g.result.result.Performers
@ -97,8 +98,11 @@ func (g sceneRelationships) performers(ctx context.Context, ignoreMale bool) ([]
singleNamePerformerSkipped := false
for _, p := range scraped {
if ignoreMale && p.Gender != nil && strings.EqualFold(*p.Gender, models.GenderEnumMale.String()) {
continue
if allowedGenders != nil && p.Gender != nil {
gender := models.GenderEnum(strings.ToUpper(*p.Gender))
if !slices.Contains(allowedGenders, gender) {
continue
}
}
performerID, err := getPerformerID(ctx, endpoint, g.performerCreator, p, createMissing, g.skipSingleNamePerformers)
@ -167,7 +171,9 @@ func (g sceneRelationships) tags(ctx context.Context) ([]int, error) {
} else if createMissing {
newTag := t.ToTag(endpoint, nil)
err := g.tagCreator.Create(ctx, newTag)
err := g.tagCreator.Create(ctx, &models.CreateTagInput{
Tag: newTag,
})
if err != nil {
return nil, fmt.Errorf("error creating tag: %w", err)
}

View file

@ -27,7 +27,7 @@ func Test_sceneRelationships_studio(t *testing.T) {
db := mocks.NewDatabase()
db.Studio.On("Create", testCtx, mock.Anything).Run(func(args mock.Arguments) {
s := args.Get(1).(*models.Studio)
s := args.Get(1).(*models.CreateStudioInput)
s.ID = validStoredIDInt
}).Return(nil)
@ -183,13 +183,13 @@ func Test_sceneRelationships_performers(t *testing.T) {
}
tests := []struct {
name string
scene *models.Scene
fieldOptions *FieldOptions
scraped []*models.ScrapedPerformer
ignoreMale bool
want []int
wantErr bool
name string
scene *models.Scene
fieldOptions *FieldOptions
scraped []*models.ScrapedPerformer
allowedGenders []models.GenderEnum
want []int
wantErr bool
}{
{
"ignore",
@ -202,7 +202,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
StoredID: &validStoredID,
},
},
false,
nil,
nil,
false,
},
@ -211,7 +211,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
emptyScene,
defaultOptions,
[]*models.ScrapedPerformer{},
false,
nil,
nil,
false,
},
@ -225,7 +225,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
StoredID: &existingPerformerStr,
},
},
false,
nil,
nil,
false,
},
@ -239,7 +239,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
StoredID: &validStoredID,
},
},
false,
nil,
[]int{existingPerformerID, validStoredIDInt},
false,
},
@ -254,7 +254,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
Gender: &male,
},
},
true,
[]models.GenderEnum{models.GenderEnumFemale, models.GenderEnumTransgenderMale, models.GenderEnumTransgenderFemale, models.GenderEnumIntersex, models.GenderEnumNonBinary},
nil,
false,
},
@ -270,7 +270,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
StoredID: &validStoredID,
},
},
false,
nil,
[]int{validStoredIDInt},
false,
},
@ -287,7 +287,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
Gender: &female,
},
},
true,
[]models.GenderEnum{models.GenderEnumFemale, models.GenderEnumTransgenderMale, models.GenderEnumTransgenderFemale, models.GenderEnumIntersex, models.GenderEnumNonBinary},
[]int{validStoredIDInt},
false,
},
@ -304,7 +304,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
StoredID: &invalidStoredID,
},
},
false,
nil,
nil,
true,
},
@ -319,7 +319,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
},
}
got, err := tr.performers(testCtx, tt.ignoreMale)
got, err := tr.performers(testCtx, tt.allowedGenders)
if (err != nil) != tt.wantErr {
t.Errorf("sceneRelationships.performers() error = %v, wantErr %v", err, tt.wantErr)
return
@ -368,14 +368,14 @@ func Test_sceneRelationships_tags(t *testing.T) {
db := mocks.NewDatabase()
db.Tag.On("Create", testCtx, mock.MatchedBy(func(p *models.Tag) bool {
return p.Name == validName
db.Tag.On("Create", testCtx, mock.MatchedBy(func(p *models.CreateTagInput) bool {
return p.Tag.Name == validName
})).Run(func(args mock.Arguments) {
t := args.Get(1).(*models.Tag)
t.ID = validStoredIDInt
t := args.Get(1).(*models.CreateTagInput)
t.Tag.ID = validStoredIDInt
}).Return(nil)
db.Tag.On("Create", testCtx, mock.MatchedBy(func(p *models.Tag) bool {
return p.Name == invalidName
db.Tag.On("Create", testCtx, mock.MatchedBy(func(p *models.CreateTagInput) bool {
return p.Tag.Name == invalidName
})).Return(errors.New("error creating tag"))
tr := sceneRelationships{

View file

@ -21,13 +21,13 @@ func Test_createMissingStudio(t *testing.T) {
db := mocks.NewDatabase()
db.Studio.On("Create", testCtx, mock.MatchedBy(func(p *models.Studio) bool {
db.Studio.On("Create", testCtx, mock.MatchedBy(func(p *models.CreateStudioInput) bool {
return p.Name == validName
})).Run(func(args mock.Arguments) {
s := args.Get(1).(*models.Studio)
s := args.Get(1).(*models.CreateStudioInput)
s.ID = createdID
}).Return(nil)
db.Studio.On("Create", testCtx, mock.MatchedBy(func(p *models.Studio) bool {
db.Studio.On("Create", testCtx, mock.MatchedBy(func(p *models.CreateStudioInput) bool {
return p.Name == invalidName
})).Return(errors.New("error creating studio"))

185
internal/manager/backup.go Normal file
View file

@ -0,0 +1,185 @@
package manager
import (
"archive/zip"
"fmt"
"io"
"io/fs"
"os"
"path/filepath"
"strings"
"github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/logger"
)
type databaseBackupZip struct {
*zip.Writer
}
func (z *databaseBackupZip) zipFileRename(fn, outDir, outFn string) error {
p := filepath.Join(outDir, outFn)
p = filepath.ToSlash(p)
f, err := z.Create(p)
if err != nil {
return fmt.Errorf("error creating zip entry for %s: %v", fn, err)
}
i, err := os.Open(fn)
if err != nil {
return fmt.Errorf("error opening %s: %v", fn, err)
}
defer i.Close()
if _, err := io.Copy(f, i); err != nil {
return fmt.Errorf("error writing %s to zip: %v", fn, err)
}
return nil
}
func (z *databaseBackupZip) zipFile(fn, outDir string) error {
return z.zipFileRename(fn, outDir, filepath.Base(fn))
}
// BackupDatabase creates a backup of the database and returns the path and
// file name of the backup artifact.
//
// If includeBlobs is false, or blobs are not stored on the filesystem, the
// backup is a plain sqlite file produced by backupDatabaseOnly. Otherwise
// the result is a zip file containing the sqlite database plus the entire
// blobs directory under "blobs/".
//
// If download is true the artifact is written to the generated downloads
// directory (to be served as a link); otherwise it is written to the
// configured backup directory.
func (s *Manager) BackupDatabase(download bool, includeBlobs bool) (string, string, error) {
	// blobs can only be bundled when they live on the filesystem; in every
	// other case fall back to the plain sqlite backup
	if !includeBlobs || s.Config.GetBlobsStorage() != config.BlobStorageTypeFilesystem {
		return s.backupDatabaseOnly(download)
	}

	// use tmp directory for the intermediate sqlite backup
	backupDir := s.Paths.Generated.Tmp
	if err := fsutil.EnsureDir(backupDir); err != nil {
		return "", "", fmt.Errorf("could not create backup directory %v: %w", backupDir, err)
	}

	// reserve a unique temporary file name...
	f, err := os.CreateTemp(backupDir, "backup*.sqlite")
	if err != nil {
		return "", "", err
	}

	backupPath := f.Name()
	backupName := s.Database.DatabaseBackupPath("")
	f.Close()

	// ...then delete it so that the backup operation can create it itself
	if err := os.Remove(backupPath); err != nil {
		return "", "", fmt.Errorf("could not remove temporary backup file %v: %w", backupPath, err)
	}

	if err := s.Database.Backup(backupPath); err != nil {
		return "", "", err
	}

	// decide where the zip file goes: downloads dir when serving a link,
	// otherwise the configured backup directory
	zipFileDir := s.Paths.Generated.Downloads
	if !download {
		zipFileDir = s.Config.GetBackupDirectoryPathOrDefault()
		if zipFileDir != "" {
			if err := fsutil.EnsureDir(zipFileDir); err != nil {
				return "", "", fmt.Errorf("could not create backup directory %v: %w", zipFileDir, err)
			}
		}
	}

	zipFileName := backupName + ".zip"
	zipFilePath := filepath.Join(zipFileDir, zipFileName)

	logger.Debugf("Preparing zip file for database backup at %v", zipFilePath)
	zf, err := os.Create(zipFilePath)
	if err != nil {
		return "", "", fmt.Errorf("could not create zip file %v: %w", zipFilePath, err)
	}
	// deferred closes are backstops for the error paths only; the success
	// path closes explicitly below so close/flush errors are reported
	defer zf.Close()

	z := databaseBackupZip{
		Writer: zip.NewWriter(zf),
	}
	defer z.Close()

	// add the database backup under the real database file name
	dbFn := filepath.Base(s.Config.GetDatabasePath())
	if err := z.zipFileRename(backupPath, "", dbFn); err != nil {
		return "", "", fmt.Errorf("could not add database backup to zip file: %w", err)
	}

	if err := os.Remove(backupPath); err != nil {
		return "", "", fmt.Errorf("could not remove temporary backup file %v: %w", backupPath, err)
	}

	// walk the blobs directory and mirror its structure under "blobs/" in
	// the zip; the output dir is the path relative to blobsDir
	blobsDir := s.Config.GetBlobsPath()
	err = filepath.WalkDir(blobsDir, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}

		if d.IsDir() {
			return nil
		}

		outDir := filepath.Join("blobs", strings.TrimPrefix(filepath.Dir(path), blobsDir))

		if err := z.zipFile(path, outDir); err != nil {
			return fmt.Errorf("could not add blob %v to zip file: %w", path, err)
		}

		return nil
	})
	if err != nil {
		return "", "", fmt.Errorf("error walking blobs directory: %w", err)
	}

	// close explicitly: zip.Writer.Close flushes the central directory, and
	// a failure here would otherwise be silently swallowed by the defers,
	// reporting success for a corrupt archive
	if err := z.Close(); err != nil {
		return "", "", fmt.Errorf("error finalizing zip file %v: %w", zipFilePath, err)
	}
	if err := zf.Close(); err != nil {
		return "", "", fmt.Errorf("error closing zip file %v: %w", zipFilePath, err)
	}

	return zipFilePath, zipFileName, nil
}
// backupDatabaseOnly creates a plain sqlite backup of the database (no
// blobs), returning the backup path and filename. When download is true the
// backup is written under the generated downloads directory with a unique
// temporary name; otherwise it goes to the configured backup directory
// under the standard backup filename.
func (s *Manager) backupDatabaseOnly(download bool) (string, string, error) {
	var backupPath, backupName string

	if download {
		downloadDir := s.Paths.Generated.Downloads
		if err := fsutil.EnsureDir(downloadDir); err != nil {
			return "", "", fmt.Errorf("could not create backup directory %v: %w", downloadDir, err)
		}

		// reserve a unique filename via CreateTemp
		tmp, err := os.CreateTemp(downloadDir, "backup*.sqlite")
		if err != nil {
			return "", "", err
		}

		backupPath = tmp.Name()
		backupName = s.Database.DatabaseBackupPath("")
		tmp.Close()

		// the backup operation needs to create the file itself, so drop
		// the placeholder
		if err := os.Remove(backupPath); err != nil {
			return "", "", fmt.Errorf("could not remove temporary backup file %v: %w", backupPath, err)
		}
	} else {
		targetDir := s.Config.GetBackupDirectoryPathOrDefault()
		if targetDir != "" {
			if err := fsutil.EnsureDir(targetDir); err != nil {
				return "", "", fmt.Errorf("could not create backup directory %v: %w", targetDir, err)
			}
		}

		backupPath = s.Database.DatabaseBackupPath(targetDir)
		backupName = filepath.Base(backupPath)
	}

	if err := s.Database.Backup(backupPath); err != nil {
		return "", "", err
	}

	return backupPath, backupName, nil
}

View file

@ -83,6 +83,21 @@ const (
ParallelTasks = "parallel_tasks"
parallelTasksDefault = 1
UseCustomSpriteInterval = "use_custom_sprite_interval"
UseCustomSpriteIntervalDefault = false
SpriteInterval = "sprite_interval"
SpriteIntervalDefault = 30
MinimumSprites = "minimum_sprites"
MinimumSpritesDefault = 10
MaximumSprites = "maximum_sprites"
MaximumSpritesDefault = 500
SpriteScreenshotSize = "sprite_screenshot_width"
spriteScreenshotSizeDefault = 160
PreviewPreset = "preview_preset"
TranscodeHardwareAcceleration = "ffmpeg.hardware_acceleration"
@ -194,6 +209,7 @@ const (
CSSEnabled = "cssenabled"
JavascriptEnabled = "javascriptenabled"
CustomLocalesEnabled = "customlocalesenabled"
DisableCustomizations = "disable_customizations"
ShowScrubber = "show_scrubber"
showScrubberDefault = true
@ -974,6 +990,50 @@ func (i *Config) GetParallelTasksWithAutoDetection() int {
return parallelTasks
}
// GetUseCustomSpriteInterval returns true if the sprite minimum, maximum, and interval settings
// should be used instead of the default
func (i *Config) GetUseCustomSpriteInterval() bool {
value := i.getBool(UseCustomSpriteInterval)
return value
}
// GetSpriteInterval returns the time (in seconds) to be between each scrubber sprite
// A value of 0 indicates that the sprite interval should be automatically determined
// based on the minimum sprite setting.
func (i *Config) GetSpriteInterval() float64 {
value := i.getFloat64(SpriteInterval)
return value
}
// GetMinimumSprites returns the minimum number of sprites that have to be generated
// A value of 0 will be overridden with the default of 10.
func (i *Config) GetMinimumSprites() int {
value := i.getInt(MinimumSprites)
if value <= 0 {
return MinimumSpritesDefault
}
return value
}
// GetMaximumSprites returns the maximum number of sprites that can be generated
// A value of 0 indicates no maximum.
func (i *Config) GetMaximumSprites() int {
value := i.getInt(MaximumSprites)
return value
}
// GetSpriteScreenshotSize returns the required size of the screenshots to be taken
// during sprite generation in pixels. This will be the width for landscape scenes
// and the height for portrait scenes, with the other dimension being scaled to maintain
// the aspect ratio. If the value is less than or equal to 0, the default will be used.
func (i *Config) GetSpriteScreenshotSize() int {
value := i.getInt(SpriteScreenshotSize)
if value <= 0 {
return spriteScreenshotSizeDefault
}
return value
}
func (i *Config) GetPreviewAudio() bool {
return i.getBool(PreviewAudio)
}
@ -1479,6 +1539,13 @@ func (i *Config) GetCustomLocalesEnabled() bool {
return i.getBool(CustomLocalesEnabled)
}
// GetDisableCustomizations returns true if all customizations (plugins, custom CSS,
// custom JavaScript, and custom locales) should be disabled. This is useful for
// troubleshooting issues without permanently disabling individual customizations.
func (i *Config) GetDisableCustomizations() bool {
return i.getBool(DisableCustomizations)
}
func (i *Config) GetHandyKey() string {
return i.getString(HandyKey)
}
@ -1853,6 +1920,12 @@ func (i *Config) setDefaultValues() {
i.setDefault(PreviewAudio, previewAudioDefault)
i.setDefault(SoundOnPreview, false)
i.setDefault(UseCustomSpriteInterval, UseCustomSpriteIntervalDefault)
i.setDefault(SpriteInterval, SpriteIntervalDefault)
i.setDefault(MinimumSprites, MinimumSpritesDefault)
i.setDefault(MaximumSprites, MaximumSpritesDefault)
i.setDefault(SpriteScreenshotSize, spriteScreenshotSizeDefault)
i.setDefault(ThemeColor, DefaultThemeColor)
i.setDefault(WriteImageThumbnails, writeImageThumbnailsDefault)

View file

@ -11,8 +11,10 @@ type ScanMetadataOptions struct {
ScanGenerateImagePreviews bool `json:"scanGenerateImagePreviews"`
// Generate sprites during scan
ScanGenerateSprites bool `json:"scanGenerateSprites"`
// Generate phashes during scan
// Generate video phashes during scan
ScanGeneratePhashes bool `json:"scanGeneratePhashes"`
// Generate image phashes during scan
ScanGenerateImagePhashes bool `json:"scanGenerateImagePhashes"`
// Generate image thumbnails during scan
ScanGenerateThumbnails bool `json:"scanGenerateThumbnails"`
// Generate image thumbnails during scan

View file

@ -21,8 +21,7 @@ type SpriteGenerator struct {
VideoChecksum string
ImageOutputPath string
VTTOutputPath string
Rows int
Columns int
Config SpriteGeneratorConfig
SlowSeek bool // use alternate seek function, very slow!
Overwrite bool
@ -30,13 +29,81 @@ type SpriteGenerator struct {
g *generate.Generator
}
func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageOutputPath string, vttOutputPath string, rows int, cols int) (*SpriteGenerator, error) {
// SpriteGeneratorConfig holds configuration for the SpriteGenerator
type SpriteGeneratorConfig struct {
// MinimumSprites is the minimum number of sprites to generate, even if the video duration is short
// SpriteInterval will be adjusted accordingly to ensure at least this many sprites are generated.
// A value of 0 means no minimum, and the generator will use the provided SpriteInterval or
// calculate it based on the video duration and MaximumSprites
MinimumSprites int
// MaximumSprites is the maximum number of sprites to generate, even if the video duration is long
// SpriteInterval will be adjusted accordingly to ensure no more than this many sprites are generated
// A value of 0 means no maximum, and the generator will use the provided SpriteInterval or
// calculate it based on the video duration and MinimumSprites
MaximumSprites int
// SpriteInterval is the default interval in seconds between each sprite.
// If MinimumSprites or MaximumSprites are set, this value will be adjusted accordingly
// to ensure the desired number of sprites are generated
// A value of 0 means the generator will calculate the interval based on the video duration and
// the provided MinimumSprites and MaximumSprites
SpriteInterval float64
// SpriteSize is the size in pixels of the longest dimension of each sprite image.
// The other dimension will be automatically calculated to maintain the aspect ratio of the video
SpriteSize int
}
const (
// DefaultSpriteAmount is the default number of sprites to generate if no configuration is provided
// This corresponds to the legacy behavior of the generator, which generates 81 sprites at equal
// intervals across the video duration
DefaultSpriteAmount = 81
// DefaultSpriteSize is the default size in pixels of the longest dimension of each sprite image
// if no configuration is provided. This corresponds to the legacy behavior of the generator.
DefaultSpriteSize = 160
)
var DefaultSpriteGeneratorConfig = SpriteGeneratorConfig{
MinimumSprites: DefaultSpriteAmount,
MaximumSprites: DefaultSpriteAmount,
SpriteInterval: 0,
SpriteSize: DefaultSpriteSize,
}
// NewSpriteGenerator creates a new SpriteGenerator for the given video file and configuration
// It calculates the appropriate sprite interval and count based on the video duration and the provided configuration
func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageOutputPath string, vttOutputPath string, config SpriteGeneratorConfig) (*SpriteGenerator, error) {
exists, err := fsutil.FileExists(videoFile.Path)
if !exists {
return nil, err
}
if videoFile.VideoStreamDuration <= 0 {
s := fmt.Sprintf("video %s: duration(%.3f)/frame count(%d) invalid, skipping sprite creation", videoFile.Path, videoFile.VideoStreamDuration, videoFile.FrameCount)
return nil, errors.New(s)
}
config.SpriteInterval = calculateSpriteInterval(videoFile, config)
chunkCount := int(math.Ceil(videoFile.VideoStreamDuration / config.SpriteInterval))
// adjust the chunk count to the next highest perfect square, to ensure the sprite image
// is completely filled (no empty space in the grid) and the grid is as square as possible (minimizing the number of rows/columns)
gridSize := generate.GetSpriteGridSize(chunkCount)
newChunkCount := gridSize * gridSize
if newChunkCount != chunkCount {
logger.Debugf("[generator] adjusting chunk count from %d to %d to fit a %dx%d grid", chunkCount, newChunkCount, gridSize, gridSize)
chunkCount = newChunkCount
}
if config.SpriteSize <= 0 {
config.SpriteSize = DefaultSpriteSize
}
slowSeek := false
chunkCount := rows * cols
// For files with small duration / low frame count try to seek using frame number intead of seconds
if videoFile.VideoStreamDuration < 5 || (0 < videoFile.FrameCount && videoFile.FrameCount <= int64(chunkCount)) { // some files can have FrameCount == 0, only use SlowSeek if duration < 5
@ -71,9 +138,8 @@ func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageO
VideoChecksum: videoChecksum,
ImageOutputPath: imageOutputPath,
VTTOutputPath: vttOutputPath,
Rows: rows,
Config: config,
SlowSeek: slowSeek,
Columns: cols,
g: &generate.Generator{
Encoder: instance.FFMpeg,
FFMpegConfig: instance.Config,
@ -83,6 +149,40 @@ func NewSpriteGenerator(videoFile ffmpeg.VideoFile, videoChecksum string, imageO
}, nil
}
// calculateSpriteInterval returns the interval in seconds between sprite
// screenshots for the given video. If config.SpriteInterval is unset (<= 0),
// the interval is derived from the video duration and MinimumSprites.
// Otherwise the provided interval is used, clamped so the resulting sprite
// count stays within MaximumSprites/MinimumSprites where those are set.
// Panics (programmer error) if neither SpriteInterval nor MinimumSprites is
// positive.
func calculateSpriteInterval(videoFile ffmpeg.VideoFile, config SpriteGeneratorConfig) float64 {
	// If a custom sprite interval is provided, start with that
	spriteInterval := config.SpriteInterval

	// If no custom interval is provided, calculate the interval based on the
	// video duration and minimum sprite count
	if spriteInterval <= 0 {
		minSprites := config.MinimumSprites
		if minSprites <= 0 {
			// invariant broken by the caller, not a runtime condition
			panic("invalid configuration: MinimumSprites must be greater than 0 if SpriteInterval is not set")
		}
		logger.Debugf("[generator] calculating sprite interval for video duration %.3fs with minimum sprites %d", videoFile.VideoStreamDuration, minSprites)
		return videoFile.VideoStreamDuration / float64(minSprites)
	}

	// Calculate the number of sprites that would be generated with the provided interval
	spriteCount := int(math.Ceil(videoFile.VideoStreamDuration / spriteInterval))

	// If the calculated sprite count is greater than the maximum, adjust the interval to meet the maximum
	if config.MaximumSprites > 0 && spriteCount > int(config.MaximumSprites) {
		spriteInterval = videoFile.VideoStreamDuration / float64(config.MaximumSprites)
		logger.Debugf("[generator] provided sprite interval %.1fs results in %d sprites, which exceeds the maximum of %d, adjusting interval to %.1fs", config.SpriteInterval, spriteCount, config.MaximumSprites, spriteInterval)
	}

	// If the calculated sprite count is less than the minimum, adjust the interval to meet the minimum
	// NOTE(review): spriteCount is not recalculated after the maximum
	// adjustment above; with MinimumSprites <= MaximumSprites the two
	// branches cannot both fire for the same input, so this appears safe -
	// confirm if that invariant can ever be violated by configuration.
	if config.MinimumSprites > 0 && spriteCount < int(config.MinimumSprites) {
		spriteInterval = videoFile.VideoStreamDuration / float64(config.MinimumSprites)
		logger.Debugf("[generator] provided sprite interval %.1fs results in %d sprites, which is less than the minimum of %d, adjusting interval to %.1fs", config.SpriteInterval, spriteCount, config.MinimumSprites, spriteInterval)
	}

	return spriteInterval
}
func (g *SpriteGenerator) Generate() error {
if err := g.generateSpriteImage(); err != nil {
return err
@ -100,6 +200,8 @@ func (g *SpriteGenerator) generateSpriteImage() error {
var images []image.Image
isPortrait := g.Info.VideoFile.Height > g.Info.VideoFile.Width
if !g.SlowSeek {
logger.Infof("[generator] generating sprite image for %s", g.Info.VideoFile.Path)
// generate `ChunkCount` thumbnails
@ -107,8 +209,7 @@ func (g *SpriteGenerator) generateSpriteImage() error {
for i := 0; i < g.Info.ChunkCount; i++ {
time := float64(i) * stepSize
img, err := g.g.SpriteScreenshot(context.TODO(), g.Info.VideoFile.Path, time)
img, err := g.g.SpriteScreenshot(context.TODO(), g.Info.VideoFile.Path, time, g.Config.SpriteSize, isPortrait)
if err != nil {
return err
}
@ -126,7 +227,7 @@ func (g *SpriteGenerator) generateSpriteImage() error {
return errors.New("invalid frame number conversion")
}
img, err := g.g.SpriteScreenshotSlow(context.TODO(), g.Info.VideoFile.Path, int(frame))
img, err := g.g.SpriteScreenshotSlow(context.TODO(), g.Info.VideoFile.Path, int(frame), g.Config.SpriteSize)
if err != nil {
return err
}
@ -158,7 +259,7 @@ func (g *SpriteGenerator) generateSpriteVTT() error {
stepSize /= g.Info.FrameRate
}
return g.g.SpriteVTT(context.TODO(), g.VTTOutputPath, g.ImageOutputPath, stepSize)
return g.g.SpriteVTT(context.TODO(), g.VTTOutputPath, g.ImageOutputPath, stepSize, g.Info.ChunkCount)
}
func (g *SpriteGenerator) imageExists() bool {

View file

@ -313,46 +313,6 @@ func (s *Manager) validateFFmpeg() error {
return nil
}
func (s *Manager) BackupDatabase(download bool) (string, string, error) {
var backupPath string
var backupName string
if download {
backupDir := s.Paths.Generated.Downloads
if err := fsutil.EnsureDir(backupDir); err != nil {
return "", "", fmt.Errorf("could not create backup directory %v: %w", backupDir, err)
}
f, err := os.CreateTemp(backupDir, "backup*.sqlite")
if err != nil {
return "", "", err
}
backupPath = f.Name()
backupName = s.Database.DatabaseBackupPath("")
f.Close()
// delete the temp file so that the backup operation can create it
if err := os.Remove(backupPath); err != nil {
return "", "", fmt.Errorf("could not remove temporary backup file %v: %w", backupPath, err)
}
} else {
backupDir := s.Config.GetBackupDirectoryPathOrDefault()
if backupDir != "" {
if err := fsutil.EnsureDir(backupDir); err != nil {
return "", "", fmt.Errorf("could not create backup directory %v: %w", backupDir, err)
}
}
backupPath = s.Database.DatabaseBackupPath(backupDir)
backupName = filepath.Base(backupPath)
}
err := s.Database.Backup(backupPath)
if err != nil {
return "", "", err
}
return backupPath, backupName, nil
}
func (s *Manager) AnonymiseDatabase(download bool) (string, string, error) {
var outPath string
var outName string

View file

@ -100,6 +100,8 @@ func (s *Manager) Scan(ctx context.Context, input ScanMetadataInput) (int, error
return 0, err
}
cfg := config.GetInstance()
scanner := &file.Scanner{
Repository: file.NewRepository(s.Repository),
FileDecorators: []file.Decorator{
@ -118,6 +120,10 @@ func (s *Manager) Scan(ctx context.Context, input ScanMetadataInput) (int, error
},
FingerprintCalculator: &fingerprintCalculator{s.Config},
FS: &file.OsFS{},
ZipFileExtensions: cfg.GetGalleryExtensions(),
// ScanFilters is set in ScanJob.Execute
// HandlerRequiredFilters is set in ScanJob.Execute
Rescan: input.Rescan,
}
scanJob := ScanJob{

View file

@ -10,17 +10,17 @@ import (
)
type SceneService interface {
Create(ctx context.Context, input *models.Scene, fileIDs []models.FileID, coverImage []byte) (*models.Scene, error)
Create(ctx context.Context, input models.CreateSceneInput) (*models.Scene, error)
AssignFile(ctx context.Context, sceneID int, fileID models.FileID) error
Merge(ctx context.Context, sourceIDs []int, destinationID int, fileDeleter *scene.FileDeleter, options scene.MergeOptions) error
Destroy(ctx context.Context, scene *models.Scene, fileDeleter *scene.FileDeleter, deleteGenerated, deleteFile bool) error
Destroy(ctx context.Context, scene *models.Scene, fileDeleter *scene.FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) error
FindByIDs(ctx context.Context, ids []int, load ...scene.LoadRelationshipOption) ([]*models.Scene, error)
sceneFingerprintGetter
}
type ImageService interface {
Destroy(ctx context.Context, image *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) error
Destroy(ctx context.Context, image *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) error
DestroyZipImages(ctx context.Context, zipFile models.File, fileDeleter *image.FileDeleter, deleteGenerated bool) ([]*models.Image, error)
}
@ -31,7 +31,7 @@ type GalleryService interface {
SetCover(ctx context.Context, g *models.Gallery, coverImageId int) error
ResetCover(ctx context.Context, g *models.Gallery) error
Destroy(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) ([]*models.Image, error)
Destroy(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile, destroyFileEntry bool) ([]*models.Image, error)
ValidateImageGalleryChange(ctx context.Context, i *models.Image, updateIDs models.UpdateIDs) error

View file

@ -565,6 +565,7 @@ func (j *CleanGeneratedJob) cleanMarkerFiles(ctx context.Context, progress *job.
j.setProgressFromFilename(sceneHash[0:2], progress)
// check if the scene exists
var walkErr error
if err := j.Repository.WithReadTxn(ctx, func(ctx context.Context) error {
var err error
scenes, err = j.getScenesWithHash(ctx, sceneHash)
@ -575,15 +576,18 @@ func (j *CleanGeneratedJob) cleanMarkerFiles(ctx context.Context, progress *job.
if len(scenes) == 0 {
j.logDelete("deleting unused marker directory: %s", sceneHash)
j.deleteDir(path)
} else {
// get the markers now
for _, scene := range scenes {
thisMarkers, err := j.Repository.SceneMarker.FindBySceneID(ctx, scene.ID)
if err != nil {
return fmt.Errorf("error getting markers for scene: %v", err)
}
markers = append(markers, thisMarkers...)
// #5911 - we've just deleted the directory, so skip it in the walk to avoid errors
walkErr = fs.SkipDir
return nil
}
// get the markers now
for _, scene := range scenes {
thisMarkers, err := j.Repository.SceneMarker.FindBySceneID(ctx, scene.ID)
if err != nil {
return fmt.Errorf("error getting markers for scene: %v", err)
}
markers = append(markers, thisMarkers...)
}
return nil
@ -591,7 +595,7 @@ func (j *CleanGeneratedJob) cleanMarkerFiles(ctx context.Context, progress *job.
logger.Error(err.Error())
}
return nil
return walkErr
}
filename := info.Name()

View file

@ -300,7 +300,10 @@ func (h *cleanHandler) handleRelatedScenes(ctx context.Context, fileDeleter *fil
// only delete if the scene has no other files
if len(scene.Files.List()) <= 1 {
logger.Infof("Deleting scene %q since it has no other related files", scene.DisplayName())
if err := mgr.SceneService.Destroy(ctx, scene, sceneFileDeleter, true, false); err != nil {
const deleteGenerated = true
const deleteFile = false
const destroyFileEntry = false
if err := mgr.SceneService.Destroy(ctx, scene, sceneFileDeleter, deleteGenerated, deleteFile, destroyFileEntry); err != nil {
return err
}
@ -421,7 +424,10 @@ func (h *cleanHandler) handleRelatedImages(ctx context.Context, fileDeleter *fil
if len(i.Files.List()) <= 1 {
logger.Infof("Deleting image %q since it has no other related files", i.DisplayName())
if err := mgr.ImageService.Destroy(ctx, i, imageFileDeleter, true, false); err != nil {
const deleteGenerated = true
const deleteFile = false
const destroyFileEntry = false
if err := mgr.ImageService.Destroy(ctx, i, imageFileDeleter, deleteGenerated, deleteFile, destroyFileEntry); err != nil {
return err
}

View file

@ -29,6 +29,7 @@ type GenerateMetadataInput struct {
// Generate transcodes even if not required
ForceTranscodes bool `json:"forceTranscodes"`
Phashes bool `json:"phashes"`
ImagePhashes bool `json:"imagePhashes"`
InteractiveHeatmapsSpeeds bool `json:"interactiveHeatmapsSpeeds"`
ClipPreviews bool `json:"clipPreviews"`
ImageThumbnails bool `json:"imageThumbnails"`
@ -36,6 +37,10 @@ type GenerateMetadataInput struct {
SceneIDs []string `json:"sceneIDs"`
// marker ids to generate for
MarkerIDs []string `json:"markerIDs"`
// image ids to generate for
ImageIDs []string `json:"imageIDs"`
// gallery ids to generate for
GalleryIDs []string `json:"galleryIDs"`
// overwrite existing media
Overwrite bool `json:"overwrite"`
}
@ -73,6 +78,7 @@ type totalsGenerate struct {
markers int64
transcodes int64
phashes int64
imagePhashes int64
interactiveHeatmapSpeeds int64
clipPreviews int64
imageThumbnails int64
@ -82,8 +88,9 @@ type totalsGenerate struct {
func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error {
var scenes []*models.Scene
var err error
var markers []*models.SceneMarker
var images []*models.Image
var err error
j.overwrite = j.input.Overwrite
j.fileNamingAlgo = config.GetInstance().GetVideoFileNamingAlgorithm()
@ -105,6 +112,14 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error
if err != nil {
logger.Error(err.Error())
}
imageIDs, err := stringslice.StringSliceToIntSlice(j.input.ImageIDs)
if err != nil {
logger.Error(err.Error())
}
galleryIDs, err := stringslice.StringSliceToIntSlice(j.input.GalleryIDs)
if err != nil {
logger.Error(err.Error())
}
g := &generate.Generator{
Encoder: instance.FFMpeg,
@ -118,7 +133,7 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error
r := j.repository
if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
qb := r.Scene
if len(j.input.SceneIDs) == 0 && len(j.input.MarkerIDs) == 0 {
if len(j.input.SceneIDs) == 0 && len(j.input.MarkerIDs) == 0 && len(j.input.ImageIDs) == 0 && len(j.input.GalleryIDs) == 0 {
j.queueTasks(ctx, g, queue)
} else {
if len(j.input.SceneIDs) > 0 {
@ -141,6 +156,33 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error
j.queueMarkerJob(g, m, queue)
}
}
if len(j.input.ImageIDs) > 0 {
images, err = r.Image.FindMany(ctx, imageIDs)
for _, i := range images {
if err := i.LoadFiles(ctx, r.Image); err != nil {
return err
}
j.queueImageJob(g, i, queue)
}
}
if len(j.input.GalleryIDs) > 0 {
for _, galleryID := range galleryIDs {
imgs, err := r.Image.FindByGalleryID(ctx, galleryID)
if err != nil {
return err
}
for _, img := range imgs {
if err := img.LoadFiles(ctx, r.Image); err != nil {
return err
}
j.queueImageJob(g, img, queue)
}
}
}
}
return nil
@ -172,14 +214,17 @@ func (j *GenerateJob) Execute(ctx context.Context, progress *job.Progress) error
if j.input.Phashes {
logMsg += fmt.Sprintf(" %d phashes", totals.phashes)
}
if j.input.ImagePhashes {
logMsg += fmt.Sprintf(" %d image phashes", totals.imagePhashes)
}
if j.input.InteractiveHeatmapsSpeeds {
logMsg += fmt.Sprintf(" %d heatmaps & speeds", totals.interactiveHeatmapSpeeds)
}
if j.input.ClipPreviews {
logMsg += fmt.Sprintf(" %d Image Clip Previews", totals.clipPreviews)
logMsg += fmt.Sprintf(" %d image clip previews", totals.clipPreviews)
}
if j.input.ImageThumbnails {
logMsg += fmt.Sprintf(" %d Image Thumbnails", totals.imageThumbnails)
logMsg += fmt.Sprintf(" %d image thumbnails", totals.imageThumbnails)
}
if logMsg == "Generating" {
logMsg = "Nothing selected to generate"
@ -284,7 +329,7 @@ func (j *GenerateJob) queueImagesTasks(ctx context.Context, g *generate.Generato
r := j.repository
for more := j.input.ClipPreviews || j.input.ImageThumbnails; more; {
for more := j.input.ClipPreviews || j.input.ImageThumbnails || j.input.ImagePhashes; more; {
if job.IsCancelled(ctx) {
return
}
@ -525,4 +570,23 @@ func (j *GenerateJob) queueImageJob(g *generate.Generator, image *models.Image,
queue <- task
}
}
if j.input.ImagePhashes {
// generate for all files in image
for _, f := range image.Files.List() {
if imageFile, ok := f.(*models.ImageFile); ok {
task := &GenerateImagePhashTask{
repository: j.repository,
File: imageFile,
Overwrite: j.overwrite,
}
if task.required() {
j.totals.imagePhashes++
j.totals.tasks++
queue <- task
}
}
}
}
}

View file

@ -0,0 +1,103 @@
package manager
import (
"context"
"fmt"
"github.com/stashapp/stash/pkg/hash/imagephash"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
)
// GenerateImagePhashTask generates a perceptual hash (phash) fingerprint for
// a single image file and stores it on the file's fingerprint list.
type GenerateImagePhashTask struct {
	repository models.Repository // used for read/write transactions and file updates
	File       *models.ImageFile // the image file to hash
	Overwrite  bool              // regenerate even if a phash fingerprint already exists
}
// GetDescription returns a human-readable description of the task for
// progress reporting.
func (t *GenerateImagePhashTask) GetDescription() string {
	return "Generating phash for " + t.File.Path
}
// Start generates a phash for the task's image file and persists it as a
// fingerprint. If a file with the same MD5 already has a phash (and
// Overwrite is false), that phash is reused instead of regenerating.
// Errors are logged rather than returned; a cancelled context suppresses
// the final error log.
func (t *GenerateImagePhashTask) Start(ctx context.Context) {
	if !t.required() {
		return
	}

	var hash int64
	set := false

	// #4393 - if there is a file with the same md5, we can use the same phash
	// only use this if we're not overwriting
	if !t.Overwrite {
		existing, err := t.findExistingPhash(ctx)
		if err != nil {
			// non-fatal: fall through and generate a fresh phash
			logger.Warnf("Error finding existing phash: %v", err)
		} else if existing != nil {
			logger.Infof("Using existing phash for %s", t.File.Path)
			// NOTE(review): assumes the stored fingerprint value is always
			// int64 - a non-int64 value would panic here; confirm against
			// the fingerprint storage layer.
			hash = existing.(int64)
			set = true
		}
	}

	if !set {
		generated, err := imagephash.Generate(instance.FFMpeg, t.File)
		if err != nil {
			logger.Errorf("Error generating phash for %q: %v", t.File.Path, err)
			logErrorOutput(err)
			return
		}
		hash = int64(*generated)
	}

	r := t.repository
	if err := r.WithTxn(ctx, func(ctx context.Context) error {
		// AppendUnique avoids duplicating an existing phash fingerprint
		t.File.Fingerprints = t.File.Fingerprints.AppendUnique(models.Fingerprint{
			Type:        models.FingerprintTypePhash,
			Fingerprint: hash,
		})

		return r.File.Update(ctx, t.File)
	}); err != nil && ctx.Err() == nil {
		// only log if the failure was not caused by cancellation
		logger.Errorf("Error setting phash: %v", err)
	}
}
// findExistingPhash looks for another file sharing this file's MD5
// fingerprint that already has a phash, and returns that phash value.
// Returns (nil, nil) if no such file exists.
func (t *GenerateImagePhashTask) findExistingPhash(ctx context.Context) (interface{}, error) {
	r := t.repository
	var ret interface{}
	if err := r.WithReadTxn(ctx, func(ctx context.Context) error {
		// NOTE(review): md5 may be nil if the file has no MD5 fingerprint;
		// confirm FindByFingerprint handles a nil fingerprint value sensibly.
		md5 := t.File.Fingerprints.Get(models.FingerprintTypeMD5)

		// find other files with the same md5
		files, err := r.File.FindByFingerprint(ctx, models.Fingerprint{
			Type:        models.FingerprintTypeMD5,
			Fingerprint: md5,
		})
		if err != nil {
			return fmt.Errorf("finding files by md5: %w", err)
		}

		// find the first file with a phash
		for _, file := range files {
			if phash := file.Base().Fingerprints.Get(models.FingerprintTypePhash); phash != nil {
				ret = phash
				return nil
			}
		}

		return nil
	}); err != nil {
		return nil, err
	}

	return ret, nil
}
// required reports whether the phash needs to be generated for this file:
// either overwriting was requested, or no phash fingerprint exists yet.
func (t *GenerateImagePhashTask) required() bool {
	return t.Overwrite || t.File.Fingerprints.Get(models.FingerprintTypePhash) == nil
}

View file

@ -44,7 +44,7 @@ func (t *GeneratePhashTask) Start(ctx context.Context) {
if !set {
generated, err := videophash.Generate(instance.FFMpeg, t.File)
if err != nil {
logger.Errorf("Error generating phash: %v", err)
logger.Errorf("Error generating phash for %q: %v", t.File.Path, err)
logErrorOutput(err)
return
}

View file

@ -34,7 +34,17 @@ func (t *GenerateSpriteTask) Start(ctx context.Context) {
sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm)
imagePath := instance.Paths.Scene.GetSpriteImageFilePath(sceneHash)
vttPath := instance.Paths.Scene.GetSpriteVttFilePath(sceneHash)
generator, err := NewSpriteGenerator(*videoFile, sceneHash, imagePath, vttPath, 9, 9)
cfg := DefaultSpriteGeneratorConfig
cfg.SpriteSize = instance.Config.GetSpriteScreenshotSize()
if instance.Config.GetUseCustomSpriteInterval() {
cfg.MinimumSprites = instance.Config.GetMinimumSprites()
cfg.MaximumSprites = instance.Config.GetMaximumSprites()
cfg.SpriteInterval = instance.Config.GetSpriteInterval()
}
generator, err := NewSpriteGenerator(*videoFile, sceneHash, imagePath, vttPath, cfg)
if err != nil {
logger.Errorf("error creating sprite generator: %s", err.Error())

View file

@ -2,13 +2,17 @@ package manager
import (
"context"
"errors"
"fmt"
"io/fs"
"path/filepath"
"regexp"
"runtime/debug"
"sync"
"time"
"github.com/99designs/gqlgen/graphql/handler/lru"
"github.com/remeh/sizedwaitgroup"
"github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/file/video"
@ -24,14 +28,13 @@ import (
"github.com/stashapp/stash/pkg/txn"
)
type scanner interface {
Scan(ctx context.Context, handlers []file.Handler, options file.ScanOptions, progressReporter file.ProgressReporter)
}
type ScanJob struct {
scanner scanner
scanner *file.Scanner
input ScanMetadataInput
subscriptions *subscriptionManager
fileQueue chan file.ScannedFile
count int
}
func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) error {
@ -55,22 +58,22 @@ func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) error {
start := time.Now()
nTasks := cfg.GetParallelTasksWithAutoDetection()
const taskQueueSize = 200000
taskQueue := job.NewTaskQueue(ctx, progress, taskQueueSize, cfg.GetParallelTasksWithAutoDetection())
taskQueue := job.NewTaskQueue(ctx, progress, taskQueueSize, nTasks)
var minModTime time.Time
if j.input.Filter != nil && j.input.Filter.MinModTime != nil {
minModTime = *j.input.Filter.MinModTime
}
j.scanner.Scan(ctx, getScanHandlers(j.input, taskQueue, progress), file.ScanOptions{
Paths: paths,
ScanFilters: []file.PathFilter{newScanFilter(c, repo, minModTime)},
ZipFileExtensions: cfg.GetGalleryExtensions(),
ParallelTasks: cfg.GetParallelTasksWithAutoDetection(),
HandlerRequiredFilters: []file.Filter{newHandlerRequiredFilter(cfg, repo)},
Rescan: j.input.Rescan,
}, progress)
// HACK - these should really be set in the scanner initialization
j.scanner.FileHandlers = getScanHandlers(j.input, taskQueue, progress)
j.scanner.ScanFilters = []file.PathFilter{newScanFilter(c, repo, minModTime)}
j.scanner.HandlerRequiredFilters = []file.Filter{newHandlerRequiredFilter(cfg, repo)}
j.runJob(ctx, paths, nTasks, progress)
taskQueue.Close()
@ -86,6 +89,264 @@ func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) error {
return nil
}
// runJob runs the scan as a producer/consumer pipeline: a single goroutine
// walks the given paths and pushes entries onto j.fileQueue, while
// processQueue consumes them with nTasks workers. runJob returns only after
// both the consumer and (via the deferred Wait) the producer have finished.
func (j *ScanJob) runJob(ctx context.Context, paths []string, nTasks int, progress *job.Progress) {
	var wg sync.WaitGroup
	wg.Add(1)

	j.fileQueue = make(chan file.ScannedFile, scanQueueSize)

	// producer: walk the filesystem and fill the queue
	go func() {
		defer func() {
			wg.Done()

			// handle panics in goroutine
			if p := recover(); p != nil {
				logger.Errorf("panic while queuing files for scan: %v", p)
				logger.Errorf(string(debug.Stack()))
			}
		}()

		if err := j.queueFiles(ctx, paths, progress); err != nil {
			if errors.Is(err, context.Canceled) {
				// cancellation is expected; not an error worth logging
				return
			}

			logger.Errorf("error queuing files for scan: %v", err)
			return
		}

		logger.Infof("Finished adding files to queue. %d files queued", j.count)
	}()

	// ensure the producer goroutine has fully exited before returning
	defer wg.Wait()

	// consumer: drains j.fileQueue until it is closed by queueFiles
	j.processQueue(ctx, nTasks, progress)
}
// scanQueueSize is the buffered capacity of the file queue shared between the
// walking producer and the scanning workers.
const scanQueueSize = 200000

// queueFiles walks every root in paths on the OS filesystem, enqueueing each
// accepted entry via queueFileFunc. It always closes j.fileQueue and finalizes
// the progress totals before returning, and returns the first walk error
// encountered (if any).
func (j *ScanJob) queueFiles(ctx context.Context, paths []string, progress *job.Progress) error {
	fsys := &file.OsFS{}

	defer func() {
		// unblock the consumer and switch progress to definite mode now that
		// the total file count is known
		close(j.fileQueue)
		progress.AddTotal(j.count)
		progress.Definite()
	}()

	var walkErr error
	progress.ExecuteTask("Walking directory tree", func() {
		for _, root := range paths {
			if walkErr = file.SymWalk(fsys, root, j.queueFileFunc(ctx, fsys, nil, progress)); walkErr != nil {
				break
			}
		}
	})

	return walkErr
}
// queueFileFunc returns a fs.WalkDirFunc that filters, sizes, and enqueues
// each walked entry. Folders are handled inline; entries inside zip archives
// (zipFile non-nil) are scanned immediately; regular files are pushed onto
// j.fileQueue for the worker pool. Per-entry errors are logged rather than
// propagated so one bad entry does not abort the whole scan.
func (j *ScanJob) queueFileFunc(ctx context.Context, f models.FS, zipFile *file.ScannedFile, progress *job.Progress) fs.WalkDirFunc {
	return func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			// don't let errors prevent scanning
			logger.Errorf("error scanning %s: %v", path, err)
			return nil
		}

		// stop the walk promptly if the job was cancelled
		if err = ctx.Err(); err != nil {
			return err
		}

		info, err := d.Info()
		if err != nil {
			logger.Errorf("reading info for %q: %v", path, err)
			return nil
		}

		if !j.scanner.AcceptEntry(ctx, path, info) {
			if info.IsDir() {
				logger.Debugf("Skipping directory %s", path)
				return fs.SkipDir
			}

			logger.Debugf("Skipping file %s", path)
			return nil
		}

		size, err := file.GetFileSize(f, path, info)
		if err != nil {
			return err
		}

		ff := file.ScannedFile{
			BaseFile: &models.BaseFile{
				DirEntry: models.DirEntry{
					ModTime: file.ModTime(info),
				},
				Path:     path,
				Basename: filepath.Base(path),
				Size:     size,
			},
			FS:   f,
			Info: info,
		}

		// link entries inside an archive back to their containing zip file
		if zipFile != nil {
			ff.ZipFileID = &zipFile.ID
			ff.ZipFile = zipFile
		}

		if info.IsDir() {
			// handle folders immediately
			if err := j.handleFolder(ctx, ff, progress); err != nil {
				if !errors.Is(err, context.Canceled) {
					logger.Errorf("error processing %q: %v", path, err)
				}

				// skip the directory since we won't be able to process the files anyway
				return fs.SkipDir
			}

			return nil
		}

		// if zip file is present, we handle immediately
		if zipFile != nil {
			progress.ExecuteTask("Scanning "+path, func() {
				// don't increment progress in zip files
				if err := j.handleFile(ctx, ff, nil); err != nil {
					if !errors.Is(err, context.Canceled) {
						logger.Errorf("error processing %q: %v", path, err)
					}

					// don't return an error, just skip the file
				}
			})

			return nil
		}

		logger.Tracef("Queueing file %s for scanning", path)
		j.fileQueue <- ff
		// j.count is only mutated here, on the single producer goroutine
		j.count++
		return nil
	}
}
// processQueue consumes j.fileQueue, dispatching each queued file to
// processQueueItem on up to parallelTasks concurrent workers. It returns once
// the queue is closed (by queueFiles) or the context is cancelled, after
// waiting for all in-flight workers to finish.
func (j *ScanJob) processQueue(ctx context.Context, parallelTasks int, progress *job.Progress) {
	if parallelTasks < 1 {
		parallelTasks = 1
	}

	wg := sizedwaitgroup.New(parallelTasks)

	func() {
		defer func() {
			// wait for in-flight workers before handling any panic
			wg.Wait()
			// handle panics in goroutine
			if p := recover(); p != nil {
				logger.Errorf("panic while scanning files: %v", p)
				logger.Errorf(string(debug.Stack()))
			}
		}()

		for f := range j.fileQueue {
			logger.Tracef("Processing queued file %s", f.Path)

			// stop draining the queue if the job was cancelled
			if err := ctx.Err(); err != nil {
				return
			}

			// blocks until a worker slot is free, bounding concurrency
			wg.Add()

			// copy the loop variable so the goroutine does not observe a later
			// iteration's value (required for pre-1.22 loop-var semantics)
			ff := f
			go func() {
				defer wg.Done()
				j.processQueueItem(ctx, ff, progress)
			}()
		}
	}()
}
// processQueueItem scans one dequeued entry, routing it to the folder or file
// handler as appropriate. Errors other than context cancellation are logged.
func (j *ScanJob) processQueueItem(ctx context.Context, f file.ScannedFile, progress *job.Progress) {
	progress.ExecuteTask("Scanning "+f.Path, func() {
		handle := j.handleFile
		if f.Info.IsDir() {
			handle = j.handleFolder
		}

		if err := handle(ctx, f, progress); err != nil && !errors.Is(err, context.Canceled) {
			logger.Errorf("error processing %q: %v", f.Path, err)
		}
	})
}
// handleFolder scans a single folder entry, incrementing progress when done.
// It returns any error from the underlying scanner.
func (j *ScanJob) handleFolder(ctx context.Context, f file.ScannedFile, progress *job.Progress) error {
	// progress is nil for entries inside zip files, which are not counted
	if progress != nil {
		defer progress.Increment()
	}

	// the scanned folder result is not needed here; collapse the redundant
	// if err != nil { return err }; return nil into a direct return
	_, err := j.scanner.ScanFolder(ctx, f)
	return err
}
// handleFile scans a single file, incrementing progress when done. If the
// scanned file is a new or updated zip file, its contents are scanned
// immediately as well.
func (j *ScanJob) handleFile(ctx context.Context, f file.ScannedFile, progress *job.Progress) error {
	// progress is nil for files inside zip archives, which are not counted
	if progress != nil {
		defer progress.Increment()
	}

	r, err := j.scanner.ScanFile(ctx, f)
	if err != nil {
		return err
	}

	// handle rename should have already handled the contents of the zip file
	// so shouldn't need to scan it again
	if (r.New || r.Updated) && j.scanner.IsZipFile(f.Info.Name()) {
		// use the scanner's resulting file record as the zip container
		ff := r.File
		f.BaseFile = ff.Base()

		// scan zip files with a different context that is not cancellable
		// cancelling while scanning zip file contents results in the scan
		// contents being partially completed
		zipCtx := context.WithoutCancel(ctx)
		// zip errors are logged, not returned, so the outer scan continues
		if err := j.scanZipFile(zipCtx, f, progress); err != nil {
			logger.Errorf("Error scanning zip file %q: %v", f.Path, err)
		}
	}

	return nil
}
// scanZipFile walks the contents of the zip file f, queueing its entries for
// immediate handling. A zip that cannot be opened for walking is skipped
// silently (debug-logged); any other open failure is returned.
func (j *ScanJob) scanZipFile(ctx context.Context, f file.ScannedFile, progress *job.Progress) error {
	zipFS, openErr := f.FS.OpenZip(f.Path, f.Size)
	switch {
	case errors.Is(openErr, file.ErrNotReaderAt):
		// can't walk the zip file
		// just return
		logger.Debugf("Skipping zip file %q as it cannot be opened for walking", f.Path)
		return nil
	case openErr != nil:
		return openErr
	}
	defer zipFS.Close()

	return file.SymWalk(zipFS, f.Path, j.queueFileFunc(ctx, zipFS, &f, progress))
}
type extensionConfig struct {
vidExt []string
imgExt []string
@ -463,6 +724,29 @@ func (g *imageGenerators) Generate(ctx context.Context, i *models.Image, f model
}
}
if t.ScanGenerateImagePhashes {
progress.AddTotal(1)
phashFn := func(ctx context.Context) {
mgr := GetInstance()
// Only generate phash for image files, not video files
if imageFile, ok := f.(*models.ImageFile); ok {
taskPhash := GenerateImagePhashTask{
repository: mgr.Repository,
File: imageFile,
Overwrite: overwrite,
}
taskPhash.Start(ctx)
}
progress.Increment()
}
if g.sequentialScanning {
phashFn(ctx)
} else {
g.taskQueue.Add(fmt.Sprintf("Generating phash for %s", path), phashFn)
}
}
return nil
}

View file

@ -275,6 +275,12 @@ func (t *stashBoxBatchStudioTagTask) getName() string {
}
func (t *stashBoxBatchStudioTagTask) Start(ctx context.Context) {
// Skip organized studios
if t.studio != nil && t.studio.Organized {
logger.Infof("Skipping organized studio %s", t.studio.Name)
return
}
studio, err := t.findStashBoxStudio(ctx)
if err != nil {
logger.Errorf("Error fetching studio data from stash-box: %v", err)

Binary file not shown.

Before

Width:  |  Height:  |  Size: 13 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 11 KiB

View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 850 1250"><path fill="#fff" fill-rule="evenodd" d="M480.72 36.58c-10.56.01-21.69 1.97-34.35 9.32-28.85 23.98-32.38 59.58-34.6 98.83 2.47 24.57 6.03 47.44-4.17 73.49-11.7 1.42-24.56 5.66-37.6 3.92-13.16-1.77-28.8.54-35.24 7.21-20.4 27.97-29.09 64.17-40.61 100.3-11.53 36.13-18.01 65.15-18.22 70.6-2.83 15.39-2.51 33.25 2.95 50.89 11.33 29.32 21.89 56.36 33.98 87.95.79 1.81-3.34 3.11-2.83 5.1s1.53 4.44 1.83 6.54c.31 2.09 1.54 4.43 1.7 6.54.16 2.12 3.58 4.21 3.68 6.28.2 4.13.72 8.09 1.09 11.57.35 3.46.94 6.44 2.03 8.64l10.12 14.39c1.57 2.4 7.54-.99 8.58.93 2.17 4.07 8.13 2.75 10.29.62 1.53-1.52 3.99 2.03 6.07 1.8 2.09-.21 4.15-.6 6.13-1.25-7.69 22.24-20 44.47-23.07 66.71-8.78 29.38-21.69 44.27-26.36 88.13-1.3 17.3-.07 34.6 0 51.89l-9.88 32.94L280 901.69c-19.57 58.62-20.55 113.74-24.72 174.61l-6.59 12.36v18.93c-3.64 30.03-16.39 58.76-20.53 88.73.83 2.15 1.18 4.75 3.06 6.7 3.02 3.11 8.17 5.46 11.77 8.47 4.47 3.09 9.69 2.36 14.92 1.66 54.03-16.43 20.05-34.21 27.03-52.83.98-17.36 1.37-34.58 13.18-51.07 2.31-9.33-1.25-18.67-3.3-28.01 5.88-43.68 48.11-124.8 50.24-134.25l15.66-65.07c16.14-20.59 23.3-52.18 32.94-80.72l43.65-86.48 21.31-51.73c9.84-.07 19-10.61 23.16-16.63l9.88.82c-.86 10.43.07 20.87 10.71 31.29l27.19 132.61 16.47 37.88 11.54 52.71 15.65 104.61 1.64 42-4.11 14.83.82 30.48-4.94 38.72-11.1 31.68c-1.36 4.76-.24 11.37 2.86 14.2 20.97 20.41 52.34-.35 55.72-11.59.63-18.29 1.25-32.22 4.12-50.52l-.54-35.66-2.47-4.94v-12.36l-3.3-18.12c4.08-41.75 10.1-82.84 9.89-126.02-.54-35.83-6.63-70.28-16.48-103.77-2.39-22.61-4.97-45.14-6.92-67.98-1.4-16.4-2.48-32.94-2.95-49.8 1.15-11.25 2.29-21.55 3.17-31.35 2.86-32.14 2.9-58.75-8.16-94.67l-2.43-6.59 4.95-3.29c3.65-.82 3.05 4.44 11.53-3.3 1.52 1.27 4.12.91 8.72-2.47 3.65-9.09 11.8-16.92 
9.84-28.1-6.39-4.95-2.79-3.45-21.03-13.08-2.21-9.68.25-18.2.82-27.17.17-15.34-1.29-29.93-5.11-44.61-5.09-19.73-8.47-39.44-13-59.18-5.76-20.28-4.09-56.63-11.97-89.87-9.42-15.61-19.5-15.96-21.42-17.38l-11.08-31.12c3.56 6.11 6.41 12.23 10.71 18.33l-5.77-19.15c5.23 9.53 16.02 18.6 12.92 28.83 3.22-8.21-1.64-14.95-5.61-21.85 3.91 6.17 10.05 11.8 14.92 17.73-7.23-10.16-14.41-20.32-14.83-30.48l-4.94-24.71c3.06 7.81 2.43 15.63 9.65 23.44-4.6-8.91-1.76-17.82-2.24-26.75 1.76 9.07 3.58 18.13 9.89 27.19-6.47-12.36-6.92-24.71-9.06-37.06-2.43-8.59-5.1-17.18-6.13-25.77-1.33-11.18-.67-22.37-1.29-33.54l.83-42.84c-2.94-12.9-5.05-25.81-16.48-38.72-7.84-10.63-17.89-16.1-28.01-21.41-8-.74-15.89-1.91-24.11-1.91Zm69.42 210.28c.48 1.07 1.02 2.13 1.6 3.19-.6-1.05-1.15-2.11-1.6-3.19Zm-208.4 107.77c1.56 22.71.19 45.21 3.62 67.92 5.74 22.85 11.94 41.69 16.71 66.52.16 12.4 1.33 22.33-6.98 55.89l-4.83-13.46c-1.32-2.05-2.21-4.2-6.7-5.73-4.36-16.84-11.3-31.44-13.26-48.73-2.25-19.97-6.79-34.65-11.92-51.22 1.11-16.12 7.62-28.97 12.69-46.1l10.66-25.09Zm205.09 80.48 7.85 29.71c2.19 15.92 5.13 31.36 15.73 41.92l-6.7 17.77c.05-5.06.11-10.09-.59-15.14-3.01-7.45-6-12.83-9-17.16l-19.23-27.39c-1.85-2.04-4.76-4.67-8.16-7l-.87-7.26-1.76-7.29c10.68-.94 17.45-4.12 22.73-8.16Zm38.42 99.9 1.17 8.13-4.07 11.07-6.13-12.51 7-3.5 2.03-3.19Z"/></svg>

After

Width:  |  Height:  |  Size: 3 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 11 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 10 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 11 KiB

View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 850 1250"><path fill="#fff" d="M376.15 24.84c-3.58 1.02-10.08 2.26-14.39 2.85-9.64 1.31-12.93 2.19-18.26 4.82-14.17 7.01-23.67 19.21-29.66 37.99-3.36 10.52-4.53 19.14-5.26 37.62-.37 9.64-1.1 18.99-1.75 21.99-3.43 16.14-10.3 24.47-23.3 28.12-5.33 1.53-7.52 1.75-20.09 1.75-7.67 0-14.03.07-14.03.15 0 .15 1.46.58 3.29 1.02s3.29 1.02 3.29 1.31-3.07.95-6.72 1.46c-12.2 1.75-22.72 5.41-27.98 9.86-1.68 1.46-1.97 1.97-1.17 2.56.73.44.37 1.1-1.68 2.7-3.36 2.7-6.72 7.38-8.18 11.4-1.31 3.8-1.46 11.25-.22 13.95s2.12 2.48 2.12-.58c0-7.38 8.04-12.2 16.44-9.86 1.83.51 3.29 1.02 3.29 1.17 0 .22-1.39 2.05-3.14 4.16-1.75 2.19-3.87 5.33-4.75 7.16-1.83 3.94-2.92 9.13-1.68 8.4 1.17-.73 1.1-.44-1.39 4.82-2.63 5.62-4.38 13.22-4.38 19.21 0 5.11 1.02 10.08 2.12 10.45.44.15.8 1.53.8 3.07 0 3.87 2.48 11.4 5.04 15.41 2.19 3.58 5.84 6.72 8.69 7.6l1.68.51-1.53-2.48c-.8-1.31-1.46-3.29-1.46-4.38 0-2.48 2.78-10.08 3.73-10.08.37 0 .66 1.75.66 3.8.07 2.12.51 4.68 1.1 5.7 1.02 1.75 1.1 1.75.66-1.17-.51-3.29.88-11.25 1.97-11.25.58 0 .22 5.55-.51 7.89-.58 2.05 1.31 10.67 3.36 15.19 2.19 4.82 3.36 6.06 2.78 2.85-.58-3.07.66-2.05 1.53 1.31.95 3.14 5.04 10.45 5.62 9.86.15-.22 0-2.7-.37-5.55-.73-5.84 0-13.88 1.83-19.87 1.24-4.09 5.84-13.81 7.82-16.44s2.12-.58.22 3.14c-3.36 6.72-5.62 15.92-6.06 24.69-.44 8.62.15 14.9 1.31 14.9.37 0 .66-.44.58-.95-.22-2.05.15-4.89.66-4.89.29 0 .44 2.12.29 4.68-.73 11.47 4.46 26.44 10.23 29.51 3.8 2.05 3.94 2.7 3.43 25.86-.51 24.4-1.24 31.63-8.33 81.3-5.92 41.57-6.57 46.9-8.4 66.26-2.19 24.11-2.12 23.74-12.35 43.25-2.19 4.16-3 6.57-3 8.91q0 3.21 4.24 5.26l4.16 1.97.22 6.65c.22 7.52.22 7.6 6.57 7.6h3.14l-.44 5.26c-1.1 12.71-2.78 24.18-4.53 32-3 13.22-2.85 14.76 2.41 25.93 4.09 8.84 13.37 25.71 18.85 34.33 2.05 3.07 3 3.87 5.99 4.82 3.43 1.02 3.58 1.24 4.09 4.31.29 1.83 1.1 19.72 1.68 39.81.58 20.09 1.75 45.07 2.56 55.52 3.43 43.83 3.36 54.64-.73 100.08-1.17 13.22-3.29 43.03-4.75 66.11-1.39 23.08-4.02 65.16-5.84 93.5-3.65 
58.07-6.65 108.33-7.38 124.33l-.51 10.74 1.75.37c.95.22 4.53 1.1 8.04 1.83 8.99 2.12 14.83 4.46 21.33 8.55 5.33 3.36 5.62 3.73 5.26 5.77-1.9 11.18-.58 19.87 3.87 24.62 7.52 7.96 25.06 8.55 34.84 1.17 5.92-4.46 8.4-11.4 8.4-23.52v-6.79l7.74-3.43c4.31-1.97 9.42-4.09 11.54-4.82l3.73-1.24-.22-3.21c-.07-1.75-.95-19.65-1.97-39.74-3.8-75.83-4.68-90.58-9.13-148.29-4.46-57.71-5.19-71.44-5.62-104.83-.58-41.71.44-58.37 7.38-121.7 4.82-43.83 8.33-79.26 9.2-93.8.95-14.61 2.26-23.96 4.38-30.68 1.97-6.5 2.63-7.38 3.51-5.11.44 1.02 4.38 11.03 8.77 22.28 4.38 11.25 14.46 36.96 22.35 57.2 7.96 20.16 20.75 52.23 28.49 71.22 32.14 78.75 37.55 96.06 49.02 159.03 2.26 12.27 5.41 29.37 7.01 37.99 6.72 36.6 12.56 72.32 27.03 165.38 6.28 40.1 6.94 43.76 8.33 44.19.8.29 10.88 3.8 22.35 7.74l20.89 7.16.51 6.28c1.46 19.07 7.89 26.3 24.03 26.96 6.5.29 7.38.15 11.1-1.61 4.46-2.26 9.72-7.6 12.05-12.49 1.97-4.09 2.19-12.49.44-19.07-.66-2.48-1.17-4.68-1.17-4.89 0-.15 2.19-1.61 4.82-3.14 5.7-3.29 9.57-7.52 10.23-11.03.73-3.94-.88-11.91-7.38-36.31-7.67-28.78-33.24-118.05-39.45-137.77-2.63-8.25-10.52-32.07-17.6-52.96-17.82-52.81-19.21-57.93-34.63-124.91-8.99-39.3-18.7-80.79-30.68-130.76-5.55-23.3-11.18-47.77-12.42-54.42-2.63-14.03-5.84-24.11-13.15-41.27-2.92-6.87-6.65-16.07-8.25-20.45l-2.92-8.04-.22-18.63c-.07-10.23-.51-22.06-.95-26.22s-.58-7.89-.37-8.25c.29-.44 1.46-1.17 2.78-1.68 2.19-.95 2.34-.88 5.99 2.56 7.16 6.79 15.49 7.38 27.03 1.9 3.07-1.46 6.43-3.29 7.6-4.09l2.05-1.46 4.09 5.55 4.16 5.48 4.53-3.58c6.36-5.04 11.25-7.96 15.41-9.06 3-.8 4.02-1.61 6.79-5.11 4.53-5.99 8.99-14.54 13.73-26.88 2.26-5.84 6.28-15.19 8.91-20.82 18.34-39.23 23.08-55.96 19.58-69.32-.66-2.41-4.31-10.74-8.11-18.63-17.17-34.99-29.37-64.87-43.03-105.26-8.18-24.25-11.4-31.78-15.71-37.04-1.97-2.34-4.97-6.43-6.57-9.2-3.87-6.43-4.97-7.09-11.69-7.01-3.87.07-8.77-.73-16.8-2.56-7.6-1.83-14.83-2.92-21.99-3.51l-10.74-.88 1.31-3.8c11.18-32.43 14.03-69.18 
7.38-96.79-6.43-27.25-27.9-59.46-47.92-71.95-3.07-1.9-8.4-4.46-11.91-5.62-5.7-2.05-7.38-2.26-16.58-2.48-9.06-.22-10.96 0-16.8 1.61Zm125.64 306.44c7.3 11.25 14.76 27.76 25.2 55.74 9.72 26.15 10.96 32.51 8.33 41.42-1.31 4.31-13.37 28.85-15.85 32.29-.95 1.31-3.14 2.63-5.84 3.65-5.55 1.97-8.55 4.82-8.55 7.89 0 1.83.73 3.07 3.29 5.55 1.83 1.75 3.29 3.58 3.29 4.09 0 .58-.95 1.53-2.05 2.19-1.17.73-3.51 2.78-5.19 4.6-2.92 3.21-3.07 3.51-1.9 4.75 1.1 1.24 1.1 1.39-.15 1.68-5.62 1.61-5.26 1.75-13.44-6.21-4.24-4.16-8.91-8.18-10.37-8.91-1.39-.8-2.78-2.12-3-2.92-.22-.88-.8-6.57-1.31-12.78-1.61-20.97 1.97-48.36 10.81-82.25 1.75-6.87 4.82-18.7 6.79-26.3 1.97-7.67 4.16-17.39 4.89-21.77.66-4.31 1.39-7.82 1.53-7.82s1.75 2.26 3.51 5.11ZM283.01 578.92c0 9.72-.66 11.61-2.19 6.79-.95-2.7-2.19-14.76-1.68-15.78.15-.29 1.1-.51 2.12-.51h1.75v9.5Zm-.73 60.92c.58.07 1.31.88 1.53 1.9.73 2.78.73 10.23.07 10.23-1.53 0-7.45-10.3-7.45-12.93 0-.22 1.1-.07 2.41.22 1.24.29 2.85.51 3.43.58Z" vector-effect="non-scaling-stroke"/></svg>

After

Width:  |  Height:  |  Size: 4.8 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 10 KiB

After

Width:  |  Height:  |  Size: 3 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 8.5 KiB

View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 850 1250"><path fill="#fff" d="M472.98 38.67c-13.15.96-26.67 7.37-39.17 14.9-15.4 4.08-26.21 21.1-32.37 33.99-5.93 21.36-10.44 43.16-12.62 65.39-8.56 18.78 2.69 49.48-20.08 58.28-26.46 3.29-45.91 24.89-57.3 47.58-7.59 25.25-21.44 47.64-34.64 70.25-8.14 19.94-32.81 48.39-10.36 66.7 26.26 16.49 58.12 9.34 86.76 5.18 19.26 6.03 13.11 40.3 13.27 57.62.75 24.57-7.78 47.72-11.98 71.55-5.73 30.16 7.91 58.68 9.71 88.38 5.03 13.95-6.07 56.76 17.49 46.62 13.87 33.68 36.67 62.85 51.48 96.15 8.34 20.57 26.8 43.12 16.18 66.04-13.64 21.93-14.66 48.16-13.27 73.17-.62 26.72 8.46 52.33 10.04 78.64.81 38.12-21.73 71.17-25.9 108.47-1.34 21.76-26.4 39.99-12.31 61.54.39 2.59 6.27 16.37 9.72 29.42 2.95 11.19 7.33 18.65 11.32 25.9 7.35 14.5 20.87 3.11 23.96-3.83 8.76-19.79 14.33-29.42 13.27-5.18-1.06 24.24 47.37-4.77 52.13-24.35.58-2.38-.94-3.52 1.62-11.29 2.44-19.89-13.36-38.12-11.98-60.81-1.27-32.43-9.15-64.23-10.69-96.55-2.57-23.93 7.7-45.17 15.87-66.99 19.16-49.34 5.91-103.12 15.54-154.1 10.93-42.36 20.98-85.33 38.52-125.61-.89-18.39 20.38-20.42 25.58-30.76-17.12-45.49-12.18-96.36-31.73-141.15-11.79-19.53-23.53-41.51-34.95-59.57 10.41 8.04 27.29 13.76 37.22-2.59 2.46-11.07 14.4-21 9.72-32.7-8.61-21.13-4.68-43.27-8.42-65.72-10.33-20.34 26.7-1.53 19.42-11.33 21.04-14.01-21.31 8.01-11.98-12.3-4.57-23.82-10.07-47.05-19.75-69.29-7.05-48.59-12.56-98.26-29.46-144.71-7.04-10.12-4.91-16.98-13.6-28.49-10.4-14.73-23.1-19.4-36.26-18.45ZM343.16 329.72c5.59.8 1.72 20.91-.96 29.46-.23 6.92-4.89 5.55-9.4 4.21-23.94.47-6.9-16.97 1.29-26.87 4.43-5.2 7.2-7.07 9.06-6.8Z"/></svg>

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 12 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 9.7 KiB

View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 850 1250"><path fill="#fff" d="M390.27 1215.71c-26.85-.4-39.98-26.54-37.66-50.52-5.05-22.51-3.56-45.85-4.53-68.85 2.06-23.14 21.76-41.17 15.57-66.3-3.11-37.23-4.87-74.88-.58-112.07 4.2-24.94 13.46-48.45 25.58-70.54.29-20.04-3.9-39.59-12.25-57.87-14.58-37.71-27.84-75.94-45.89-112.18-8.06-20.45-4.48-48.36-34.09-48.93-17.01-1.88-35.19-2.94-30.5-26.06-1.18-31.57 3.95-62.99 2.66-94.64 4.55-28.15-22.55-45.52-37.4-65.53-15.7-21.87-32.9-42.98-45.36-67-4.35-29.81 14.01-56.22 24.4-82.85 12.15-30.72 29.69-58.93 42.97-89.06 10.76-17.8 24.8-40.58 48.45-39.99 22.64-.45 46.86.54 65.3-14.85 17.26-15.49 22.96-38.33 32.33-58.42 12.24-28.4 35.57-54.65 67.74-58.99 24.33-3.89 50.81 5.21 68.9 21.61 3.9 22.95 19.71 38.48 25.56 60.65 2.49 21.53-2.29 45.5 24.99 52.18 24.74 16.83 50.28 32.79 73.42 51.79 14.52 18.65-5.63 46.05-27.33 47.31-28.5 13.29-59.15 21.79-90.55 24.61-27.88-4.29-18.63 31.66-1.72 39.88 23.56 21.16-22.88-12.64-23.18-12.36-.26 12.99-3.78 24.58-8.09 5.3-11.15-22.15 3.96 31.49-5.43 19.37-1.78-11.09-13.78-34.88-13.28-9.82 1.3 8.03 14.21 33.96-.67 15.64-6.99-15.67-3.58-33.86-5.09-50.74-26.39-2.77-11.88 22.66-7.91 36.5 5.97 25-25.13 39.55-22.68 61.96 12.94 16.43 31.57 29.5 42.84 49.13 24.41 33.32 29.99 74.92 40.63 113.67-1.49 14.89 32.93 29.94 3.81 30.29-16.72 11.07-10.87 39.91-21.52 57.74-7.71 38.93-19.29 77-31.3 114.78-8.06 27.2-18.27 54.63-17.23 83.57-.36 40.67-6.09 81.1-16.62 120.35-5.81 25.47-15.31 50.35-14.89 77.06-2.19 24.53 2.41 48.98 11.71 71.6 7.37 20.12-4.17 48.71-29.66 40.63-24.51-5.69-10.97 27.47 2.91 33.26 14.53 16.75 1.15 34.28-19.32 29.02-5.02 0-10.06.1-15.06-.33Zm-121.8-810.49c.16-22.09 10.41-42.25 5.24-64.23-2.08-9.98 4.9-41.86-5.44-38.66-6.72 22.85-29.25 40.55-28.37 65.34 8.51 16.52 21.61 30.41 27.22 48.67 1.8-3.45.92-7.45 1.35-11.12Zm252.68-105.3c.93-13.95-20.6-7.37-6.22 2.41 3.97 6.03 8.46 5.04 6.22-2.41Zm59.8-74.98c-11.22-15.73-26.65-27.69-40.46-40.91-11.99-1.74-12.72 24.78-21.55 33.89 1.23 17.12 35.65 
4.3 49.7 7.82 4.1-.25 8.26-.03 12.31-.8Zm-20.67-81.28c4.51-20.53-17.82 12.62-1.15 4.06l1.15-4.06Zm-5.14-8.5c8.84-11.18-12.11-43.92-7.45-15.82 1.26 3.6-.62 29.43 7.45 15.82Z"/></svg>

After

Width:  |  Height:  |  Size: 2.1 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 10 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 12 KiB

After

Width:  |  Height:  |  Size: 3.5 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 40 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 6 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 8.7 KiB

View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 850 1250"><path fill="#fff" d="M515.92 50.69c-6.99 3-13.47 11.05-20.4 10.88-4.77-.12-9.35-6.12-13.66-8.18-13.67-6.52-28.43-11.2-43.59-9.53-12.73 1.41-23.74 8.9-32.71 17.7-6.07 5.96-12.45 15.75-16.36 23.18-3.9 7.43-9.53 25.88-9.53 25.88s-12.4-.29-17.7 0c-15.19.82-35.43 2.84-50.42 5.48-20.31 3.58-49.51 5.07-66.77 16.36-1.71 1.12-3.48 3.46-4.13 5.4-1.83 5.41-1.55 13.55 0 19.05 2.34 8.3 10.81 17 15.01 24.53 15.71 28.21 37.76 49.62 61.29 70.9 7.51 6.79 24.53 23.18 24.53 23.18 6.98 5.76 16.45 11.52 16.36 19.05-15.6 19.28-35.29 36.21-38.65 62.64-1.61 12.64 5.13 24.18 12.76 34.06 6.81 7.35 16.02 14.71 19.05 23.18 1.41 10.87-1.74 21.78-1.35 32.71.35 9.62 5.5 19.48 6.83 31.36 2.99 26.68 8.29 56.95-2.7 80.34-7.19 15.31-9.76 30.37-13.66 46.37-5.57 22.84-7.24 44.89-5.48 68.12 2.63 34.67-1.67 27.3 2.78 55.9 2.44 15.68-5.66 18.62-.59 51.65s7.77 115.9-8.94 159.54c-5.29 13.81-5.21 26.61-5.48 40.89-.47 25.23 1.66 51.32-2.7 76.3-2.69 15.41-13.49 34-17.7 49.07-2.24 8.03-4.51 18.95-5.48 27.23-1.34 11.36-1.15 26.67-1.35 38.11-.14 8.19-7.42 28.07 0 27.32 56.79-5.79 182.54 27.49 216.67 6.83 13.32-8.06 54.71-1.21 77.67-4.12 5.62-.71-7.64-26.93-2.72-35.42 14.6-25.2 11.84-56.1 13.57-84.47 2.94-48.13 1.52-94.96-9.53-141.72-9.64-40.79-28.29-77.44-25.88-119.89.84-14.79 2.8-34.5 5.48-49.07 2.36-12.87 8.08-29.45 10.88-42.24 5-22.83 10.66-46.09 9.53-69.55-2.61-53.82-23.61-105.61-51.77-151.25-32.73-38.42-13.69-70.93 2.7-117.19 18.54-51.11 46.6-98.49 64.42-149.75 3.43-11.62 2.55-23.64 2.35-35.55 1.61-45.84 4.24-91.75-1.35-137.59-.2-3.71-.23-8.77-1.35-12.31-1.53-4.87-3.98-12.03-8.18-14.92-26.26-13.42-56.44-1.36-81.78 9.53Zm39.54 51.77c-5.44 34.07-2.93 22.83-4.13 32.71-1.93 15.86-3.17 37.14-3.78 53.11-.33 8.59 0 28.66 0 28.66l-9.8 8.18-9.53 5.4c-1.92-21.24 3.39-42.82 5.4-63.99 1.51-15.9.47-32.33-4.05-47.72-2.47-8.42-13.66-25.88-13.66-25.88 13.63 0 9.12.6 13.66 0s22.31-.46 27.23-4.28c7.16-5.55-.48 8.39-1.35 13.8ZM368.8 148.74c4.68 1.82 8.99 4.45 
12.22 8.18 6.06 6.98.33 21.93 1.35 31.36s5.48 31.36 5.48 31.36l.88 12.33-34.94-28.69-43.59-32.71s41.88-28.34 58.59-21.84Z"/></svg>

After

Width:  |  Height:  |  Size: 2.1 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 9.6 KiB

View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 850 1250"><path fill="#fff" d="M439.54 22.98h3.95c7.49.23 14.96 2.18 21.45 5.99 5.2 2.97 9.66 7.03 13.87 11.26 7.76 7.83 14.32 16.91 18.92 26.95 3.62 7.83 6.05 16.19 7.35 24.71.64 4.04.93 8.12 1.34 12.18 2.96 31.64 5.13 63.34 7.56 95.02.41 4.77.46 9.65 2 14.22 1.13 3.55 3.45 6.83 6.81 8.55 4.54 2.4 9.83 2.42 14.85 2.41 7.19.11 14.55.9 21.16 3.9 4.39 1.99 8.25 5.15 11.03 9.1 3.81 5.37 5.86 11.75 7.22 18.12 2.27 10.6 2.52 21.49 2.6 32.29.09 37.2-.42 74.39-.22 111.59.16 37.19 1.46 74.36 2.69 111.53 1.73 50.12 3.64 100.24 4.31 150.39v3.69c-.23 7.17-1.45 14.39-4.28 21.03-2.47 5.85-6.28 11.23-11.37 15.09-4.6 3.54-10.17 5.72-15.89 6.59-5.38.85-10.87.56-16.23-.25 2.52-6.96 5.5-13.77 7.54-20.9 2.25-7.83 4.71-15.6 6.48-23.55 1.4-6.12 2.14-12.37 2.34-18.64.32-8.17.53-16.57 3.53-24.29.74-2.03 1.77-3.94 2.77-5.85 1.71-3.69 1.72-7.9 1.35-11.88-.67-6.85-2.69-13.45-4.51-20.06-7.07-25.07-15.44-49.76-21.91-75-1.9-7.64-3.9-15.29-4.94-23.11-2.29-16.82-1.58-33.83-2.48-50.74-.38-6.42-.03-12.85-.05-19.27-2.16.06-4.31.04-6.47.13.03 18.82-.9 37.65.13 56.45 2.09 35.31 5.96 70.49 8.05 105.79 1.13 18.76 1.56 37.55 1.88 56.34.42 28.92.38 57.85.88 86.78.19 8.37.29 16.75.67 25.11 1 12 6.07 23.07 9.4 34.52 3.83 12.54 8.16 24.92 12.08 37.43 5.85 18.4 10.09 37.26 14.15 56.12 2.33 10.98 4.49 22 6.62 33.02.81 4.49-1.04 8.82-1.58 13.22-.36 2.27.01 4.56.22 6.84 1 10 2.45 19.96 3.84 29.91 1.16 8.03 2.31 16.1 2.21 24.23.05 20.62-5.81 40.93-15.27 59.15-3.27 6.43-7.24 12.46-10.92 18.65-12.65-5.55-25.15-11.48-38.19-16.07-13.93-4.95-28.7-7.95-43.54-7.37-5.28 33.1-2.88 67.28 6.18 99.51 1.81 6.51 3.88 12.96 6.18 19.32 2.15 6.05 4.85 11.94 6.03 18.29.93 4.73.95 9.83-1.09 14.29-2.31 5.14-6.75 8.91-11.31 12.03-4.47 2.94-9.13 5.91-14.48 6.91-5.99 1.26-12.58-.05-17.32-4.01-5.05-4.11-7.67-10.6-8.1-16.98-2.3-39.13-3.74-78.3-5.68-117.45-.34-4.9-1.05-9.77-1.6-14.64-1.67-.04-3.34-.08-5-.14-.48 5.59-1.41 11.13-1.66 16.74-.24 3.6.15 7.21.27 10.81 2.2 42.55 4.71 85.08 
7.07 127.62.23 4.85.09 9.71.01 14.57-7.17-.22-14.35.1-21.53.17h-10.86c-12.07-.01-23.97-2.48-35.62-5.4-.84-.24-2.35-.62-1.97-1.79.93-1.73 2.79-2.65 4.38-3.65 2.58-1.58 5.51-2.49 7.96-4.27 3.26-2.14 6.56-4.24 9.99-6.1 4.07-2.03 7.89-5.08 9.62-9.41 1.32-3.12 1.7-6.54 1.81-9.89.75-16.74-1.12-33.49-4.28-49.91-3.13-16.3-7.48-32.34-11.69-48.38-10.8-40.85-19.33-82.24-28.74-123.42-.54-2.32-1.32-4.8-.45-7.14 3.38-10.97 2.52-22.83-.68-33.73-3.01-10.59-7.67-20.6-11.31-30.97-3.45-10.08-6.03-20.43-8.57-30.77-3.41-14.23-6.26-28.58-8.98-42.95-2.41-13.15-5.03-26.27-7.17-39.46-3.06-19.13-5.29-38.37-8.49-57.48-.41-4.68-4.21-8.38-8.29-10.24-4.78-1.52-9.44-3.57-13.5-6.55-5.08-3.65-9.18-8.55-12.22-14-3.94-7.03-6.3-14.8-8.22-22.58v-1.71c.59-4.84 1.45-9.64 2.13-14.46 5.15-33.97 10.32-67.93 15.48-101.9 2.31-15.05 4.52-30.12 7.02-45.14 2.98-18.14 6.09-36.27 8.28-54.53 2.44-20.62 3.82-41.35 4.47-62.09.42-16.02.63-32.06-.21-48.07-.96-20.03-2.92-39.99-3.64-60.03-.38-7.54.6-15.25 3.64-22.2 4.29-10.05 13.01-17.82 23.05-21.87 6.66-2.58 13.85-3.18 20.74-4.9 9.11-47.97 17.53-96.09 27.73-143.86.94-4.35 1.76-8.74 3.25-12.94 3.86-11.25 10.21-21.42 16.69-31.32 1.55-2.5 4.02-4.24 6.57-5.62 3.99-2.14 8.31-3.57 12.41-5.48 2.74-1.27 5.84-.27 8.68-1.02 6.44-1.61 13.01-2.81 19.65-3.15m-92.5 355.4c.01 19.2.01 38.39 0 57.59 2.11-3.68 3.52-7.73 4.62-11.82 2.49-9.87 3.06-20.25 1.46-30.32-.95-5.47-2.49-11.09-6.08-15.46M281.2 669.67c-.71 3.96.85 7.98 3.28 11.07 1.35 1.64 2.77 3.41 4.86 4.11 4.01 1.41 8.35.99 12.46 1.84.88-5.13 1.59-10.29 2.46-15.42-3.28 3-6.49 6.06-9.83 8.98-2.78-6.46-5.75-12.83-8.55-19.27-2.2 2.47-4.17 5.34-4.68 8.68Z"/></svg>

After

Width:  |  Height:  |  Size: 3.5 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 14 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 14 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 11 KiB

View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 850 1250"><path fill="#fff" d="M582.86 677.29c-5.58-11.04-5.58-37.61-5.58-37.61-13.88-171.93-20.53-177.51-20.53-177.51-3.92-25.99 3.68-109.52 3.68-109.52 7.95-83.53-17.68-95.28-17.68-95.28l-9.37-6.05c9.37-2.37 9.73-7.83 9.73-7.83-17.68.83-8.19-20.41-8.19-20.41 7.83-13.29 2.37-28.24 2.37-28.24 14.83-24.32-9.37-57.19-9.37-57.19 3.09-8.66-24.32-43.07-24.32-43.07-6.29-7.83-23.61-45.56-23.61-45.56-20.53-39.16-68.58-26.82-68.58-26.82s-58.26 6.41-58.97 66.69c0 0-2.37 42.36-9.49 51.02 0 0-10.32 21.83-8.66 43.07 0 0-18.87 63.36.71 79.74 0 0-27.29-4.51-27.29 40.11v45.09s-15.9 27.88 4.51 51.38l4.63 60.04s-14.12 45.33-16.97 85.2c0 0-9.73 47.58-15.19 66.33-5.58 18.75-8.19 21.6-7.12 29.9 0 0-13.29 28.24-13.29 37.61 0 0-4.39 7.24-2.73 11.63 0 0-6.64 19.93-4.39 24.32 0 0 14.36 19.93 18.27 23.26 0 0 3.32-6.05-2.73-15.43l-3.32-12.7s11.04 6.64 13.29 7.71c0 0 15.43 1.07 23.73-3.92 0 0 5.58-8.31.59-9.37 0 0 1.07-4.98-2.73-6.64 0 0-3.32-11.04 1.07-22.07 0 0 7.24-13.76 3.32-34.29 0 0 6.05-34.29 13.29-44.73l2.25 6.05s-.24 22.54-3.68 48.65c0 0-8.9 111.66 3.8 178.11 0 0 4.98 41.53 3.32 64.19 0 0-3.32 32.63 5.58 40.94 0 0 0 13.88 5.58 18.75 0 0 13.88 54.46 34.89 114.5 10.8 30.73 7.71 54.7 7.71 54.7-3.92 11.63-11.04 41.53-11.04 41.53-17.09 9.37-19.34 25.99-19.34 25.99-6.05 2.73-6.64 17.09-6.64 17.09-5.58 18.75 13.29 16.02 13.29 16.02 51.38-6.64 46.99-33.7 46.99-33.7 1.66-16.02 19.22-43.19 19.22-43.19l.12 25.39c3.92 4.39 6.53-.47 6.53-.47.12-18.39 5.7-36.07 5.7-36.07 18.27-16.02 7.24-23.73 7.24-23.73 0-4.39-6.64-9.97-6.64-9.97 2.25-5.58-6.05-10.56-6.05-10.56-5.58-4.39-11.63-16.61-11.63-16.61-7.71-12.7-4.98-55.89-4.98-55.89 12.7-37.02-13.41-85.2-13.41-85.2 3.2-15.43 1.31-49.72 1.31-49.72-1.07-30.38 13.88-88.52 13.88-88.52 12.22-24.8 14.48-74.75 14.83-94.69 1.66 11.27 8.42 25.16 8.42 25.16 9.49 20.41 15.43 50.31 15.43 50.31 3.8 28.48 11.04 50.9 11.04 50.9.59 13.29 14.36 55.29 14.36 55.29-2.25 36.55 13.88 47.58 13.88 47.58 2.25 12.22 3.32 78.55 
3.32 78.55-1.07 61.35-17.68 97.89-17.68 97.89-9.37 9.97-29.31 71.31-29.31 71.31-10.56 6.64-18.75 19.93-18.75 19.93-4.39 0-11.04 12.22-11.04 12.22-21 12.7 13.29 19.93 13.29 19.93 53.63 5.58 58.85-18.27 58.85-18.27 3.92-11.63 18.39-42.6 18.39-42.6v24.32c2.49 4.98 8.42 0 8.42 0-1.66-16.02 6.29-36.55 6.29-36.55 17.68-27.65 5.81-28.24 5.81-28.24 1.66-9.37-10.44-21.6-10.44-21.6-2.25-5.58 3.32-18.75 3.32-18.75 53.04-117.83 29.31-176.44 29.31-176.44-1.07-12.7-6.64-45.33-6.64-45.33 5.58-48.65 1.66-108.33 1.66-108.33 2.73 4.98 19.93-3.32 19.93-3.32l12.1-8.31c5.58-2.73 11.75-27.53 11.75-27.53 4.39-6.76-9.61-40.58-9.61-40.58Zm-60.16-87.92-2.73-6.05c-.59-15.43-38.68-95.64-38.68-95.64-4.98-15.43 7.24-66.92 7.24-66.92 0-4.98 16.02-37.61 16.02-37.61l7.12 84.6c-7.59 33.58 30.02 152 31.56 156.75v.12-.12c-3.09-9.73-20.53-35.12-20.53-35.12Zm26.46 138.95 1.19-33.34c3.2 6.29 4.15 16.61 4.15 16.61-.83 15.43-5.34 16.73-5.34 16.73Z"/></svg>

After

Width:  |  Height:  |  Size: 2.9 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 12 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 12 KiB

View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 850 1250"><path fill="#fff" d="m434.69 27.94 1.67-.07c1.39 2.28-4.86 6.26-3.95 9.02 11.38 3.47 22.47 7.89 33.36 12.65 1.55 2.77-2.74.28-3.78 1.94-3.26 3.39-8.62 1.06-12.76 2.02 1.92 6.03 10.2 1.14 14.42 4.13 6.59 3.05 14.27.61 20.81 3.8 10.62 2.71 21.79.16 32.49 2.56 38.39 8.38 78.55 6.98 116.34 18.22 12.55 5.44 30.09 9.58 33.14 25.25l.24 5.59c-2.35 14.64-10.42 27.45-17.79 40.02-11.3 20.29-23.93 39.78-36.34 59.39-9.89 18.01-22.55 34.24-34.25 51.06-13.31 18.09-19.06 40.24-24.76 61.58-3.57 20.11-11.88 39.14-14.61 59.39-1.43 4.61-3.81 8.9-4.95 13.6-1.24 9.62-5.54 18.48-8.59 27.6-7.2 12-14.5 24.8-14.35 39.27-3.54 21.74 14.85 37.5 22.63 55.94 11.03 28.11 23.51 55.69 32.64 84.52 8.37 18.83 13.29 38.91 17.52 59 7.94 37.57 7.19 76.17 11.65 114.18-.45 28.59 4.53 56.97 12.02 84.49 2.75 17.66 12.14 33.21 17.16 50.19 4.04 11.92 7.37 24.23 7.76 36.89.7 34.27 6.39 68.12 9.82 102.17 2.23 25.23 6.57 50.56 16.3 74.08 2.35 9.56 7.5 17.99 12.55 26.33 2.84 5.79 10.05 6.8 14.02 11.57 5.38 4.7 6.77 13.22 3.66 19.44-12.76 3.47-28.24 1.11-38.23 11.4-2.12 8.58 2.83 16.62 4.09 24.94-.51 1.22-1.52 2.13-2.47 3.01l-1.1.05c-4.12-4.16-4.25-10.82-7.22-15.77-3.44-9.49-11.85-15.78-20.22-20.7-2.77-1.44-2.8-5.93.24-7.01-4.42-6.03-2.64-14.1-1.52-20.92-.28-8.02-1.05-16.11-3.65-23.74-3.32-11.81-3.38-24.42-7.98-35.91-2.13-5.21-3.09-10.76-4.41-16.18-3.21-11.88-9.61-22.57-13.03-34.38-5.89-16.99-10.95-34.27-17.68-50.96-6.8-18.03-12.61-36.54-15.13-55.7-3.08-16.68-2.43-34.1-8.08-50.25-8.49-19.67-18.15-39.23-21.5-60.56-6.63-27.16-15.83-54.01-30.02-78.2-7.36-16.22-14.85-32.41-21.25-49.04-4.45-17.75-8.76-35.54-13.84-53.11-.63-1.71-1.3-4.26-3.6-4.05-7.25-1.2-14.48-.78-21.76-1.4-4.55 3.57-7.12 8.81-8.56 14.29-5.8 15.25-9.96 31.14-16.86 45.96-11.4 19.44-21 39.98-34.3 58.25-10.59 18.23-20.3 37.2-26.79 57.31-5.24 18.24-13.5 35.46-22.72 52-4.24 7.24-8.25 14.82-9.47 23.23-5.17 26.42-10.26 53.3-22.4 77.59-12.04 25.15-23.93 50.34-35.68 75.64-10.48 20.29-19.2 41.57-25.1 
63.68-2.1 6.22-6.58 12.94-2.56 19.53 3.21 6.92 9.2 15.12 4.2 22.67 2.59-.33 3.96 2.22 3.56 4.52-3.93 4.88-8.68 9.1-12.08 14.42-7.84 11.3-9.39 25.29-13.63 38.07-1.7 1-3.68-.26-5.08-1.25 1.43-9.42 3.95-18.73 4.22-28.27-.04-5.48-7.07-8.39-11.27-5.13-12.78 4.61-26.14 13.15-40.1 9.73-2.24-2.91-3.24-6.59-4.68-9.9l-.07-1.7c-.23-7.84 4.77-14.02 9.7-19.48 1.16-4.41 5.29-5.56 9.14-6.57 6.88-17.21 13.42-34.62 21.79-51.19 6.73-15.66 11.39-32.12 16.11-48.46 6.36-33.01 15.66-65.54 21.52-98.79.98-13.65 4.05-27.19 8.03-40.26 6.15-16.38 14.88-31.63 22.72-47.21 7.09-20.5 14.78-41.02 17.7-62.62 4.13-25.36 8.76-50.63 13.04-75.96 4.29-30.12 10.04-60.2 20.71-88.77 6.67-22.68 18.32-43.39 30.73-63.35 5.99-15.39 14.98-29.53 19.18-45.62 4.32-16.26 13.84-30.49 18.97-46.42 3.33-16.59-.01-34.55-6.81-49.57-.53.26-.36 2.23-1.05 1.38-3.49-2.51-3.37-7.33-6.32-10.23-2.33-2.47-3.99-5.44-5.69-8.35-1.54 3.51.27 8.82-.7 12.96-3.4 2.05-3.28-5.64-6.24-2.22-2.56-4.25-7.1-6.8-9.1-11.48-2.01-1.77-3.2-6.37-6.37-4.17-3.6-3.03-5.61-8.32-9.56-11.31-.64 1.57.73 4.84-.96 5.42-.27-3.94.07-8.73-3.71-11.15-1.07 2.34-1.14 5.22-2.84 7.26.39-3.19 1.38-6.25 1.87-9.41-3.44-4.73-6.55-9.84-7.82-15.62-.43-5.23 2.56-9.8 4.57-14.38 1.64-6.65 2.47-13.63 5.91-19.69 2.6-4.58 3.99-10.86.66-15.42-10.32-15.16-15.01-33.93-28.74-46.73-24.72-24.64-46.13-52.22-68.42-79-12.37-17.13-27.93-32.24-36.88-51.63-4.09-8.33-3.1-19.07 3.12-26.1 9.18-9.65 20.47-16.86 31.64-23.96 20.09-9.27 39.62-19.78 60.32-27.73 7.73-4.35 15.5-8.68 23.81-11.89 13.3-7.14 27.05-13.79 41.92-16.91 16.9-5.78 35.25-10.3 53.04-6.4 6.55-.97 12.64-4.22 18.66-6.98m-78.8 47.61c-10.25 1.69-17.44 9.9-26.2 14.7-15.09 7.58-26.7 20.61-42.24 27.47-11.59 8.63-24.87 14.37-37.05 22.02-4 4.3 2.9 8.97 5.81 11.64 18.22 13.74 34.82 29.39 51.23 45.19 10.09 10.42 22.69 17.73 34.25 26.31 1.23-4.24 2.53-8.51 3.15-12.88.04-13.72-9.1-25.34-10.32-38.89.79-8.24-.35-16.62 1.14-24.79 3.11-13.04 12.91-23.42 24.07-30.3 3.65-1.9 6.15-5.19 9.22-7.81 5.51-3.53 6.39-10.85 11.79-14.5 3.64-3.08 8.66-3.42 
12.76-5.69-1.3-2.5-4.36 1.26-6.15-.06 1.38-3.02 3.76-5.33 6.11-7.59-9-.66-18.6-.78-27.24-4.27-3.27-1.46-6.9-1.19-10.34-.55m142.15 19.7c-1.85.65-5.24.48-5.39 3.01 3.91.32 6.94 5.48 10.78 2.8 2.78 10.29 13.92 15.48 15.9 26.13.98 8.08-2.63 15.43-6.1 22.4 2.8 5.3 5.81 10.51 7.97 16.13 3.34 5.04 7.63 9.8 8.29 16.12-.7 10.97 11.3 18.21 9.91 29.34 25.71-21.59 42.67-51.25 66.87-74.3 3.49-3.9 8.02-6.91 10.55-11.61 1.02-1.96-.53-3.48-2.2-3.65-31.86-5.13-62.58-15.12-93.84-23-7.44-1.88-14.92-5.44-22.75-3.35m10.25 17.62c1.13 3 6.09 5.03 3.73 8.37 3.22 4.18-.54 9.55.96 14.2.77 3.09-1.26 5.96-3.74 7.61-.21 2.08 1.81 4.05 3.23 5.4 7.76-10.67 8.36-28.21-4.18-35.58Z"/></svg>

After

Width:  |  Height:  |  Size: 4.6 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 12 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 11 KiB

View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 850 1250"><path fill="#fff" d="M423.52 18.72c-23.04 1.06-33.36 15.17-35.68 20.59-2.48 5.78-36.33 56.19-36.33 56.19l-20.59 27.23v1.23c-3.67 5.71-8.71 14.06-10.76 20.22-3.3 9.91-3.32 10.69-3.32 10.69l-2.46 10.76 4.98-4.12 4.12-14.01 7.58-19.5c.19 5.9.49 17.21-.14 26.08-.83 11.56-9.1 21.45-9.1 21.45v24.77s-2.47 4.13-9.9 5.78c-7.43 1.65-9.9.02-4.12 3.32s8.23 2.46 8.23 2.46l-7.44 15.67s-23.95 24.75-30.55 33.01-60.24 65.23-60.24 65.23-32.21 35.49-32.21 50.35 15.67 28.1 15.67 28.1 50.41 62.73 54.53 67.68c4.13 4.95 5.76 19.02 8.23 33.88 2.48 14.86 34.67-13.22 34.67-13.22s8.27 7.42 13.22 9.9c4.95 2.48 12.35-3.32 12.35-3.32s1.69 36.34.87 43.77c-.83 7.43-3.35 70.14-2.53 122.14.83 52.01-33.8 227.89-33.8 227.89l33.01 8.23s-4.17 26.41-.87 56.12c3.3 29.72 23.11 123.01 23.11 123.01l2.53 42.9-3.32 1.66-3.32 10.76.87 9.9s-4.15 11.56-3.32 23.11c.83 11.56 4.98 19.79 4.98 19.79s1.59 14.84 7.37 25.57 33.88 17.34 33.88 17.34l16.54-.79-7.44-36.33-3.32-14.01s-1.66-19.85-1.66-41.32-10.7-47.05-12.35-69.34c-1.65-22.29 4.96-44.54 7.44-68.48 2.48-23.94 5.78-92.45 5.78-92.45s38.76 2.42 74.25 3.25c35.5.83 66.89-4.12 66.89-4.12s1.61 23.15 4.91 41.32c3.3 18.16 11.61 35.45 14.08 45.36l19 75.98-2.53 42.11s-14.74 23.05-12.35 33.8c1.65 7.43 9.9 21.45 9.9 21.45l.79 47.09 5.78 3.25s-.83-39.61 0-44.57c.83-4.95 4.16-1.6 10.76 2.53s8.28 38.8 9.1 46.23c.83 7.43 20.59 9.03 20.59 9.03l54.53-3.25-33.88-26.44s-5.78-18.14-5.78-25.57-3.32-32.25-3.32-37.2-2.42-14.02-3.25-21.45-7.44-47.89-7.44-47.89-1.63-146.06-2.46-153.49c-.83-7.43-3.32-22.32-3.32-22.32l14.01-4.98s-9.91-36.31-11.56-44.57c-1.65-8.26-8.21-51.18-16.47-104.01-8.25-52.83-47.89-188.16-47.89-188.16V523.1l4.91.79s-1.66-20.63-1.66-33.01-12.35-59.44-12.35-59.44-1.66-13.21-1.66-17.34 4.98-18.13 4.98-18.13 20.63 36.31 33.01 33.01c25.72-6.86 42.1-20.68 34.67-99.1-8.89-93.86-21.45-126.26-21.45-126.26l-29.76-7.44c-9.91-2.48-37.92-13.22-37.92-13.22l3.25-9.1 2.53-17.34 9.03 11.56-6.57-18.13 
2.46-9.1-3.32-11.56s-2.45-16.51-6.57-28.89c-4.13-12.38-15.72-33.83-28.1-50.35-12.38-16.51-33.82-30.52-60.24-31.35-1.65-.05-3.23-.07-4.77 0Zm93.9 104.01 4.19 8.23 1.59 10.76-4.12 7.37-1.66-19.79v-6.57Zm-149.37 10.69-7.44 15.67-11.56 7.44 9.1-16.47 9.9-6.65Zm146.12 20.66v21.45l-1.66 4.12-6.65-3.25 4.19-9.97 4.12-12.35Zm-171.69 18.13-4.19 13.22-7.37 4.98 4.12-9.9 7.44-8.31Zm1.59 15.75 1.66 9.03-10.69 2.53 9.03-11.56Zm-22.54 125.25c1.45.06 2.68.64 3.61 1.88 4.95 6.6 18.13 42.91 18.13 42.91l13.22 33.8 7.44 17.34v22.32l-7.44 1.66-28.89 30.55s-18.19-27.27-28.1-33.88-23.93-19-27.23-24.77c-3.3-5.78-18.17-21.43-13.22-29.69 4.95-8.26 21.46-37.97 34.67-46.23 10.73-6.71 21.51-16.13 27.81-15.89Z"/></svg>

After

Width:  |  Height:  |  Size: 2.6 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 10 KiB

View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 850 1250"><path fill="#fff" d="M492.92 13.57h1.22c8.75 2.8 18.36.25 26.68 4.72-4.42-8.75 6.46 5.52 9.19 4.26-.65-2.46 3.96 2.05 2.53-.41-.49-2.14-5.71-6.12-1.24-6.01 4.85 1.06 4.44 5.97 8.46 11.25 3.64.53 11.6 4.08 8.93 9.56 1.76 1.66 10 1.16 4.63 2.06 8.55 5.05 10.1 15.04 15.16 23-3.08-.73-1.47 3.49-1.71 5.31 2.09 10.05 3.13 23.41-2.45 30.72-1.69 10.53-5.39 20.6-9.76 30.27-6.24 3.63-4.6 16.5-11.72 17.12-3.03 5.57-5.73 11.37-8.59 17.03 3.18 9.37 4.47 19.3 6.56 28.96-3.53-1.28-3.96-1.06-5.85 1.93 3.06-1.74 5.15 7.26 1.47 5.04-5.55-2.09 2.94 10.49-3.19 8.26-5.99-3.58 4.06 10.49-3.79 8.07 1.23-9.35-2.68-18.67-3.61-28.08-.42-4.88 4.57-11.58 1.83-15.24-3.71 10.18-12.5 17.22-20.05 24.07 5.02 2.68 7.17 7.73 7.7 13.13 3.47 11.54 12.79 19.67 19.67 29.15 2.15 4.44 7.39 4.05 11.4 5.37 14.06 3.12 24.97 15.12 28.83 28.72 5.81 13.74 7.66 28.53 9.37 43.2v29.23c-4.21 19.23-1.5 39.1-4.97 58.43-5.84 23.1 3.14 46.42.3 69.8 4.91 45.45-13.06 88.7-16.36 133.52-8.02 16.62-5.15 35.58-7.43 53.37-5.71 11.46-9.44 24.48-19.83 32.66-5 .83-9.43 3.37-14.37 4.46.6 28.64 2.89 57.24 4.47 85.87 2.91 50.58 1.16 100.73 5.12 151.21-.02 9.3-2.74 18.89.16 27.99.6 1.59 4.63 4.13 1.3 4.85-10.61.84-21.36 2.31-31.99.98-7.98 1.53 2.02-9.72-7.62-7.24-2.3 22.07-7.9 43.77-14.83 64.82 2.95 20.97 8.13 42.79 20.19 60.69 7.25 6.58 10.59 16.06 16.02 24.15 5.27 16.37-9.3 29.55-13.25 44.51-2.86 13.26-1.97 26.95-2.07 40.42-2.39.41-4.98.82-7.24-.24-3.96-15.74 8.37-41.07-10.08-50.71-10.61 7.13-9.95 22.7-18.65 31.52-3.12 9.15-15.82 6.94-21 13.75-2.03 6.52-.15 25.04-11.79 16.3-.25-6.6 5.52-20.88-6.02-18.73-8.18 10.47-2.47 28.02-13.25 37.5-14.82 6.59-31.69 4.9-47.37 7.88h-4.96s-.02.03-4.6 0h-5.71c-4.63-.75-9.53-1.17-13.7-3.48-4.29-5.87-.68-12.54-3.31-18.62-.9-12.38 14.42-14.04 19.53-23.21 4.09-7 14.42-8.14 15.95-16.81 18.08-50.42 48.95-99.3 43.87-154.96-14.12-23.04-29.8-45.32-41.42-69.81-6.72-11.69-14.68-22.68-22.9-33.31-9.9 
2.56-13.97-9.35-23.21-9.72-9.29.46-18-2.92-27.09-3.65 5.61-45.64 7.53-91.59 13.51-137.17 5.56-34.55 5.71-68.64 9.09-103.63 3.61-31.39 6.36-63.04 10.57-94.26-.44-7.88 7.17-19.22 2.9-28.52-5.18 8.56-11.91 16.39-15.02 26.06-1.45 9.74.45 20-3.3 29.38-4.07 10.99-11.82 22.96-5.99 34.73 1.69 5.3 9.13 7.61 7.96 13.89-6.92 2.6-13.55-4.98-17.65-9.79.59 9.15 13.45 10.95 15.72 19.37-.47 3.54-4.34 3.39-7.03 3.06-1.15 8.85-13.62-3.65-14.87 5.33-8.42-.39-15.28-8.46-20.32-14.84-8.39-8.15-2.45-20.95-6.6-30.67v-4.7c10.65-19.41 22.28-38.76 25.4-61.17 9.99-29.64 22.11-58.6 28.45-89.35 9.09-20.13 6.54-42.25 8.77-63.69 2.61-20.5 1.85-39.91 4.63-58.71 6.42-11.48 13.18-22.89 19.06-34.63 7.68-8.14 6.32-23.55 14.14-33.01 2.35-10.34 3.93-21.24 7.69-31.31 5.45-12.06 19.7-15.78 31.76-16.95 10.99 3.35 15.09-9.98 23.91-13.67 8.95-4.07 2.75-12.06 5.25-19.44-9.98 2.46 2.62-11.96-.29-15.74-4.5-.5.56 8.28-4.08 8.4-2.8-.31-.47-3.77-.18-5.39 3.21-4.35-6-2.03-2.97-5.64 1.96-9.66 7.09-18.28 10.3-27.57 4.26-2.6-4.76-2.76-4.75-6.7-2.88-11.29-4.94-23.64-3.45-35.09 11.8-13.66 4.09-33.9 14.3-48.39 2.96-7.43 9.56-12.05 14.57-17.97.58-2.08-1.08-1.71-2.45-1.16 3.86-3.41 9.47-3.91 13.5-6.96 4.19-9.19 18.7-4.1 23.95-8.97m26.87 376.08c-3.57 7.7-4.63 16.46-10.53 23.14-2.56 16.66-1.25 33.96-4.34 50.5 1.1 5.23 1.31 12.93-4.69 15.27-3.96-1.22-.65-6.18-.56-8.93 1-15.57 2.44-31.02 4.55-46.39-13.66 12.06-7.35 34.11-7.47 50.6 2.48 4.24-5.39 8.9-6.41 3.75.37-11.54-.13-23.05 1.85-34.5-2.38 3.66-3.32 8.64-3.56 12.47-9.39 2.22 4.76 10.24-2.38 14.37-8.4 4.94 1.62 14.71-4.54 21.13 2.41 5.86 8.08 11.65 5.27 19.06-3.14 5.12 5.21 7.9 3.65 13.38 8.02 17.32 14.1 35.59 25.04 51.36 5.11 9.37 7.14 20.04 9.75 30.18 6.66-8.25 7.47-19.49 8.05-29.65.27-36.01-5.69-71.8-6.2-107.78-3.5-25.77-1.7-53.15-7.51-77.96Zm-17.37 36.26q.33.33 0 0Zm-5.35 39.48q.33.33 0 0ZM307.75 638.7q.33.33 0 0Zm5.35 88.34q.33.33 0 0Zm-16.05 158.58q.33.33 0 0Zm43.49 309.13q.33.32 0 0Z"/></svg>

After

Width:  |  Height:  |  Size: 3.8 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 12 KiB

View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 850 1250"><path fill="#fff" d="m280.5 271.75 1.28-3.04c.69-5.89-1.28 3.04-1.28 3.04Zm-17.82 37.38-1.98.23s-.6 2.67 1.98-.23Zm334.89 88.5c2.08-31.5-58.57-175.18-58.57-175.18-2.76-17.08-36.92-29.42-36.92-29.42l4.14-3.37c-2.7-27.39-31.34-39.72-31.34-39.72-13.67-2.06-6.15-8.9-6.15-8.9.66-108.74-93.4-119.08-93.4-119.08-71.66-13.6-86.65 104.73-86.65 104.73 4.77 17.06-1.33 36.23-1.33 36.23-5.46 13.71-11.63 10.97-11.63 10.97 7.51 4.08 8.91 0 8.91 0s9.52-4.1 2.04.7c-7.51 4.74-3.44 14.34-3.44 14.34l8.2-12.27-5.48 36.26c-6.79 23.25-31.36 19.13-31.36 19.13l6.13 3.45c14.01 13.91 24.13 9.05 26.18 7.73-.09.64-.16 1.17-.22 1.79-2.7 17.78-7.91 21.55-9.98 22.36 4.33-.65 7.94-5.26 7.94-5.26l-2.83 6.57c-.13 1-.32 2.4-.6 4.4-2.04 13.64-26.61 17.78-26.61 17.78 9.57 8.24 18.43 0 18.43 0l-3.45 8.24c-3.42 5.49-5.5 8.46-6.87 9.99l7.55-1.1-6.09 11.58c-32.71 26.65-10.24 34.96-10.24 34.96l2.05-4.17c2.04-15.75 6.83-7.52 6.83-7.52l-4.09 14.36C263.5 381.2 243 398.95 243 398.95s-4.04 0 2.06.71c6.15.7 8.87-2.7 8.87-2.7 5.72-1.35 7.72-3.21 8.45-4.38l-2.98 7.79c-13.66 6.15-12.28 35.58-12.28 35.58 12.96 11.6 6.13 29.44 6.13 29.44v8.16c-2.04 2.7.68 11.62.68 11.62 0 6.16-2.72 8.25-2.72 8.25-5.47 4.73-2.05 31.46-2.05 31.46l2.05 1.33c-7.5 18.45-2.73 18.45-2.73 18.45 1.32 2.74 18.42 30.84 18.42 30.84l-14.33 64.3c8.16 10.24 19.11 24.64 19.11 24.64-4.79 4.79 11.56 104.68 11.56 104.68l.66 7.55c0 34.9-7.46 97.81-7.46 97.81-19.11 39.01-7.51 123.16-7.51 123.16s-4.2-48.22 1.33 8.21c5.48 55.39 2.1 93.74 2.1 93.74-4.13 12.28-7.53 36.28-7.53 36.28-.65 13.01-6.11 25.99-6.11 25.99-8.93 55.43-5.47 59.47-5.47 59.47 3.42 15.13 40.95 3.58 41.61 3.43 4.42-1.23 6.79-19.15 6.79-19.15s2.72-47.16 8.2-59.56c.25-.5.43-1.01.65-1.58 4.13-11.85-3.38-24.43-3.38-24.43-5.47-13.68 3.4-62.87 3.4-62.87 12.96-36.94 23.22-121.16 23.22-121.16-2.78-19.07 4.05-59.47 4.05-59.47 7.51-21.21 14.31-64.36 14.31-64.36 2.75-4.73 20.48-136.09 20.48-136.09l40.89.63c3.43 15.75 35.52 166.34 35.52 
166.34 10.2 20.47 19.04 60.43 19.11 62.88.65 24.64 13.61 78.03 13.61 78.03 16.31 36.93 31.34 111.53 31.34 111.53-5.43 19.17-1.32 34.22-1.32 34.22-2.07 47.19 1.32 79.99 1.32 79.99 0 19.16 52.5 8.24 52.5 8.24 13.64-19.86-10.21-94.4-10.21-94.4l-2.01-8.88c-2.78-18.56-10.94-37.02-10.94-37.02-1.38-6.77-2.03-80.67-2.03-80.67 6.8-66.39-5.47-102.65-5.47-102.65-2.04-14.67-14.32-49.24-14.32-49.24-1.33-17.09-8.19-56.1-8.19-56.1 0-12.99 4.09-114.29 4.09-114.29l3.43.68c4.1-7.52 6.12-75.25 6.12-75.25 9.5-.72 13.65-7.53 13.65-7.53 8.16-5.48 7.5-17.1 7.5-17.1 11.55 0 10.93-5.52 10.93-5.52l47.72-131.31c8.85-19.14-8.22-41.06-8.22-41.06Zm-263.15 25.28c-8.19 8.9-6.91 10.99-6.91 10.99s-28.6 67.73-35.43 68.37c-1.17.14-1.84.14-2.35.14l-3.07 1.9s13.59-62.22 28.61-84.12c0 0 5.43-32.15 5.43-38.97s-5.43-23.27-5.43-23.27 6.18 17.74 15.02 21.2c0 0 12.27 34.9 4.13 43.76Zm208.59-2.72s-2.67 4.82-2.67 10.94c0 0-4.29 62.94-8.23 78.53-.18.9-.43 1.67-.64 2.23-4.09 10.27-7.5 19.12-6.15 22.56 0 0-15.69 19.88-8.19 23.3l3.4 3.37-10.91-1.32s-8.16-8.24-7.56-24.68c0 0-5.41-6.16-6.78-5.43l-4.06-22.6 8.83-3.39s-9.49-57.5-16.37-69.79c0 0 3.47 6.84 10.28 17.74l5.45 7.56s8.18-4.75 6.81-17.11c0 0-6.16 1.36-8.87-8.19-2.69-9.59.65-2.09.65-2.09s6.89-.68 8.22-12.26c0 0 7.51 10.22 19.78 0l6.12-8.21 3.38-6.72c-1.6-.87-4.16-2.7-8.75-6.31 0 0-10.3-4.08-11.67-19.86 0 0-4.06-12.98 8.9-18.44 0 0 8.14 41.72 15.67 38.96 0 0 6.78 4.79 2.03 11 0 0 4.11 3.42 1.33 10.22Z"/></svg>

After

Width:  |  Height:  |  Size: 3.4 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 11 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 21 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 11 KiB

View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 850 1250"><path fill="#fff" d="M466.93 27.82c-15.66 1.45-31.72 10.54-37.85 25.54-18.09 21.05-26.17 48.37-32.01 74.96-3.51 18.38-7.2 37.27-3.34 55.91-3.7 13.66 1.39 26.72 7.6 38.63-13.02 8.59-29.85 5.55-42.96 13.84-9.79 4.43-13.4 14.87-20.36 22.11-6.35 20.73-11.87 41.76-18.5 62.45-7.46 21.41 3.67 42.53 6.74 63.63 7.84 21.04-2.12 42.36-1.76 64.16.16 32.55 4.66 64.86 6.74 97.29-.27 26.6 7.69 52.2 12.32 78.16-.26 18.72-.7 37.49-1.64 56.22 1.23-.14 2.46-.35 3.7-.47-3.48 27.06-6.11 54.21-9.09 81.32-4.73 25.18-11.31 50.08-19.74 74.27-8.56 20.74-22.31 38.88-30.68 59.72-13.22 33.46-15.81 69.73-22.16 104.84-5.68 29.43-7.3 60.89-23.34 87.1-4.25 5.87-.84 13.29-3.54 19.54-8.58 26.56-9.02 55.48-17.99 81.61v13.23c.81 9.81 10.12 15.05 17.52 19.85h5.79c12.95-4.03 34.45-4.08 35.62-21.49 2.04-47.63-5.97-98.67 16.41-142.82 17.14-33.74 29.84-69.79 50.31-101.79 15.67-27.34 12.89-61.44 30.8-87.59 20-31.1 24.93-68.71 39.95-102.12 11.48-33.02 33.56-61.5 43.42-95.06 4.23-9.51 8.16-19.16 12.31-28.69 10.02.94 20 2.55 30.13 1.91-1 19.68 5.58 38.54 8.77 57.73 9.07 41.46 4.02 84.5 10.94 126.12 13.2 30.31-.18 63.63 8.47 94.86 6.99 23.35 9.13 47.69 13.01 71.66 2.81 22.16 8.22 44.54 4.04 66.91-.9 34.14 3.74 68.19 3.67 102.47-.18 16.93 20.74 17.49 32.94 20.41 18.57.41 29.97-22.83 19.48-37.76-4.01-23.4-9.86-46.53-16.62-69.31.2-6.42 1.84-12.97.09-19.34-5.52-23.95 1.42-48.22 3.97-72.09 3.87-36.03 13.4-72.06 8.63-108.48-3.32-23.5-13.53-46.03-11.62-70.19-1.32-47.99 13.51-94 20.51-141 4.1-15.14 13.32-28.62 17.18-43.88 1.07-12.69 5.47-24.56 6.13-37.26V570.8c-12.08-40.93-15.67-83.8-25.17-125.32-3.82-16.52-17.39-32.79-10.9-49.8.93-19.61 9.87-38.3 8.3-58.05-3.37-20.42-14.29-38.49-20.03-58.23-5.73-22.87-25.09-43.12-49.71-44.28 1.84-11.98-1.38-24.07-.29-36.05 3.29 7.3.67 20.02 6.41 23.63 1.35-7.66 6.67-14.56 11.32-21.07.29-28.94-1.32-59.37-12.24-86.3 2.32-6.49 
1.37-14.58-4.76-18.61-2.53-7.63-.45-16.27-2.97-24.07-3.01-13.15-14.46-21.41-21.87-31.94-10.33-7.46-22.97-11.17-35.52-12.88h-6.55Zm67.13 332.15c3.24 11.16 6.31 22.82 9.57 34.14-.09 8.8 1.87 17.82 6.39 25.41 3.01 12.81 2.84 26.15 6.28 38.88.36 1.49-1.45-.19-1.54-.77-3.73-11.87-15.18-19.29-19.05-31.15-6.5-21.7-.75-46.23-1.66-66.52Zm-160.55 46.86c7.07 22.61 19.13 44.72 16.24 69.22-3.26 21.6-2.35 43.68-9.25 64.66-8.87 8.34-13.94 21.36-18.43 30.89-6.84-45.45 12-90.83 8.78-136.64-1.45-9.5.73-18.89 2.65-28.13Z"/></svg>

After

Width:  |  Height:  |  Size: 2.4 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 10 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 5.4 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 12 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 5.9 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 12 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 6.1 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 11 KiB

After

Width:  |  Height:  |  Size: 2.9 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 12 KiB

View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 850 1250"><path fill="#fff" d="M307.41 127.49c.55-.43.97-.95 1.3-1.52-1.88 1.9-3.08 2.75-1.29 1.52Zm318.81 989.97c-7.54-14.53-27.11-25.22-26.81-54.03.28-28.77-8.44-95.16-8.72-97.28-.31-2.02-9.07-59.61-20.77-78.88-11.78-19.21-28.33-55.7-28.33-55.7s-12.64-55.79-13.86-82.5c-1.18-26.7-1.8-64.37-1.8-64.37l9.96-.58s-15.05-78.89-25.32-94.01c-10.18-15.1-13.22-61.13-12.64-81.22.61-20.21 2.14-46.28 2.14-46.28s74.36-41.53 78.56-51.85c4.19-10.43-1.53-70.35-1.53-74.5s-21.39-95.74-22.85-99.94c-1.51-4.11-10.21-27-12.98-32.64-2.71-5.66-14.13-19.27-31.88-18.41 0 0 11.45-2.3 18.33 1.55 6.96 3.84-8.43-6.54-14.41-7.12-6.02-.61-6.31-2.98-9.08-4.74 0 0 14.49 4.13 16.89 5.61 2.43 1.44-4.53-27.6-14.12-35.31 0 0 11.1 8 13.54 16.9 0 0 .3-9.47-8.17-19.85-8.41-10.38-11.99-13.03-15.3-19.84 0 0 12.62-5.63 21.37 11.83 8.7 17.48 6 12.75 11.41 16.91 5.42 4.17-4.81-5.35-6.3-12.45-1.53-7.13-11.75-23.46-19.59-25.83-7.81-2.32-11.73-9.47-18.06-23.92 0 0 3.02-2.14 8.43 8.5 5.4 10.69 15.06 16.94 19.85 19.61 4.84 2.62-10.82-11.57-18.65-27.02-7.86-15.36-14.72-9.76-19.29-16.33-4.52-6.49-25.88-68.16-74.34-46.25 0 0-34.62-13.33-54.19 8.02-19.53 21.38-13.82 35.9-20.17 36.5-6.32.56-15.94-.95-21.96 3.54-5.99 4.43-6.57 11.28-6.57 11.28s3.54-4.16 10.78-8.02c7.25-3.84 15.06-.29 19.28-2.65 0 0-5.74 12.19-9.35 13.9-3.6 1.79-10.21 3.9-10.49 15.46-.25 9.85 1.4 16.92-.52 20.41 2.28-2.28 5.58-6.28 5.94-10.34 0 0 1.48 15.43-1.85 18.09-3.27 2.68-12.04 7.09-12.63 10.08-.57 3.01 8.14 6.52 9.63 11.84 1.26 4.54 3.58-3.02 2.87-8.21 1.61 4.21 4.18 10.94 3.46 12.35-1.18 2.39-3.92 8.63-4.8 10.1-.9 1.52.6-2.91-3.33 11-3.92 13.98-2.41 17.5 2.74 9.19 5.1-8.3 2.05-15.98 6.89-22.25 4.84-6.23 10.55-13.35 7.23-1.46-3.31 11.83-8.42 26.64-13.86 30.53-5.4 3.88-2.39 8.28-1.48 10.39.91 2.02 9.02-10.71 12.03-17.21 2.99-6.52 0 15.69-5.12 19.23-5.11 3.63-16.84 1.25-16.56 8.33.29 7.13 5.71 9.22 7.79 9.22s-4.78-4.46-.88-8.89c3.95-4.48 13.25-3.91 16.55-10.39 3.34-6.53 3.05 3.26 1.19 6.26-1.79 
2.91-1.79 5.9-6.28 9.15-4.53 3.23 5.42 4.19 11.42-.57 0 0-6 10.1-31.27 21.38-25.33 11.2-34.06 60.2-34.06 60.2s-13.22 55.14-23.19 64.31-12.34 10.45-19.88 82.76c0 0-1.76 6.81 9.38 10.41 0 0 50.56 29.96 59.33 31.42 0 0-27.74 91.59-42.47 121.27-14.76 29.7-15.38 51.58-15.38 51.58l31.95 4.52s12.3 102.58 9.63 140.86c-2.71 38.23-7.55 62.26-9.63 69.11-2.14 6.8-11.51 42.34-5.78 78.53 5.78 36.22 6.96 78.62 8.16 88.07 1.19 9.49 6.02 38.57 5.4 53.12-.56 14.55-5.4 22.57-9.61 29.09-4.25 6.5-4.25 28.42-4.25 28.42s-2.68 51.04 1.84 57.81c4.5 6.86 32.8 14.6 41.56 12.18 8.73-2.38 19.83-11.81 20.74-19.53.89-7.7-8.76-25.87-8.42-42.45.29-16.59-.59-42.72-.59-45.65s-4.25-16.04-3.62-32.63c.62-16.63 9.32-66.12 17.75-101.45 8.42-35.21 6.63-86.86 6.93-90.43.29-3.56.29-11.26 11.13-42.98 10.86-31.72 12.04-71.5 13.83-76.54 1.82-5 16.55-85.71 16.55-85.71l31.92 1.19s37.03 118.34 42.18 126.65c5.06 8.28 13.2 34.69 36.71 70 0 0 7.48 10.38 23.78 67.57 16.25 57.22 27.97 56.65 42.45 98.48 14.44 41.81 10.53 81.83 10.53 81.83s-3.61 35.64-18.7 52.5c-14.98 16.92-18.04 28.81-14.98 30.59 2.94 1.78 76.42 9.74 83.62-20.49 7.26-30.27 3.34-36.2 6.64-44.19 3.27-8.06 13.79-22.26 6.33-36.78v.02Zm-106-788.91 13.23 54.55-18.03 12.2 4.8-66.75ZM311.91 451.93l-35.54-24.89 42.45-112.69c-11.17 54.28 16.24 69.09 16.24 69.09l2.73 13.39c3.9 28.41-25.89 55.11-25.89 55.11Z"/></svg>

After

Width:  |  Height:  |  Size: 3.3 KiB

Some files were not shown because too many files have changed in this diff Show more