From c38660d209d134b7b848721244ce31276f856990 Mon Sep 17 00:00:00 2001 From: InfiniteTF Date: Mon, 12 Apr 2021 01:04:40 +0200 Subject: [PATCH] Add phash generation and dupe checking (#1158) --- go.mod | 1 + go.sum | 4 + graphql/documents/data/scene-slim.graphql | 2 + graphql/documents/data/scene.graphql | 1 + graphql/documents/queries/scene.graphql | 6 + graphql/schema/schema.graphql | 3 + graphql/schema/types/metadata.graphql | 3 + graphql/schema/types/scene.graphql | 2 + pkg/api/resolver_model_scene.go | 10 + pkg/api/resolver_query_find_scene.go | 15 + pkg/api/urlbuilders/scene.go | 4 + pkg/database/database.go | 2 +- pkg/database/migrations/20_phash.up.sql | 1 + pkg/manager/generator_phash.go | 121 ++++ pkg/manager/jsonschema/scene.go | 1 + pkg/manager/manager_tasks.go | 25 +- pkg/manager/task_generate_phash.go | 62 ++ pkg/manager/task_scan.go | 11 + pkg/models/mocks/SceneReaderWriter.go | 24 + pkg/models/model_scene.go | 2 + pkg/models/scene.go | 1 + pkg/scene/export.go | 4 + pkg/scene/export_test.go | 4 + pkg/scene/import.go | 5 + .../stashbox/graphql/generated_client.go | 327 ++++----- .../stashbox/graphql/generated_models.go | 345 ++++++++-- pkg/scraper/stashbox/stash_box.go | 18 +- pkg/sqlite/scene.go | 64 ++ pkg/utils/phash.go | 57 ++ .../src/components/Changelog/versions/v070.md | 1 + ui/v2.5/src/components/Help/Manual.tsx | 6 + ui/v2.5/src/components/List/styles.scss | 8 + .../SceneDetails/SceneFileInfoPanel.tsx | 14 + .../components/Scenes/SceneGenerateDialog.tsx | 8 + ui/v2.5/src/components/Settings/Settings.tsx | 7 + .../Settings/SettingsDuplicatePanel.tsx | 270 ++++++++ .../SettingsTasksPanel/GenerateButton.tsx | 8 + .../SettingsTasksPanel/SettingsTasksPanel.tsx | 10 + ui/v2.5/src/components/Settings/styles.scss | 53 ++ .../components/Tagger/StashSearchResult.tsx | 16 +- ui/v2.5/src/docs/en/Deduplication.md | 9 + .../corona10/goimagehash/.gitignore | 14 + .../corona10/goimagehash/AUTHORS.md | 5 + .../corona10/goimagehash/CODEOWNERS | 1 + 
.../corona10/goimagehash/Gopkg.lock | 17 + .../corona10/goimagehash/Gopkg.toml | 34 + .../github.com/corona10/goimagehash/LICENSE | 25 + .../github.com/corona10/goimagehash/README.md | 93 +++ vendor/github.com/corona10/goimagehash/doc.go | 5 + .../corona10/goimagehash/etcs/doc.go | 5 + .../corona10/goimagehash/etcs/utils.go | 61 ++ vendor/github.com/corona10/goimagehash/go.mod | 3 + vendor/github.com/corona10/goimagehash/go.sum | 2 + .../corona10/goimagehash/hashcompute.go | 183 ++++++ .../corona10/goimagehash/imagehash.go | 294 +++++++++ .../corona10/goimagehash/imagehash18.go | 13 + .../corona10/goimagehash/imagehash19.go | 9 + .../corona10/goimagehash/transforms/dct.go | 75 +++ .../corona10/goimagehash/transforms/doc.go | 5 + .../corona10/goimagehash/transforms/pixels.go | 39 ++ vendor/github.com/nfnt/resize/.travis.yml | 7 + vendor/github.com/nfnt/resize/LICENSE | 13 + vendor/github.com/nfnt/resize/README.md | 151 +++++ vendor/github.com/nfnt/resize/converter.go | 438 +++++++++++++ vendor/github.com/nfnt/resize/filters.go | 143 ++++ vendor/github.com/nfnt/resize/nearest.go | 318 +++++++++ vendor/github.com/nfnt/resize/resize.go | 620 ++++++++++++++++++ vendor/github.com/nfnt/resize/thumbnail.go | 55 ++ vendor/github.com/nfnt/resize/ycc.go | 387 +++++++++++ vendor/modules.txt | 6 + 70 files changed, 4342 insertions(+), 214 deletions(-) create mode 100644 pkg/database/migrations/20_phash.up.sql create mode 100644 pkg/manager/generator_phash.go create mode 100644 pkg/manager/task_generate_phash.go create mode 100644 pkg/utils/phash.go create mode 100644 ui/v2.5/src/components/Settings/SettingsDuplicatePanel.tsx create mode 100644 ui/v2.5/src/docs/en/Deduplication.md create mode 100644 vendor/github.com/corona10/goimagehash/.gitignore create mode 100644 vendor/github.com/corona10/goimagehash/AUTHORS.md create mode 100644 vendor/github.com/corona10/goimagehash/CODEOWNERS create mode 100644 vendor/github.com/corona10/goimagehash/Gopkg.lock create mode 100644 
vendor/github.com/corona10/goimagehash/Gopkg.toml create mode 100644 vendor/github.com/corona10/goimagehash/LICENSE create mode 100644 vendor/github.com/corona10/goimagehash/README.md create mode 100644 vendor/github.com/corona10/goimagehash/doc.go create mode 100644 vendor/github.com/corona10/goimagehash/etcs/doc.go create mode 100644 vendor/github.com/corona10/goimagehash/etcs/utils.go create mode 100644 vendor/github.com/corona10/goimagehash/go.mod create mode 100644 vendor/github.com/corona10/goimagehash/go.sum create mode 100644 vendor/github.com/corona10/goimagehash/hashcompute.go create mode 100644 vendor/github.com/corona10/goimagehash/imagehash.go create mode 100644 vendor/github.com/corona10/goimagehash/imagehash18.go create mode 100644 vendor/github.com/corona10/goimagehash/imagehash19.go create mode 100644 vendor/github.com/corona10/goimagehash/transforms/dct.go create mode 100644 vendor/github.com/corona10/goimagehash/transforms/doc.go create mode 100644 vendor/github.com/corona10/goimagehash/transforms/pixels.go create mode 100644 vendor/github.com/nfnt/resize/.travis.yml create mode 100644 vendor/github.com/nfnt/resize/LICENSE create mode 100644 vendor/github.com/nfnt/resize/README.md create mode 100644 vendor/github.com/nfnt/resize/converter.go create mode 100644 vendor/github.com/nfnt/resize/filters.go create mode 100644 vendor/github.com/nfnt/resize/nearest.go create mode 100644 vendor/github.com/nfnt/resize/resize.go create mode 100644 vendor/github.com/nfnt/resize/thumbnail.go create mode 100644 vendor/github.com/nfnt/resize/ycc.go diff --git a/go.mod b/go.mod index c4d9865bf..d99b84bf4 100644 --- a/go.mod +++ b/go.mod @@ -6,6 +6,7 @@ require ( github.com/antchfx/htmlquery v1.2.3 github.com/chromedp/cdproto v0.0.0-20200608134039-8a80cdaf865c github.com/chromedp/chromedp v0.5.3 + github.com/corona10/goimagehash v1.0.3 github.com/dgrijalva/jwt-go v3.2.0+incompatible github.com/disintegration/imaging v1.6.0 github.com/fvbommel/sortorder v1.0.2 diff 
--git a/go.sum b/go.sum index d08261d70..d81e4867b 100644 --- a/go.sum +++ b/go.sum @@ -83,6 +83,8 @@ github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3Ee github.com/coreos/go-systemd v0.0.0-20181012123002-c6f51f82210d/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= +github.com/corona10/goimagehash v1.0.3 h1:NZM518aKLmoNluluhfHGxT3LGOnrojrxhGn63DR/CZA= +github.com/corona10/goimagehash v1.0.3/go.mod h1:VkvE0mLn84L4aF8vCb6mafVajEb6QYMHl2ZJLn0mOGI= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM= github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= @@ -573,6 +575,8 @@ github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007 h1:Ohgj9L0EYOgXxkDp+ github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007/go.mod h1:wKCOWMb6iNlvKiOToY2cNuaovSXvIiv1zDi9QDR7aGQ= github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86/go.mod h1:kHJEU3ofeGjhHklVoIGuVj85JJwZ6kWPaJwCIxgnFmo= github.com/neelance/sourcemap v0.0.0-20151028013722-8c68805598ab/go.mod h1:Qr6/a/Q4r9LP1IltGz7tA7iOK1WonHEYhu1HRBA7ZiM= +github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 h1:zYyBkD/k9seD2A7fsi6Oo2LfFZAehjjQMERAvZLEDnQ= +github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8= github.com/nicksnyder/go-i18n v1.10.0/go.mod h1:HrK7VCrbOvQoUAQ7Vpy7i87N7JZZZ7R2xBGjv0j365Q= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= diff --git 
a/graphql/documents/data/scene-slim.graphql b/graphql/documents/data/scene-slim.graphql index 473012d55..f427eb904 100644 --- a/graphql/documents/data/scene-slim.graphql +++ b/graphql/documents/data/scene-slim.graphql @@ -10,6 +10,7 @@ fragment SlimSceneData on Scene { o_counter organized path + phash file { size @@ -29,6 +30,7 @@ fragment SlimSceneData on Scene { webp vtt chapters_vtt + sprite } scene_markers { diff --git a/graphql/documents/data/scene.graphql b/graphql/documents/data/scene.graphql index 1f8061e06..491983b4f 100644 --- a/graphql/documents/data/scene.graphql +++ b/graphql/documents/data/scene.graphql @@ -10,6 +10,7 @@ fragment SceneData on Scene { o_counter organized path + phash file { size diff --git a/graphql/documents/queries/scene.graphql b/graphql/documents/queries/scene.graphql index 87bb3fd7d..daeabbaaf 100644 --- a/graphql/documents/queries/scene.graphql +++ b/graphql/documents/queries/scene.graphql @@ -16,6 +16,12 @@ query FindScenesByPathRegex($filter: FindFilterType) { } } +query FindDuplicateScenes($distance: Int) { + findDuplicateScenes(distance: $distance) { + ...SlimSceneData + } +} + query FindScene($id: ID!, $checksum: String) { findScene(id: $id, checksum: $checksum) { ...SceneData diff --git a/graphql/schema/schema.graphql b/graphql/schema/schema.graphql index 29a1e5681..048aa37d4 100644 --- a/graphql/schema/schema.graphql +++ b/graphql/schema/schema.graphql @@ -9,6 +9,9 @@ type Query { findScenesByPathRegex(filter: FindFilterType): FindScenesResultType! + """ Returns any groups of scenes that are perceptual duplicates within the queried distance """ + findDuplicateScenes(distance: Int): [[Scene!]!]! + """Return valid stream paths""" sceneStreams(id: ID): [SceneStreamEndpoint!]! 
diff --git a/graphql/schema/types/metadata.graphql b/graphql/schema/types/metadata.graphql index f83f3ad78..600d8f9c8 100644 --- a/graphql/schema/types/metadata.graphql +++ b/graphql/schema/types/metadata.graphql @@ -7,6 +7,7 @@ input GenerateMetadataInput { previewOptions: GeneratePreviewOptionsInput markers: Boolean! transcodes: Boolean! + phashes: Boolean! """scene ids to generate for""" sceneIDs: [ID!] @@ -42,6 +43,8 @@ input ScanMetadataInput { scanGenerateImagePreviews: Boolean """Generate sprites during scan""" scanGenerateSprites: Boolean + """Generate phashes during scan""" + scanGeneratePhashes: Boolean } input CleanMetadataInput { diff --git a/graphql/schema/types/scene.graphql b/graphql/schema/types/scene.graphql index f37c0bfd9..84d2fdf79 100644 --- a/graphql/schema/types/scene.graphql +++ b/graphql/schema/types/scene.graphql @@ -16,6 +16,7 @@ type ScenePathsType { webp: String # Resolver vtt: String # Resolver chapters_vtt: String # Resolver + sprite: String # Resolver } type SceneMovie { @@ -35,6 +36,7 @@ type Scene { organized: Boolean! o_counter: Int path: String! + phash: String file: SceneFileType! # Resolver paths: ScenePathsType! 
# Resolver diff --git a/pkg/api/resolver_model_scene.go b/pkg/api/resolver_model_scene.go index 960c561ff..dcec7fa86 100644 --- a/pkg/api/resolver_model_scene.go +++ b/pkg/api/resolver_model_scene.go @@ -83,6 +83,7 @@ func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.S streamPath := builder.GetStreamURL() webpPath := builder.GetStreamPreviewImageURL() vttPath := builder.GetSpriteVTTURL() + spritePath := builder.GetSpriteURL() chaptersVttPath := builder.GetChaptersVTTURL() return &models.ScenePathsType{ Screenshot: &screenshotPath, @@ -91,6 +92,7 @@ func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.S Webp: &webpPath, Vtt: &vttPath, ChaptersVtt: &chaptersVttPath, + Sprite: &spritePath, }, nil } @@ -200,3 +202,11 @@ func (r *sceneResolver) StashIds(ctx context.Context, obj *models.Scene) (ret [] return ret, nil } + +func (r *sceneResolver) Phash(ctx context.Context, obj *models.Scene) (*string, error) { + if obj.Phash.Valid { + hexval := utils.PhashToString(obj.Phash.Int64) + return &hexval, nil + } + return nil, nil +} diff --git a/pkg/api/resolver_query_find_scene.go b/pkg/api/resolver_query_find_scene.go index 44a111646..be250e101 100644 --- a/pkg/api/resolver_query_find_scene.go +++ b/pkg/api/resolver_query_find_scene.go @@ -151,3 +151,18 @@ func (r *queryResolver) ParseSceneFilenames(ctx context.Context, filter *models. 
return ret, nil } + +func (r *queryResolver) FindDuplicateScenes(ctx context.Context, distance *int) (ret [][]*models.Scene, err error) { + dist := 0 + if distance != nil { + dist = *distance + } + if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { + ret, err = repo.Scene().FindDuplicates(dist) + return err + }); err != nil { + return nil, err + } + + return ret, nil +} diff --git a/pkg/api/urlbuilders/scene.go b/pkg/api/urlbuilders/scene.go index 57c50f3cf..5d7af407c 100644 --- a/pkg/api/urlbuilders/scene.go +++ b/pkg/api/urlbuilders/scene.go @@ -33,6 +33,10 @@ func (b SceneURLBuilder) GetSpriteVTTURL() string { return b.BaseURL + "/scene/" + b.SceneID + "_thumbs.vtt" } +func (b SceneURLBuilder) GetSpriteURL() string { + return b.BaseURL + "/scene/" + b.SceneID + "_sprite.jpg" +} + func (b SceneURLBuilder) GetScreenshotURL(updateTime time.Time) string { return b.BaseURL + "/scene/" + b.SceneID + "/screenshot?" + strconv.FormatInt(updateTime.Unix(), 10) } diff --git a/pkg/database/database.go b/pkg/database/database.go index e4099b073..8082e0978 100644 --- a/pkg/database/database.go +++ b/pkg/database/database.go @@ -23,7 +23,7 @@ import ( var DB *sqlx.DB var WriteMu *sync.Mutex var dbPath string -var appSchemaVersion uint = 19 +var appSchemaVersion uint = 20 var databaseSchemaVersion uint const sqlite3Driver = "sqlite3ex" diff --git a/pkg/database/migrations/20_phash.up.sql b/pkg/database/migrations/20_phash.up.sql new file mode 100644 index 000000000..c1c889956 --- /dev/null +++ b/pkg/database/migrations/20_phash.up.sql @@ -0,0 +1 @@ +ALTER TABLE `scenes` ADD COLUMN `phash` blob; diff --git a/pkg/manager/generator_phash.go b/pkg/manager/generator_phash.go new file mode 100644 index 000000000..4e711560b --- /dev/null +++ b/pkg/manager/generator_phash.go @@ -0,0 +1,121 @@ +package manager + +import ( + "fmt" + "image" + "image/color" + "math" + "os" + "sort" + + "github.com/corona10/goimagehash" + "github.com/disintegration/imaging" + 
"github.com/fvbommel/sortorder" + + "github.com/stashapp/stash/pkg/ffmpeg" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/utils" +) + +type PhashGenerator struct { + Info *GeneratorInfo + + VideoChecksum string + Columns int + Rows int +} + +func NewPhashGenerator(videoFile ffmpeg.VideoFile, checksum string) (*PhashGenerator, error) { + exists, err := utils.FileExists(videoFile.Path) + if !exists { + return nil, err + } + + generator, err := newGeneratorInfo(videoFile) + if err != nil { + return nil, err + } + + return &PhashGenerator{ + Info: generator, + VideoChecksum: checksum, + Columns: 5, + Rows: 5, + }, nil +} + +func (g *PhashGenerator) Generate() (*uint64, error) { + encoder := ffmpeg.NewEncoder(instance.FFMPEGPath) + + sprite, err := g.generateSprite(&encoder) + if err != nil { + return nil, err + } + + hash, err := goimagehash.PerceptionHash(sprite) + if err != nil { + return nil, err + } + hashValue := hash.GetHash() + return &hashValue, nil +} + +func (g *PhashGenerator) generateSprite(encoder *ffmpeg.Encoder) (image.Image, error) { + logger.Infof("[generator] generating phash sprite for %s", g.Info.VideoFile.Path) + + // Generate sprite image offset by 5% on each end to avoid intro/outros + chunkCount := g.Columns * g.Rows + offset := 0.05 * g.Info.VideoFile.Duration + stepSize := (0.9 * g.Info.VideoFile.Duration) / float64(chunkCount) + for i := 0; i < chunkCount; i++ { + time := offset + (float64(i) * stepSize) + num := fmt.Sprintf("%.3d", i) + filename := "phash_" + g.VideoChecksum + "_" + num + ".bmp" + + options := ffmpeg.ScreenshotOptions{ + OutputPath: instance.Paths.Generated.GetTmpPath(filename), + Time: time, + Width: 160, + } + if err := encoder.Screenshot(g.Info.VideoFile, options); err != nil { + return nil, err + } + } + + // Combine all of the thumbnails into a sprite image + pattern := fmt.Sprintf("phash_%s_.+\\.bmp$", g.VideoChecksum) + imagePaths, err := utils.MatchEntries(instance.Paths.Generated.Tmp, 
pattern) + if err != nil { + return nil, err + } + sort.Sort(sortorder.Natural(imagePaths)) + var images []image.Image + for _, imagePath := range imagePaths { + img, err := imaging.Open(imagePath) + if err != nil { + return nil, err + } + images = append(images, img) + } + + if len(images) == 0 { + return nil, fmt.Errorf("images slice is empty, failed to generate phash sprite for %s", g.Info.VideoFile.Path) + } + width := images[0].Bounds().Size().X + height := images[0].Bounds().Size().Y + canvasWidth := width * g.Columns + canvasHeight := height * g.Rows + montage := imaging.New(canvasWidth, canvasHeight, color.NRGBA{}) + for index := 0; index < len(images); index++ { + x := width * (index % g.Columns) + y := height * int(math.Floor(float64(index)/float64(g.Rows))) + img := images[index] + montage = imaging.Paste(montage, img, image.Pt(x, y)) + } + + for _, imagePath := range imagePaths { + os.Remove(imagePath) + } + + return montage, nil +} diff --git a/pkg/manager/jsonschema/scene.go b/pkg/manager/jsonschema/scene.go index 79c466be6..540447757 100644 --- a/pkg/manager/jsonschema/scene.go +++ b/pkg/manager/jsonschema/scene.go @@ -39,6 +39,7 @@ type Scene struct { Title string `json:"title,omitempty"` Checksum string `json:"checksum,omitempty"` OSHash string `json:"oshash,omitempty"` + Phash string `json:"phash,omitempty"` Studio string `json:"studio,omitempty"` URL string `json:"url,omitempty"` Date string `json:"date,omitempty"` diff --git a/pkg/manager/manager_tasks.go b/pkg/manager/manager_tasks.go index 28e42022b..ff8116bda 100644 --- a/pkg/manager/manager_tasks.go +++ b/pkg/manager/manager_tasks.go @@ -222,6 +222,7 @@ func (s *singleton) Scan(input models.ScanMetadataInput) { GeneratePreview: utils.IsTrue(input.ScanGeneratePreviews), GenerateImagePreview: utils.IsTrue(input.ScanGenerateImagePreviews), GenerateSprite: utils.IsTrue(input.ScanGenerateSprites), + GeneratePhash: utils.IsTrue(input.ScanGeneratePhashes), } go task.Start(&wg) @@ -427,7 +428,7 @@ 
func (s *singleton) Generate(input models.GenerateMetadataInput) { logger.Infof("Taking too long to count content. Skipping...") logger.Infof("Generating content") } else { - logger.Infof("Generating %d sprites %d previews %d image previews %d markers %d transcodes", totalsNeeded.sprites, totalsNeeded.previews, totalsNeeded.imagePreviews, totalsNeeded.markers, totalsNeeded.transcodes) + logger.Infof("Generating %d sprites %d previews %d image previews %d markers %d transcodes %d phashes", totalsNeeded.sprites, totalsNeeded.previews, totalsNeeded.imagePreviews, totalsNeeded.markers, totalsNeeded.transcodes, totalsNeeded.phashes) } fileNamingAlgo := config.GetVideoFileNamingAlgorithm() @@ -501,6 +502,16 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) { } go task.Start(&wg) } + + if input.Phashes { + task := GeneratePhashTask{ + Scene: *scene, + fileNamingAlgorithm: fileNamingAlgo, + txnManager: s.TxnManager, + } + wg.Add() + go task.Start(&wg) + } } wg.Wait() @@ -992,6 +1003,7 @@ type totalsGenerate struct { imagePreviews int64 markers int64 transcodes int64 + phashes int64 } func (s *singleton) neededGenerate(scenes []*models.Scene, input models.GenerateMetadataInput) *totalsGenerate { @@ -1065,6 +1077,17 @@ func (s *singleton) neededGenerate(scenes []*models.Scene, input models.Generate totals.transcodes++ } } + + if input.Phashes { + task := GeneratePhashTask{ + Scene: *scene, + fileNamingAlgorithm: fileNamingAlgo, + } + + if task.shouldGenerate() { + totals.phashes++ + } + } } //check for timeout select { diff --git a/pkg/manager/task_generate_phash.go b/pkg/manager/task_generate_phash.go new file mode 100644 index 000000000..f8ef6d6be --- /dev/null +++ b/pkg/manager/task_generate_phash.go @@ -0,0 +1,62 @@ +package manager + +import ( + "github.com/remeh/sizedwaitgroup" + + "context" + "database/sql" + + "github.com/stashapp/stash/pkg/ffmpeg" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" +) + +type 
GeneratePhashTask struct { + Scene models.Scene + fileNamingAlgorithm models.HashAlgorithm + txnManager models.TransactionManager +} + +func (t *GeneratePhashTask) Start(wg *sizedwaitgroup.SizedWaitGroup) { + defer wg.Done() + + if !t.shouldGenerate() { + return + } + + videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path, false) + if err != nil { + logger.Errorf("error reading video file: %s", err.Error()) + return + } + + sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm) + generator, err := NewPhashGenerator(*videoFile, sceneHash) + + if err != nil { + logger.Errorf("error creating phash generator: %s", err.Error()) + return + } + hash, err := generator.Generate() + if err != nil { + logger.Errorf("error generating phash: %s", err.Error()) + return + } + + if err := t.txnManager.WithTxn(context.TODO(), func(r models.Repository) error { + qb := r.Scene() + hashValue := sql.NullInt64{Int64: int64(*hash), Valid: true} + scenePartial := models.ScenePartial{ + ID: t.Scene.ID, + Phash: &hashValue, + } + _, err := qb.Update(scenePartial) + return err + }); err != nil { + logger.Error(err.Error()) + } +} + +func (t *GeneratePhashTask) shouldGenerate() bool { + return !t.Scene.Phash.Valid +} diff --git a/pkg/manager/task_scan.go b/pkg/manager/task_scan.go index d35432f4a..d9ce9581c 100644 --- a/pkg/manager/task_scan.go +++ b/pkg/manager/task_scan.go @@ -31,6 +31,7 @@ type ScanTask struct { calculateMD5 bool fileNamingAlgorithm models.HashAlgorithm GenerateSprite bool + GeneratePhash bool GeneratePreview bool GenerateImagePreview bool zipGallery *models.Gallery @@ -55,6 +56,16 @@ func (t *ScanTask) Start(wg *sizedwaitgroup.SizedWaitGroup) { go taskSprite.Start(&iwg) } + if t.GeneratePhash { + iwg.Add() + taskPhash := GeneratePhashTask{ + Scene: *s, + fileNamingAlgorithm: t.fileNamingAlgorithm, + txnManager: t.TxnManager, + } + go taskPhash.Start(&iwg) + } + if t.GeneratePreview { iwg.Add() diff --git a/pkg/models/mocks/SceneReaderWriter.go 
b/pkg/models/mocks/SceneReaderWriter.go index 0e5295759..b5c3af191 100644 --- a/pkg/models/mocks/SceneReaderWriter.go +++ b/pkg/models/mocks/SceneReaderWriter.go @@ -438,6 +438,30 @@ func (_m *SceneReaderWriter) FindMany(ids []int) ([]*models.Scene, error) { return r0, r1 } +// FindDuplicates provides a mock function with given fields: distance +func (_m *SceneReaderWriter) FindDuplicates(distance int) ([][]*models.Scene, error) { + ret := _m.Called(distance) + + var r0 [][]*models.Scene + if rf, ok := ret.Get(0).(func(int) [][]*models.Scene); ok { + r0 = rf(distance) + + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([][]*models.Scene) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(int) error); ok { + r1 = rf(distance) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetCover provides a mock function with given fields: sceneID func (_m *SceneReaderWriter) GetCover(sceneID int) ([]byte, error) { ret := _m.Called(sceneID) diff --git a/pkg/models/model_scene.go b/pkg/models/model_scene.go index 40bcd43e9..514ef8cbf 100644 --- a/pkg/models/model_scene.go +++ b/pkg/models/model_scene.go @@ -29,6 +29,7 @@ type Scene struct { Bitrate sql.NullInt64 `db:"bitrate" json:"bitrate"` StudioID sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"` FileModTime NullSQLiteTimestamp `db:"file_mod_time" json:"file_mod_time"` + Phash sql.NullInt64 `db:"phash,omitempty" json:"phash"` CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"` UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"` } @@ -58,6 +59,7 @@ type ScenePartial struct { StudioID *sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"` MovieID *sql.NullInt64 `db:"movie_id,omitempty" json:"movie_id"` FileModTime *NullSQLiteTimestamp `db:"file_mod_time" json:"file_mod_time"` + Phash *sql.NullInt64 `db:"phash,omitempty" json:"phash"` CreatedAt *SQLiteTimestamp `db:"created_at" json:"created_at"` UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"` } diff --git 
a/pkg/models/scene.go b/pkg/models/scene.go index ef4485717..8e77b2497 100644 --- a/pkg/models/scene.go +++ b/pkg/models/scene.go @@ -8,6 +8,7 @@ type SceneReader interface { FindByPath(path string) (*Scene, error) FindByPerformerID(performerID int) ([]*Scene, error) FindByGalleryID(performerID int) ([]*Scene, error) + FindDuplicates(distance int) ([][]*Scene, error) CountByPerformerID(performerID int) (int, error) // FindByStudioID(studioID int) ([]*Scene, error) FindByMovieID(movieID int) ([]*Scene, error) diff --git a/pkg/scene/export.go b/pkg/scene/export.go index 9fcd6d096..5f723cdf5 100644 --- a/pkg/scene/export.go +++ b/pkg/scene/export.go @@ -27,6 +27,10 @@ func ToBasicJSON(reader models.SceneReader, scene *models.Scene) (*jsonschema.Sc newSceneJSON.OSHash = scene.OSHash.String } + if scene.Phash.Valid { + newSceneJSON.Phash = utils.PhashToString(scene.Phash.Int64) + } + if scene.Title.Valid { newSceneJSON.Title = scene.Title.String } diff --git a/pkg/scene/export_test.go b/pkg/scene/export_test.go index 2d30d9672..dc3164f13 100644 --- a/pkg/scene/export_test.go +++ b/pkg/scene/export_test.go @@ -7,6 +7,7 @@ import ( "github.com/stashapp/stash/pkg/manager/jsonschema" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stashapp/stash/pkg/utils" "github.com/stretchr/testify/assert" "testing" @@ -43,6 +44,7 @@ const ( checksum = "checksum" oshash = "oshash" title = "title" + phash = -3846826108889195 date = "2001-01-01" rating = 5 ocounter = 2 @@ -112,6 +114,7 @@ func createFullScene(id int) models.Scene { Height: models.NullInt64(height), OCounter: ocounter, OSHash: models.NullString(oshash), + Phash: models.NullInt64(phash), Rating: models.NullInt64(rating), Organized: organized, Size: models.NullString(size), @@ -147,6 +150,7 @@ func createFullJSONScene(image string) *jsonschema.Scene { Details: details, OCounter: ocounter, OSHash: oshash, + Phash: utils.PhashToString(phash), Rating: rating, Organized: 
organized, URL: url, diff --git a/pkg/scene/import.go b/pkg/scene/import.go index eee87c8a8..a1cad8808 100644 --- a/pkg/scene/import.go +++ b/pkg/scene/import.go @@ -73,6 +73,11 @@ func (i *Importer) sceneJSONToScene(sceneJSON jsonschema.Scene) models.Scene { Path: i.Path, } + if sceneJSON.Phash != "" { + hash, err := strconv.ParseUint(sceneJSON.Phash, 16, 64) + newScene.Phash = sql.NullInt64{Int64: int64(hash), Valid: err == nil} + } + if sceneJSON.Title != "" { newScene.Title = sql.NullString{String: sceneJSON.Title, Valid: true} } diff --git a/pkg/scraper/stashbox/graphql/generated_client.go b/pkg/scraper/stashbox/graphql/generated_client.go index 0cd062fc1..aaae56b8d 100644 --- a/pkg/scraper/stashbox/graphql/generated_client.go +++ b/pkg/scraper/stashbox/graphql/generated_client.go @@ -18,56 +18,67 @@ func NewClient(cli *http.Client, baseURL string, options ...client.HTTPRequestOp } type Query struct { - FindPerformer *Performer "json:\"findPerformer\" graphql:\"findPerformer\"" - QueryPerformers QueryPerformersResultType "json:\"queryPerformers\" graphql:\"queryPerformers\"" - FindStudio *Studio "json:\"findStudio\" graphql:\"findStudio\"" - QueryStudios QueryStudiosResultType "json:\"queryStudios\" graphql:\"queryStudios\"" - FindTag *Tag "json:\"findTag\" graphql:\"findTag\"" - QueryTags QueryTagsResultType "json:\"queryTags\" graphql:\"queryTags\"" - FindScene *Scene "json:\"findScene\" graphql:\"findScene\"" - FindSceneByFingerprint []*Scene "json:\"findSceneByFingerprint\" graphql:\"findSceneByFingerprint\"" - FindScenesByFingerprints []*Scene "json:\"findScenesByFingerprints\" graphql:\"findScenesByFingerprints\"" - QueryScenes QueryScenesResultType "json:\"queryScenes\" graphql:\"queryScenes\"" - FindEdit *Edit "json:\"findEdit\" graphql:\"findEdit\"" - QueryEdits QueryEditsResultType "json:\"queryEdits\" graphql:\"queryEdits\"" - FindUser *User "json:\"findUser\" graphql:\"findUser\"" - QueryUsers QueryUsersResultType "json:\"queryUsers\" 
graphql:\"queryUsers\"" - Me *User "json:\"me\" graphql:\"me\"" - SearchPerformer []*Performer "json:\"searchPerformer\" graphql:\"searchPerformer\"" - SearchScene []*Scene "json:\"searchScene\" graphql:\"searchScene\"" - Version Version "json:\"version\" graphql:\"version\"" + FindPerformer *Performer "json:\"findPerformer\" graphql:\"findPerformer\"" + QueryPerformers QueryPerformersResultType "json:\"queryPerformers\" graphql:\"queryPerformers\"" + FindStudio *Studio "json:\"findStudio\" graphql:\"findStudio\"" + QueryStudios QueryStudiosResultType "json:\"queryStudios\" graphql:\"queryStudios\"" + FindTag *Tag "json:\"findTag\" graphql:\"findTag\"" + QueryTags QueryTagsResultType "json:\"queryTags\" graphql:\"queryTags\"" + FindTagCategory *TagCategory "json:\"findTagCategory\" graphql:\"findTagCategory\"" + QueryTagCategories QueryTagCategoriesResultType "json:\"queryTagCategories\" graphql:\"queryTagCategories\"" + FindScene *Scene "json:\"findScene\" graphql:\"findScene\"" + FindSceneByFingerprint []*Scene "json:\"findSceneByFingerprint\" graphql:\"findSceneByFingerprint\"" + FindScenesByFingerprints []*Scene "json:\"findScenesByFingerprints\" graphql:\"findScenesByFingerprints\"" + QueryScenes QueryScenesResultType "json:\"queryScenes\" graphql:\"queryScenes\"" + FindEdit *Edit "json:\"findEdit\" graphql:\"findEdit\"" + QueryEdits QueryEditsResultType "json:\"queryEdits\" graphql:\"queryEdits\"" + FindUser *User "json:\"findUser\" graphql:\"findUser\"" + QueryUsers QueryUsersResultType "json:\"queryUsers\" graphql:\"queryUsers\"" + Me *User "json:\"me\" graphql:\"me\"" + SearchPerformer []*Performer "json:\"searchPerformer\" graphql:\"searchPerformer\"" + SearchScene []*Scene "json:\"searchScene\" graphql:\"searchScene\"" + Version Version "json:\"version\" graphql:\"version\"" } type Mutation struct { - SceneCreate *Scene "json:\"sceneCreate\" graphql:\"sceneCreate\"" - SceneUpdate *Scene "json:\"sceneUpdate\" graphql:\"sceneUpdate\"" - SceneDestroy bool 
"json:\"sceneDestroy\" graphql:\"sceneDestroy\"" - PerformerCreate *Performer "json:\"performerCreate\" graphql:\"performerCreate\"" - PerformerUpdate *Performer "json:\"performerUpdate\" graphql:\"performerUpdate\"" - PerformerDestroy bool "json:\"performerDestroy\" graphql:\"performerDestroy\"" - StudioCreate *Studio "json:\"studioCreate\" graphql:\"studioCreate\"" - StudioUpdate *Studio "json:\"studioUpdate\" graphql:\"studioUpdate\"" - StudioDestroy bool "json:\"studioDestroy\" graphql:\"studioDestroy\"" - TagCreate *Tag "json:\"tagCreate\" graphql:\"tagCreate\"" - TagUpdate *Tag "json:\"tagUpdate\" graphql:\"tagUpdate\"" - TagDestroy bool "json:\"tagDestroy\" graphql:\"tagDestroy\"" - UserCreate *User "json:\"userCreate\" graphql:\"userCreate\"" - UserUpdate *User "json:\"userUpdate\" graphql:\"userUpdate\"" - UserDestroy bool "json:\"userDestroy\" graphql:\"userDestroy\"" - ImageCreate *Image "json:\"imageCreate\" graphql:\"imageCreate\"" - ImageUpdate *Image "json:\"imageUpdate\" graphql:\"imageUpdate\"" - ImageDestroy bool "json:\"imageDestroy\" graphql:\"imageDestroy\"" - RegenerateAPIKey string "json:\"regenerateAPIKey\" graphql:\"regenerateAPIKey\"" - ChangePassword bool "json:\"changePassword\" graphql:\"changePassword\"" - SceneEdit Edit "json:\"sceneEdit\" graphql:\"sceneEdit\"" - PerformerEdit Edit "json:\"performerEdit\" graphql:\"performerEdit\"" - StudioEdit Edit "json:\"studioEdit\" graphql:\"studioEdit\"" - TagEdit Edit "json:\"tagEdit\" graphql:\"tagEdit\"" - EditVote Edit "json:\"editVote\" graphql:\"editVote\"" - EditComment Edit "json:\"editComment\" graphql:\"editComment\"" - ApplyEdit Edit "json:\"applyEdit\" graphql:\"applyEdit\"" - CancelEdit Edit "json:\"cancelEdit\" graphql:\"cancelEdit\"" - SubmitFingerprint bool "json:\"submitFingerprint\" graphql:\"submitFingerprint\"" + SceneCreate *Scene "json:\"sceneCreate\" graphql:\"sceneCreate\"" + SceneUpdate *Scene "json:\"sceneUpdate\" graphql:\"sceneUpdate\"" + SceneDestroy bool 
"json:\"sceneDestroy\" graphql:\"sceneDestroy\"" + PerformerCreate *Performer "json:\"performerCreate\" graphql:\"performerCreate\"" + PerformerUpdate *Performer "json:\"performerUpdate\" graphql:\"performerUpdate\"" + PerformerDestroy bool "json:\"performerDestroy\" graphql:\"performerDestroy\"" + StudioCreate *Studio "json:\"studioCreate\" graphql:\"studioCreate\"" + StudioUpdate *Studio "json:\"studioUpdate\" graphql:\"studioUpdate\"" + StudioDestroy bool "json:\"studioDestroy\" graphql:\"studioDestroy\"" + TagCreate *Tag "json:\"tagCreate\" graphql:\"tagCreate\"" + TagUpdate *Tag "json:\"tagUpdate\" graphql:\"tagUpdate\"" + TagDestroy bool "json:\"tagDestroy\" graphql:\"tagDestroy\"" + UserCreate *User "json:\"userCreate\" graphql:\"userCreate\"" + UserUpdate *User "json:\"userUpdate\" graphql:\"userUpdate\"" + UserDestroy bool "json:\"userDestroy\" graphql:\"userDestroy\"" + ImageCreate *Image "json:\"imageCreate\" graphql:\"imageCreate\"" + ImageDestroy bool "json:\"imageDestroy\" graphql:\"imageDestroy\"" + NewUser *string "json:\"newUser\" graphql:\"newUser\"" + ActivateNewUser *User "json:\"activateNewUser\" graphql:\"activateNewUser\"" + GenerateInviteCode string "json:\"generateInviteCode\" graphql:\"generateInviteCode\"" + RescindInviteCode bool "json:\"rescindInviteCode\" graphql:\"rescindInviteCode\"" + GrantInvite int "json:\"grantInvite\" graphql:\"grantInvite\"" + RevokeInvite int "json:\"revokeInvite\" graphql:\"revokeInvite\"" + TagCategoryCreate *TagCategory "json:\"tagCategoryCreate\" graphql:\"tagCategoryCreate\"" + TagCategoryUpdate *TagCategory "json:\"tagCategoryUpdate\" graphql:\"tagCategoryUpdate\"" + TagCategoryDestroy bool "json:\"tagCategoryDestroy\" graphql:\"tagCategoryDestroy\"" + RegenerateAPIKey string "json:\"regenerateAPIKey\" graphql:\"regenerateAPIKey\"" + ResetPassword bool "json:\"resetPassword\" graphql:\"resetPassword\"" + ChangePassword bool "json:\"changePassword\" graphql:\"changePassword\"" + SceneEdit Edit 
"json:\"sceneEdit\" graphql:\"sceneEdit\"" + PerformerEdit Edit "json:\"performerEdit\" graphql:\"performerEdit\"" + StudioEdit Edit "json:\"studioEdit\" graphql:\"studioEdit\"" + TagEdit Edit "json:\"tagEdit\" graphql:\"tagEdit\"" + EditVote Edit "json:\"editVote\" graphql:\"editVote\"" + EditComment Edit "json:\"editComment\" graphql:\"editComment\"" + ApplyEdit Edit "json:\"applyEdit\" graphql:\"applyEdit\"" + CancelEdit Edit "json:\"cancelEdit\" graphql:\"cancelEdit\"" + SubmitFingerprint bool "json:\"submitFingerprint\" graphql:\"submitFingerprint\"" } type URLFragment struct { URL string "json:\"url\" graphql:\"url\"" @@ -76,8 +87,8 @@ type URLFragment struct { type ImageFragment struct { ID string "json:\"id\" graphql:\"id\"" URL string "json:\"url\" graphql:\"url\"" - Width *int "json:\"width\" graphql:\"width\"" - Height *int "json:\"height\" graphql:\"height\"" + Width int "json:\"width\" graphql:\"width\"" + Height int "json:\"height\" graphql:\"height\"" } type StudioFragment struct { Name string "json:\"name\" graphql:\"name\"" @@ -189,9 +200,21 @@ fragment SceneFragment on Scene { ... FingerprintFragment } } -fragment URLFragment on URL { +fragment ImageFragment on Image { + id url - type + width + height +} +fragment StudioFragment on Studio { + name + id + urls { + ... URLFragment + } + images { + ... ImageFragment + } } fragment PerformerAppearanceFragment on PerformerAppearance { as @@ -199,6 +222,24 @@ fragment PerformerAppearanceFragment on PerformerAppearance { ... PerformerFragment } } +fragment FuzzyDateFragment on FuzzyDate { + date + accuracy +} +fragment MeasurementsFragment on Measurements { + band_size + cup_size + waist + hip +} +fragment URLFragment on URL { + url + type +} +fragment TagFragment on Tag { + name + id +} fragment PerformerFragment on Performer { id name @@ -232,45 +273,15 @@ fragment PerformerFragment on Performer { ... 
BodyModificationFragment } } -fragment FuzzyDateFragment on FuzzyDate { - date - accuracy +fragment BodyModificationFragment on BodyModification { + location + description } fragment FingerprintFragment on Fingerprint { algorithm hash duration } -fragment ImageFragment on Image { - id - url - width - height -} -fragment StudioFragment on Studio { - name - id - urls { - ... URLFragment - } - images { - ... ImageFragment - } -} -fragment TagFragment on Tag { - name - id -} -fragment MeasurementsFragment on Measurements { - band_size - cup_size - waist - hip -} -fragment BodyModificationFragment on BodyModification { - location - description -} ` func (c *Client) FindSceneByFingerprint(ctx context.Context, fingerprint FingerprintQueryInput, httpRequestOptions ...client.HTTPRequestOption) (*FindSceneByFingerprint, error) { @@ -291,11 +302,29 @@ const FindScenesByFingerprintsQuery = `query FindScenesByFingerprints ($fingerpr ... SceneFragment } } -fragment PerformerAppearanceFragment on PerformerAppearance { - as - performer { - ... PerformerFragment +fragment BodyModificationFragment on BodyModification { + location + description +} +fragment ImageFragment on Image { + id + url + width + height +} +fragment StudioFragment on Studio { + name + id + urls { + ... URLFragment } + images { + ... ImageFragment + } +} +fragment TagFragment on Tag { + name + id } fragment PerformerFragment on Performer { id @@ -336,11 +365,6 @@ fragment MeasurementsFragment on Measurements { waist hip } -fragment FingerprintFragment on Fingerprint { - algorithm - hash - duration -} fragment SceneFragment on Scene { id title @@ -370,33 +394,20 @@ fragment URLFragment on URL { url type } -fragment ImageFragment on Image { - id - url - width - height -} -fragment TagFragment on Tag { - name - id -} -fragment StudioFragment on Studio { - name - id - urls { - ... URLFragment - } - images { - ... ImageFragment +fragment PerformerAppearanceFragment on PerformerAppearance { + as + performer { + ... 
PerformerFragment } } fragment FuzzyDateFragment on FuzzyDate { date accuracy } -fragment BodyModificationFragment on BodyModification { - location - description +fragment FingerprintFragment on Fingerprint { + algorithm + hash + duration } ` @@ -418,16 +429,50 @@ const SearchSceneQuery = `query SearchScene ($term: String!) { ... SceneFragment } } +fragment PerformerAppearanceFragment on PerformerAppearance { + as + performer { + ... PerformerFragment + } +} +fragment BodyModificationFragment on BodyModification { + location + description +} +fragment FingerprintFragment on Fingerprint { + algorithm + hash + duration +} +fragment SceneFragment on Scene { + id + title + details + duration + date + urls { + ... URLFragment + } + images { + ... ImageFragment + } + studio { + ... StudioFragment + } + tags { + ... TagFragment + } + performers { + ... PerformerAppearanceFragment + } + fingerprints { + ... FingerprintFragment + } +} fragment URLFragment on URL { url type } -fragment ImageFragment on Image { - id - url - width - height -} fragment TagFragment on Tag { name id @@ -475,30 +520,11 @@ fragment MeasurementsFragment on Measurements { waist hip } -fragment SceneFragment on Scene { +fragment ImageFragment on Image { id - title - details - duration - date - urls { - ... URLFragment - } - images { - ... ImageFragment - } - studio { - ... StudioFragment - } - tags { - ... TagFragment - } - performers { - ... PerformerAppearanceFragment - } - fingerprints { - ... FingerprintFragment - } + url + width + height } fragment StudioFragment on Studio { name @@ -510,21 +536,6 @@ fragment StudioFragment on Studio { ... ImageFragment } } -fragment PerformerAppearanceFragment on PerformerAppearance { - as - performer { - ... 
PerformerFragment - } -} -fragment BodyModificationFragment on BodyModification { - location - description -} -fragment FingerprintFragment on Fingerprint { - algorithm - hash - duration -} ` func (c *Client) SearchScene(ctx context.Context, term string, httpRequestOptions ...client.HTTPRequestOption) (*SearchScene, error) { diff --git a/pkg/scraper/stashbox/graphql/generated_models.go b/pkg/scraper/stashbox/graphql/generated_models.go index a8715092b..9fa66170f 100644 --- a/pkg/scraper/stashbox/graphql/generated_models.go +++ b/pkg/scraper/stashbox/graphql/generated_models.go @@ -7,6 +7,8 @@ import ( "io" "strconv" "time" + + "github.com/99designs/gqlgen/graphql" ) type EditDetails interface { @@ -17,6 +19,13 @@ type EditTarget interface { IsEditTarget() } +type ActivateNewUserInput struct { + Name string `json:"name"` + Email string `json:"email"` + ActivationKey string `json:"activation_key"` + Password string `json:"password"` +} + type ApplyEditInput struct { ID string `json:"id"` } @@ -58,11 +67,15 @@ type Edit struct { Target EditTarget `json:"target"` TargetType TargetTypeEnum `json:"target_type"` // Objects to merge with the target. Only applicable to merges - MergeSources []EditTarget `json:"merge_sources"` - Operation OperationEnum `json:"operation"` - Details EditDetails `json:"details"` - Comments []*EditComment `json:"comments"` - Votes []*VoteComment `json:"votes"` + MergeSources []EditTarget `json:"merge_sources"` + Operation OperationEnum `json:"operation"` + Details EditDetails `json:"details"` + // Previous state of fields being modified - null if operation is create or delete. 
+ OldDetails EditDetails `json:"old_details"` + // Entity specific options + Options *PerformerEditOptions `json:"options"` + Comments []*EditComment `json:"comments"` + Votes []*VoteComment `json:"votes"` // = Accepted - Rejected VoteCount int `json:"vote_count"` Status VoteStatusEnum `json:"status"` @@ -115,11 +128,6 @@ type EditVoteInput struct { Type VoteTypeEnum `json:"type"` } -type EthnicityCriterionInput struct { - Value *EthnicityEnum `json:"value"` - Modifier CriterionModifier `json:"modifier"` -} - type EyeColorCriterionInput struct { Value *EyeColorEnum `json:"value"` Modifier CriterionModifier `json:"modifier"` @@ -157,6 +165,11 @@ type FuzzyDateInput struct { Accuracy DateAccuracyEnum `json:"accuracy"` } +type GrantInviteInput struct { + UserID string `json:"user_id"` + Amount int `json:"amount"` +} + type HairColorCriterionInput struct { Value *HairColorEnum `json:"value"` Modifier CriterionModifier `json:"modifier"` @@ -170,12 +183,13 @@ type IDCriterionInput struct { type Image struct { ID string `json:"id"` URL string `json:"url"` - Width *int `json:"width"` - Height *int `json:"height"` + Width int `json:"width"` + Height int `json:"height"` } type ImageCreateInput struct { - URL string `json:"url"` + URL *string `json:"url"` + File *graphql.Upload `json:"file"` } type ImageDestroyInput struct { @@ -183,8 +197,8 @@ type ImageDestroyInput struct { } type ImageUpdateInput struct { - ID string `json:"id"` - URL string `json:"url"` + ID string `json:"id"` + URL *string `json:"url"` } type IntCriterionInput struct { @@ -211,6 +225,11 @@ type MultiIDCriterionInput struct { Modifier CriterionModifier `json:"modifier"` } +type NewUserInput struct { + Email string `json:"email"` + InviteKey *string `json:"invite_key"` +} + type Performer struct { ID string `json:"id"` Name string `json:"name"` @@ -234,6 +253,8 @@ type Performer struct { Piercings []*BodyModification `json:"piercings"` Images []*Image `json:"images"` Deleted bool `json:"deleted"` + Edits 
[]*Edit `json:"edits"` + SceneCount int `json:"scene_count"` } func (Performer) IsEditTarget() {} @@ -276,21 +297,25 @@ type PerformerDestroyInput struct { } type PerformerEdit struct { - Name *string `json:"name"` - Disambiguation *string `json:"disambiguation"` - AddedAliases []string `json:"added_aliases"` - RemovedAliases []string `json:"removed_aliases"` - Gender *GenderEnum `json:"gender"` - AddedUrls []*URL `json:"added_urls"` - RemovedUrls []*URL `json:"removed_urls"` - Birthdate *FuzzyDate `json:"birthdate"` - Ethnicity *EthnicityEnum `json:"ethnicity"` - Country *string `json:"country"` - EyeColor *EyeColorEnum `json:"eye_color"` - HairColor *HairColorEnum `json:"hair_color"` + Name *string `json:"name"` + Disambiguation *string `json:"disambiguation"` + AddedAliases []string `json:"added_aliases"` + RemovedAliases []string `json:"removed_aliases"` + Gender *GenderEnum `json:"gender"` + AddedUrls []*URL `json:"added_urls"` + RemovedUrls []*URL `json:"removed_urls"` + Birthdate *string `json:"birthdate"` + BirthdateAccuracy *string `json:"birthdate_accuracy"` + Ethnicity *EthnicityEnum `json:"ethnicity"` + Country *string `json:"country"` + EyeColor *EyeColorEnum `json:"eye_color"` + HairColor *HairColorEnum `json:"hair_color"` // Height in cm Height *int `json:"height"` - Measurements *Measurements `json:"measurements"` + CupSize *string `json:"cup_size"` + BandSize *int `json:"band_size"` + WaistSize *int `json:"waist_size"` + HipSize *int `json:"hip_size"` BreastType *BreastTypeEnum `json:"breast_type"` CareerStartYear *int `json:"career_start_year"` CareerEndYear *int `json:"career_end_year"` @@ -329,6 +354,22 @@ type PerformerEditInput struct { Edit *EditInput `json:"edit"` // Not required for destroy type Details *PerformerEditDetailsInput `json:"details"` + // Controls aliases modification for merges and name modifications + Options *PerformerEditOptionsInput `json:"options"` +} + +type PerformerEditOptions struct { + // Set performer alias on 
scenes without alias to old name if name is changed + SetModifyAliases bool `json:"set_modify_aliases"` + // Set performer alias on scenes attached to merge sources to old name + SetMergeAliases bool `json:"set_merge_aliases"` +} + +type PerformerEditOptionsInput struct { + // Set performer alias on scenes without alias to old name if name is changed + SetModifyAliases *bool `json:"set_modify_aliases"` + // Set performer alias on scenes attached to merge sources to old name + SetMergeAliases *bool `json:"set_merge_aliases"` } type PerformerFilterType struct { @@ -339,13 +380,13 @@ type PerformerFilterType struct { // Search aliases only - assumes like query unless quoted Alias *string `json:"alias"` Disambiguation *StringCriterionInput `json:"disambiguation"` - Gender *GenderEnum `json:"gender"` + Gender *GenderFilterEnum `json:"gender"` // Filter to search urls - assumes like query unless quoted URL *string `json:"url"` Birthdate *DateCriterionInput `json:"birthdate"` BirthYear *IntCriterionInput `json:"birth_year"` Age *IntCriterionInput `json:"age"` - Ethnicity *EthnicityCriterionInput `json:"ethnicity"` + Ethnicity *EthnicityFilterEnum `json:"ethnicity"` Country *StringCriterionInput `json:"country"` EyeColor *EyeColorCriterionInput `json:"eye_color"` HairColor *HairColorCriterionInput `json:"hair_color"` @@ -410,6 +451,11 @@ type QueryStudiosResultType struct { Studios []*Studio `json:"studios"` } +type QueryTagCategoriesResultType struct { + Count int `json:"count"` + TagCategories []*TagCategory `json:"tag_categories"` +} + type QueryTagsResultType struct { Count int `json:"count"` Tags []*Tag `json:"tags"` @@ -420,6 +466,15 @@ type QueryUsersResultType struct { Users []*User `json:"users"` } +type ResetPasswordInput struct { + Email string `json:"email"` +} + +type RevokeInviteInput struct { + UserID string `json:"user_id"` + Amount int `json:"amount"` +} + type RoleCriterionInput struct { Value []RoleEnum `json:"value"` Modifier CriterionModifier 
`json:"modifier"` @@ -515,6 +570,8 @@ type SceneFilterType struct { Date *DateCriterionInput `json:"date"` // Filter to only include scenes with this studio Studios *MultiIDCriterionInput `json:"studios"` + // Filter to only include scenes with this studio as primary or parent + ParentStudio *string `json:"parentStudio"` // Filter to only include scenes with these tags Tags *MultiIDCriterionInput `json:"tags"` // Filter to only include scenes with these performers @@ -598,9 +655,12 @@ type StudioEditInput struct { type StudioFilterType struct { // Filter to search name - assumes like query unless quoted Name *string `json:"name"` + // Filter to search studio and parent studio name - assumes like query unless quoted + Names *string `json:"names"` // Filter to search url - assumes like query unless quoted - URL *string `json:"url"` - Parent *IDCriterionInput `json:"parent"` + URL *string `json:"url"` + Parent *IDCriterionInput `json:"parent"` + HasParent *bool `json:"has_parent"` } type StudioUpdateInput struct { @@ -613,20 +673,46 @@ type StudioUpdateInput struct { } type Tag struct { - ID string `json:"id"` - Name string `json:"name"` - Description *string `json:"description"` - Aliases []string `json:"aliases"` - Deleted bool `json:"deleted"` - Edits []*Edit `json:"edits"` + ID string `json:"id"` + Name string `json:"name"` + Description *string `json:"description"` + Aliases []string `json:"aliases"` + Deleted bool `json:"deleted"` + Edits []*Edit `json:"edits"` + Category *TagCategory `json:"category"` } func (Tag) IsEditTarget() {} +type TagCategory struct { + ID string `json:"id"` + Name string `json:"name"` + Group TagGroupEnum `json:"group"` + Description *string `json:"description"` +} + +type TagCategoryCreateInput struct { + Name string `json:"name"` + Group TagGroupEnum `json:"group"` + Description *string `json:"description"` +} + +type TagCategoryDestroyInput struct { + ID string `json:"id"` +} + +type TagCategoryUpdateInput struct { + ID string 
`json:"id"` + Name *string `json:"name"` + Group *TagGroupEnum `json:"group"` + Description *string `json:"description"` +} + type TagCreateInput struct { Name string `json:"name"` Description *string `json:"description"` Aliases []string `json:"aliases"` + CategoryID *string `json:"category_id"` } type TagDestroyInput struct { @@ -638,6 +724,7 @@ type TagEdit struct { Description *string `json:"description"` AddedAliases []string `json:"added_aliases"` RemovedAliases []string `json:"removed_aliases"` + CategoryID *string `json:"category_id"` } func (TagEdit) IsEditDetails() {} @@ -646,6 +733,7 @@ type TagEditDetailsInput struct { Name *string `json:"name"` Description *string `json:"description"` Aliases []string `json:"aliases"` + CategoryID *string `json:"category_id"` } type TagEditInput struct { @@ -661,6 +749,8 @@ type TagFilterType struct { Names *string `json:"names"` // Filter to search name - assumes like query unless quoted Name *string `json:"name"` + // Filter to category ID + CategoryID *string `json:"category_id"` } type TagUpdateInput struct { @@ -668,6 +758,7 @@ type TagUpdateInput struct { Name *string `json:"name"` Description *string `json:"description"` Aliases []string `json:"aliases"` + CategoryID *string `json:"category_id"` } type URL struct { @@ -695,21 +786,26 @@ type User struct { // Votes on unsuccessful edits UnsuccessfulVotes int `json:"unsuccessful_votes"` // Calls to the API from this user over a configurable time period - APICalls int `json:"api_calls"` + APICalls int `json:"api_calls"` + InvitedBy *User `json:"invited_by"` + InviteTokens *int `json:"invite_tokens"` + ActiveInviteCodes []string `json:"active_invite_codes"` } type UserChangePasswordInput struct { // Password in plain text - ExistingPassword string `json:"existing_password"` - NewPassword string `json:"new_password"` + ExistingPassword *string `json:"existing_password"` + NewPassword string `json:"new_password"` + ResetKey *string `json:"reset_key"` } type 
UserCreateInput struct { Name string `json:"name"` // Password in plain text - Password string `json:"password"` - Roles []RoleEnum `json:"roles"` - Email string `json:"email"` + Password string `json:"password"` + Roles []RoleEnum `json:"roles"` + Email string `json:"email"` + InvitedByID *string `json:"invited_by_id"` } type UserDestroyInput struct { @@ -735,6 +831,8 @@ type UserFilterType struct { UnsuccessfulVotes *IntCriterionInput `json:"unsuccessful_votes"` // Filter by number of API calls APICalls *IntCriterionInput `json:"api_calls"` + // Filter by user that invited + InvitedBy *string `json:"invited_by"` } type UserUpdateInput struct { @@ -960,6 +1058,61 @@ func (e EthnicityEnum) MarshalGQL(w io.Writer) { fmt.Fprint(w, strconv.Quote(e.String())) } +type EthnicityFilterEnum string + +const ( + EthnicityFilterEnumUnknown EthnicityFilterEnum = "UNKNOWN" + EthnicityFilterEnumCaucasian EthnicityFilterEnum = "CAUCASIAN" + EthnicityFilterEnumBlack EthnicityFilterEnum = "BLACK" + EthnicityFilterEnumAsian EthnicityFilterEnum = "ASIAN" + EthnicityFilterEnumIndian EthnicityFilterEnum = "INDIAN" + EthnicityFilterEnumLatin EthnicityFilterEnum = "LATIN" + EthnicityFilterEnumMiddleEastern EthnicityFilterEnum = "MIDDLE_EASTERN" + EthnicityFilterEnumMixed EthnicityFilterEnum = "MIXED" + EthnicityFilterEnumOther EthnicityFilterEnum = "OTHER" +) + +var AllEthnicityFilterEnum = []EthnicityFilterEnum{ + EthnicityFilterEnumUnknown, + EthnicityFilterEnumCaucasian, + EthnicityFilterEnumBlack, + EthnicityFilterEnumAsian, + EthnicityFilterEnumIndian, + EthnicityFilterEnumLatin, + EthnicityFilterEnumMiddleEastern, + EthnicityFilterEnumMixed, + EthnicityFilterEnumOther, +} + +func (e EthnicityFilterEnum) IsValid() bool { + switch e { + case EthnicityFilterEnumUnknown, EthnicityFilterEnumCaucasian, EthnicityFilterEnumBlack, EthnicityFilterEnumAsian, EthnicityFilterEnumIndian, EthnicityFilterEnumLatin, EthnicityFilterEnumMiddleEastern, EthnicityFilterEnumMixed, 
EthnicityFilterEnumOther: + return true + } + return false +} + +func (e EthnicityFilterEnum) String() string { + return string(e) +} + +func (e *EthnicityFilterEnum) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = EthnicityFilterEnum(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid EthnicityFilterEnum", str) + } + return nil +} + +func (e EthnicityFilterEnum) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + type EyeColorEnum string const ( @@ -1014,16 +1167,18 @@ type FingerprintAlgorithm string const ( FingerprintAlgorithmMd5 FingerprintAlgorithm = "MD5" FingerprintAlgorithmOshash FingerprintAlgorithm = "OSHASH" + FingerprintAlgorithmPhash FingerprintAlgorithm = "PHASH" ) var AllFingerprintAlgorithm = []FingerprintAlgorithm{ FingerprintAlgorithmMd5, FingerprintAlgorithmOshash, + FingerprintAlgorithmPhash, } func (e FingerprintAlgorithm) IsValid() bool { switch e { - case FingerprintAlgorithmMd5, FingerprintAlgorithmOshash: + case FingerprintAlgorithmMd5, FingerprintAlgorithmOshash, FingerprintAlgorithmPhash: return true } return false @@ -1097,6 +1252,55 @@ func (e GenderEnum) MarshalGQL(w io.Writer) { fmt.Fprint(w, strconv.Quote(e.String())) } +type GenderFilterEnum string + +const ( + GenderFilterEnumUnknown GenderFilterEnum = "UNKNOWN" + GenderFilterEnumMale GenderFilterEnum = "MALE" + GenderFilterEnumFemale GenderFilterEnum = "FEMALE" + GenderFilterEnumTransgenderMale GenderFilterEnum = "TRANSGENDER_MALE" + GenderFilterEnumTransgenderFemale GenderFilterEnum = "TRANSGENDER_FEMALE" + GenderFilterEnumIntersex GenderFilterEnum = "INTERSEX" +) + +var AllGenderFilterEnum = []GenderFilterEnum{ + GenderFilterEnumUnknown, + GenderFilterEnumMale, + GenderFilterEnumFemale, + GenderFilterEnumTransgenderMale, + GenderFilterEnumTransgenderFemale, + GenderFilterEnumIntersex, +} + +func (e GenderFilterEnum) IsValid() bool { + switch e { + case 
GenderFilterEnumUnknown, GenderFilterEnumMale, GenderFilterEnumFemale, GenderFilterEnumTransgenderMale, GenderFilterEnumTransgenderFemale, GenderFilterEnumIntersex: + return true + } + return false +} + +func (e GenderFilterEnum) String() string { + return string(e) +} + +func (e *GenderFilterEnum) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = GenderFilterEnum(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid GenderFilterEnum", str) + } + return nil +} + +func (e GenderFilterEnum) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + type HairColorEnum string const ( @@ -1205,6 +1409,10 @@ const ( RoleEnumEdit RoleEnum = "EDIT" RoleEnumModify RoleEnum = "MODIFY" RoleEnumAdmin RoleEnum = "ADMIN" + // May generate invites without tokens + RoleEnumInvite RoleEnum = "INVITE" + // May grant and rescind invite tokens and rescind invite keys + RoleEnumManageInvites RoleEnum = "MANAGE_INVITES" ) var AllRoleEnum = []RoleEnum{ @@ -1213,11 +1421,13 @@ var AllRoleEnum = []RoleEnum{ RoleEnumRead, RoleEnumVote, RoleEnumEdit, RoleEnumModify, RoleEnumAdmin, + RoleEnumInvite, + RoleEnumManageInvites, } func (e RoleEnum) IsValid() bool { switch e { - case RoleEnumRead, RoleEnumVote, RoleEnumEdit, RoleEnumModify, RoleEnumAdmin: + case RoleEnumRead, RoleEnumVote, RoleEnumEdit, RoleEnumModify, RoleEnumAdmin, RoleEnumInvite, RoleEnumManageInvites: return true } return false @@ -1285,6 +1495,49 @@ func (e SortDirectionEnum) MarshalGQL(w io.Writer) { fmt.Fprint(w, strconv.Quote(e.String())) } +type TagGroupEnum string + +const ( + TagGroupEnumPeople TagGroupEnum = "PEOPLE" + TagGroupEnumScene TagGroupEnum = "SCENE" + TagGroupEnumAction TagGroupEnum = "ACTION" +) + +var AllTagGroupEnum = []TagGroupEnum{ + TagGroupEnumPeople, + TagGroupEnumScene, + TagGroupEnumAction, +} + +func (e TagGroupEnum) IsValid() bool { + switch e { + case TagGroupEnumPeople, TagGroupEnumScene, TagGroupEnumAction: + return true
+ } + return false +} + +func (e TagGroupEnum) String() string { + return string(e) +} + +func (e *TagGroupEnum) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = TagGroupEnum(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid TagGroupEnum", str) + } + return nil +} + +func (e TagGroupEnum) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + type TargetTypeEnum string const ( diff --git a/pkg/scraper/stashbox/stash_box.go b/pkg/scraper/stashbox/stash_box.go index 1dac41422..20a0fc95a 100644 --- a/pkg/scraper/stashbox/stash_box.go +++ b/pkg/scraper/stashbox/stash_box.go @@ -66,7 +66,7 @@ func (c Client) QueryStashBoxScene(queryStr string) ([]*models.ScrapedScene, err } // FindStashBoxScenesByFingerprints queries stash-box for scenes using every -// scene's MD5 checksum and/or oshash. +// scene's MD5/OSHASH checksum, or PHash func (c Client) FindStashBoxScenesByFingerprints(sceneIDs []string) ([]*models.ScrapedScene, error) { ids, err := utils.StringSliceToIntSlice(sceneIDs) if err != nil { @@ -95,6 +95,10 @@ func (c Client) FindStashBoxScenesByFingerprints(sceneIDs []string) ([]*models.S if scene.OSHash.Valid { fingerprints = append(fingerprints, scene.OSHash.String) } + + if scene.Phash.Valid { + fingerprints = append(fingerprints, utils.PhashToString(scene.Phash.Int64)) + } } return nil @@ -189,6 +193,18 @@ func (c Client) SubmitStashBoxFingerprints(sceneIDs []string, endpoint string) ( Fingerprint: &fingerprint, }) } + + if scene.Phash.Valid && scene.Duration.Valid { + fingerprint := graphql.FingerprintInput{ + Hash: utils.PhashToString(scene.Phash.Int64), + Algorithm: graphql.FingerprintAlgorithmPhash, + Duration: int(scene.Duration.Float64), + } + fingerprints = append(fingerprints, graphql.FingerprintSubmission{ + SceneID: sceneStashID, + Fingerprint: &fingerprint, + }) + } } } diff --git a/pkg/sqlite/scene.go b/pkg/sqlite/scene.go index 
ffd2f01c5..a727c7bfd 100644 --- a/pkg/sqlite/scene.go +++ b/pkg/sqlite/scene.go @@ -4,9 +4,11 @@ import ( "database/sql" "fmt" "strconv" + "strings" "github.com/jmoiron/sqlx" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" ) const sceneTable = "scenes" @@ -61,6 +63,20 @@ SELECT id FROM scenes WHERE scenes.oshash is null ` +var findExactDuplicateQuery = ` +SELECT GROUP_CONCAT(id) as ids +FROM scenes +WHERE phash IS NOT NULL +GROUP BY phash +HAVING COUNT(*) > 1; +` + +var findAllPhashesQuery = ` +SELECT id, phash +FROM scenes +WHERE phash IS NOT NULL +` + type sceneQueryBuilder struct { repository } @@ -824,3 +840,51 @@ func (qb *sceneQueryBuilder) GetStashIDs(sceneID int) ([]*models.StashID, error) func (qb *sceneQueryBuilder) UpdateStashIDs(sceneID int, stashIDs []models.StashID) error { return qb.stashIDRepository().replace(sceneID, stashIDs) } + +func (qb *sceneQueryBuilder) FindDuplicates(distance int) ([][]*models.Scene, error) { + var dupeIds [][]int + if distance == 0 { + var ids []string + if err := qb.tx.Select(&ids, findExactDuplicateQuery); err != nil { + return nil, err + } + + for _, id := range ids { + strIds := strings.Split(id, ",") + var sceneIds []int + for _, strId := range strIds { + if intId, err := strconv.Atoi(strId); err == nil { + sceneIds = append(sceneIds, intId) + } + } + dupeIds = append(dupeIds, sceneIds) + } + } else { + var hashes []*utils.Phash + + if err := qb.queryFunc(findAllPhashesQuery, nil, func(rows *sqlx.Rows) error { + phash := utils.Phash{ + Bucket: -1, + } + if err := rows.StructScan(&phash); err != nil { + return err + } + + hashes = append(hashes, &phash) + return nil + }); err != nil { + return nil, err + } + + dupeIds = utils.FindDuplicates(hashes, distance) + } + + var duplicates [][]*models.Scene + for _, sceneIds := range dupeIds { + if scenes, err := qb.FindMany(sceneIds); err == nil { + duplicates = append(duplicates, scenes) + } + } + + return duplicates, nil +} diff --git 
a/pkg/utils/phash.go b/pkg/utils/phash.go new file mode 100644 index 000000000..f5e1f2cd9 --- /dev/null +++ b/pkg/utils/phash.go @@ -0,0 +1,57 @@ +package utils + +import ( + "strconv" + + "github.com/corona10/goimagehash" +) + +type Phash struct { + SceneID int `db:"id"` + Hash int64 `db:"phash"` + Neighbors []int + Bucket int +} + +func FindDuplicates(hashes []*Phash, distance int) [][]int { + for i, scene := range hashes { + sceneHash := goimagehash.NewImageHash(uint64(scene.Hash), goimagehash.PHash) + for j, neighbor := range hashes { + if i != j { + neighborHash := goimagehash.NewImageHash(uint64(neighbor.Hash), goimagehash.PHash) + neighborDistance, _ := sceneHash.Distance(neighborHash) + if neighborDistance <= distance { + scene.Neighbors = append(scene.Neighbors, j) + } + } + } + } + + var buckets [][]int + for _, scene := range hashes { + if len(scene.Neighbors) > 0 && scene.Bucket == -1 { + bucket := len(buckets) + scenes := []int{scene.SceneID} + scene.Bucket = bucket + findNeighbors(bucket, scene.Neighbors, hashes, &scenes) + buckets = append(buckets, scenes) + } + } + + return buckets +} + +func findNeighbors(bucket int, neighbors []int, hashes []*Phash, scenes *[]int) { + for _, id := range neighbors { + hash := hashes[id] + if hash.Bucket == -1 { + hash.Bucket = bucket + *scenes = append(*scenes, hash.SceneID) + findNeighbors(bucket, hash.Neighbors, hashes, scenes) + } + } +} + +func PhashToString(phash int64) string { + return strconv.FormatUint(uint64(phash), 16) +} diff --git a/ui/v2.5/src/components/Changelog/versions/v070.md b/ui/v2.5/src/components/Changelog/versions/v070.md index 92eccf82a..9dbfdf5ab 100644 --- a/ui/v2.5/src/components/Changelog/versions/v070.md +++ b/ui/v2.5/src/components/Changelog/versions/v070.md @@ -1,4 +1,5 @@ ### ✨ New Features +* Added [perceptual dupe checker](/settings?tab=duplicates). * Support access to system without logging in via API key. * Added scene queue. 
diff --git a/ui/v2.5/src/components/Help/Manual.tsx b/ui/v2.5/src/components/Help/Manual.tsx index c53405f61..89ed6637b 100644 --- a/ui/v2.5/src/components/Help/Manual.tsx +++ b/ui/v2.5/src/components/Help/Manual.tsx @@ -14,6 +14,7 @@ import Contributing from "src/docs/en/Contributing.md"; import SceneFilenameParser from "src/docs/en/SceneFilenameParser.md"; import KeyboardShortcuts from "src/docs/en/KeyboardShortcuts.md"; import Help from "src/docs/en/Help.md"; +import Deduplication from "src/docs/en/Deduplication.md"; import { MarkdownPage } from "../Shared/MarkdownPage"; interface IManualProps { @@ -86,6 +87,11 @@ export const Manual: React.FC = ({ title: "Scene Tagger", content: Tagger, }, + { + key: "Deduplication.md", + title: "Dupe Checker", + content: Deduplication, + }, { key: "KeyboardShortcuts.md", title: "Keyboard Shortcuts", diff --git a/ui/v2.5/src/components/List/styles.scss b/ui/v2.5/src/components/List/styles.scss index 8ea230bc4..6cdea51aa 100644 --- a/ui/v2.5/src/components/List/styles.scss +++ b/ui/v2.5/src/components/List/styles.scss @@ -6,6 +6,14 @@ padding-left: 15px; padding-right: 15px; transition: none; + + &:first-child { + border-left: none; + } + + &:last-child { + border-right: none; + } } } diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/SceneFileInfoPanel.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/SceneFileInfoPanel.tsx index 6a41e67ed..343cd77a8 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/SceneFileInfoPanel.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/SceneFileInfoPanel.tsx @@ -219,10 +219,24 @@ export const SceneFileInfoPanel: React.FC = ( ); } + function renderPhash() { + if (props.scene.phash) { + return ( +
+ + PHash + + +
+ ); + } + } + return (
{renderOSHash()} {renderChecksum()} + {renderPhash()} {renderPath()} {renderStream()} {renderFileSize()} diff --git a/ui/v2.5/src/components/Scenes/SceneGenerateDialog.tsx b/ui/v2.5/src/components/Scenes/SceneGenerateDialog.tsx index abf950e9e..72a1601ee 100644 --- a/ui/v2.5/src/components/Scenes/SceneGenerateDialog.tsx +++ b/ui/v2.5/src/components/Scenes/SceneGenerateDialog.tsx @@ -19,6 +19,7 @@ export const SceneGenerateDialog: React.FC = ( const { data, error, loading } = useConfiguration(); const [sprites, setSprites] = useState(true); + const [phashes, setPhashes] = useState(true); const [previews, setPreviews] = useState(true); const [markers, setMarkers] = useState(true); const [transcodes, setTranscodes] = useState(false); @@ -60,6 +61,7 @@ export const SceneGenerateDialog: React.FC = ( try { await mutateMetadataGenerate({ sprites, + phashes, previews, imagePreviews: previews && imagePreviews, markers, @@ -243,6 +245,12 @@ export const SceneGenerateDialog: React.FC = ( label="Transcodes (MP4 conversions of unsupported video formats)" onChange={() => setTranscodes(!transcodes)} /> + setPhashes(!phashes)} + />
{ const location = useLocation(); @@ -45,6 +46,9 @@ export const Settings: React.FC = () => { Logs + + Dupe Checker + About @@ -71,6 +75,9 @@ export const Settings: React.FC = () => { + + + diff --git a/ui/v2.5/src/components/Settings/SettingsDuplicatePanel.tsx b/ui/v2.5/src/components/Settings/SettingsDuplicatePanel.tsx new file mode 100644 index 000000000..53916033a --- /dev/null +++ b/ui/v2.5/src/components/Settings/SettingsDuplicatePanel.tsx @@ -0,0 +1,270 @@ +import React, { useState } from "react"; +import { Button, Col, Form, Row, Table } from "react-bootstrap"; +import { Link, useHistory } from "react-router-dom"; +import { FormattedNumber } from "react-intl"; +import querystring from "query-string"; + +import * as GQL from "src/core/generated-graphql"; +import { + LoadingIndicator, + ErrorMessage, + HoverPopover, +} from "src/components/Shared"; +import { Pagination } from "src/components/List/Pagination"; +import { TextUtils } from "src/utils"; +import { DeleteScenesDialog } from "src/components/Scenes/DeleteScenesDialog"; + +const CLASSNAME = "DuplicateChecker"; + +export const SettingsDuplicatePanel: React.FC = () => { + const history = useHistory(); + const { page, size, distance } = querystring.parse(history.location.search); + const currentPage = Number.parseInt( + Array.isArray(page) ? page[0] : page ?? "1", + 10 + ); + const pageSize = Number.parseInt( + Array.isArray(size) ? size[0] : size ?? "20", + 10 + ); + const hashDistance = Number.parseInt( + Array.isArray(distance) ? distance[0] : distance ?? 
"0", + 10 + ); + const [isMultiDelete, setIsMultiDelete] = useState(false); + const [checkedScenes, setCheckedScenes] = useState>( + {} + ); + const { data, loading, refetch } = GQL.useFindDuplicateScenesQuery({ + fetchPolicy: "no-cache", + variables: { distance: hashDistance }, + }); + const [deletingScene, setDeletingScene] = useState< + GQL.SlimSceneDataFragment[] | null + >(null); + + if (loading) return ; + if (!data) return ; + + const scenes = data?.findDuplicateScenes ?? []; + const filteredScenes = scenes.slice( + (currentPage - 1) * pageSize, + currentPage * pageSize + ); + const checkCount = Object.keys(checkedScenes).filter( + (id) => checkedScenes[id] + ).length; + + const setQuery = (q: Record) => { + history.push({ + search: querystring.stringify({ + ...querystring.parse(history.location.search), + ...q, + }), + }); + }; + + function onDeleteDialogClosed(deleted: boolean) { + setDeletingScene(null); + if (deleted) { + refetch(); + if (isMultiDelete) setCheckedScenes({}); + } + } + + const handleCheck = (checked: boolean, sceneID: string) => { + setCheckedScenes({ ...checkedScenes, [sceneID]: checked }); + }; + + const handleDeleteChecked = () => { + setDeletingScene(scenes.flat().filter((s) => checkedScenes[s.id])); + setIsMultiDelete(true); + }; + + const handleDeleteScene = (scene: GQL.SlimSceneDataFragment) => { + setDeletingScene([scene]); + setIsMultiDelete(false); + }; + + const renderFilesize = (filesize: string | null | undefined) => { + const { size: parsedSize, unit } = TextUtils.fileSize( + Number.parseInt(filesize ?? "0", 10) + ); + return ( + + ); + }; + + return ( +
+ {deletingScene && ( + + )} +

Duplicate Scenes

+ + + Search Accuracy + + + setQuery({ + distance: + e.currentTarget.value === "0" + ? undefined + : e.currentTarget.value, + page: undefined, + }) + } + defaultValue={distance ?? 0} + className="ml-4" + > + + + + + + + + + Levels below “Exact” can take longer to calculate. False + positives might also be returned on lower accuracy levels. + + +
+
+ {scenes.length} sets of duplicates found. +
+ {checkCount > 0 && ( + + )} + + setQuery({ page: newPage === 1 ? undefined : newPage }) + } + /> + + setQuery({ + size: + e.currentTarget.value === "20" + ? undefined + : e.currentTarget.value, + }) + } + > + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + {filteredScenes.map((group) => + group.map((scene, i) => ( + + + + + + + + + + + + )) + )} + +
TitleDurationFilesizeResolutionBitrateCodecDelete
+ + handleCheck(e.currentTarget.checked, scene.id) + } + /> + + + } + placement="right" + > + + + + + {scene.title ?? TextUtils.fileNameFromPath(scene.path)} + + + {scene.file.duration && + TextUtils.secondsToTimestamp(scene.file.duration)} + {renderFilesize(scene.file.size)}{`${scene.file.width}x${scene.file.height}`} + +  mbps + {scene.file.video_codec} + +
+ {scenes.length === 0 && ( +

+ No duplicates found. Make sure the phash task has been run. +

+ )} +
+ ); +}; diff --git a/ui/v2.5/src/components/Settings/SettingsTasksPanel/GenerateButton.tsx b/ui/v2.5/src/components/Settings/SettingsTasksPanel/GenerateButton.tsx index 77ced445b..69d8f46cd 100644 --- a/ui/v2.5/src/components/Settings/SettingsTasksPanel/GenerateButton.tsx +++ b/ui/v2.5/src/components/Settings/SettingsTasksPanel/GenerateButton.tsx @@ -6,6 +6,7 @@ import { useToast } from "src/hooks"; export const GenerateButton: React.FC = () => { const Toast = useToast(); const [sprites, setSprites] = useState(true); + const [phashes, setPhashes] = useState(true); const [previews, setPreviews] = useState(true); const [markers, setMarkers] = useState(true); const [transcodes, setTranscodes] = useState(false); @@ -15,6 +16,7 @@ export const GenerateButton: React.FC = () => { try { await mutateMetadataGenerate({ sprites, + phashes, previews, imagePreviews: previews && imagePreviews, markers, @@ -64,6 +66,12 @@ export const GenerateButton: React.FC = () => { label="Transcodes (MP4 conversions of unsupported video formats)" onChange={() => setTranscodes(!transcodes)} /> + setPhashes(!phashes)} + />
{getDurationStatus(scene, stashScene.file?.duration)} - {getFingerprintStatus( - scene, - stashScene.checksum ?? stashScene.oshash ?? undefined - )} + {getFingerprintStatus(scene, stashScene)} diff --git a/ui/v2.5/src/docs/en/Deduplication.md b/ui/v2.5/src/docs/en/Deduplication.md new file mode 100644 index 000000000..e44d535e6 --- /dev/null +++ b/ui/v2.5/src/docs/en/Deduplication.md @@ -0,0 +1,9 @@ +# Dupe Checker + +[The dupe checker](/settings?tab=duplicates) searches your collection for scenes that are perceptually similar. This means that the files don't need to be identical, and will be identified even with different bitrates, resolutions, and intros/outros. + +To achieve this, stash needs to generate what's called a phash, or perceptual hash. Similar to sprite generation, stash will generate a set of 25 images from fixed points in the scene. These images will be stitched together, and then hashed using the phash algorithm. The phash can then be used to find scenes that are the same as, or similar to, others in the database. Phash generation can be run during scan, or as a separate task. Note that generation can take a while due to the work involved in extracting screenshots. + +The dupe checker can be run with four different levels of accuracy. `Exact` looks for scenes that have exactly the same phash. This is a fast and accurate operation that should not yield any false positives except in very rare cases. The other accuracy levels look for duplicate files within a set distance of each other. This means the scenes don't have exactly the same phash, but are very similar. `High` and `Medium` should still yield very good results with few or no false positives. `Low` is likely to produce some false positives, but might still be useful for finding dupes. + +Note that to generate a phash, stash requires an uncorrupted file. If any errors are encountered during sprite generation, the phash will not be generated. This is to prevent false positives. 
diff --git a/vendor/github.com/corona10/goimagehash/.gitignore b/vendor/github.com/corona10/goimagehash/.gitignore new file mode 100644 index 000000000..a1338d685 --- /dev/null +++ b/vendor/github.com/corona10/goimagehash/.gitignore @@ -0,0 +1,14 @@ +# Binaries for programs and plugins +*.exe +*.dll +*.so +*.dylib + +# Test binary, build with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736 +.glide/ diff --git a/vendor/github.com/corona10/goimagehash/AUTHORS.md b/vendor/github.com/corona10/goimagehash/AUTHORS.md new file mode 100644 index 000000000..832d2331f --- /dev/null +++ b/vendor/github.com/corona10/goimagehash/AUTHORS.md @@ -0,0 +1,5 @@ +## AUTHORS +- [Dominik Honnef](https://github.com/dominikh) dominik@honnef.co +- [Dong-hee Na](https://github.com/corona10/) donghee.na92@gmail.com +- [Gustavo Brunoro](https://github.com/brunoro/) git@hitnail.net +- [Alex Higashino](https://github.com/TokyoWolFrog/) TokyoWolFrog@mayxyou.com \ No newline at end of file diff --git a/vendor/github.com/corona10/goimagehash/CODEOWNERS b/vendor/github.com/corona10/goimagehash/CODEOWNERS new file mode 100644 index 000000000..4da23f706 --- /dev/null +++ b/vendor/github.com/corona10/goimagehash/CODEOWNERS @@ -0,0 +1 @@ +*.go @corona10 diff --git a/vendor/github.com/corona10/goimagehash/Gopkg.lock b/vendor/github.com/corona10/goimagehash/Gopkg.lock new file mode 100644 index 000000000..c51994eb6 --- /dev/null +++ b/vendor/github.com/corona10/goimagehash/Gopkg.lock @@ -0,0 +1,17 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. 
+ + +[[projects]] + branch = "master" + digest = "1:34534b73e925d20cc72cf202f8b482fdcbe3a1b113e19375f31aadabd0f0f97d" + name = "github.com/nfnt/resize" + packages = ["."] + pruneopts = "UT" + revision = "83c6a9932646f83e3267f353373d47347b6036b2" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + input-imports = ["github.com/nfnt/resize"] + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/vendor/github.com/corona10/goimagehash/Gopkg.toml b/vendor/github.com/corona10/goimagehash/Gopkg.toml new file mode 100644 index 000000000..7d0d6d5e7 --- /dev/null +++ b/vendor/github.com/corona10/goimagehash/Gopkg.toml @@ -0,0 +1,34 @@ +# Gopkg.toml example +# +# Refer to https://golang.github.io/dep/docs/Gopkg.toml.html +# for detailed Gopkg.toml documentation. +# +# required = ["github.com/user/thing/cmd/thing"] +# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"] +# +# [[constraint]] +# name = "github.com/user/project" +# version = "1.0.0" +# +# [[constraint]] +# name = "github.com/user/project2" +# branch = "dev" +# source = "github.com/myfork/project2" +# +# [[override]] +# name = "github.com/x/y" +# version = "2.4.0" +# +# [prune] +# non-go = false +# go-tests = true +# unused-packages = true + + +[[constraint]] + branch = "master" + name = "github.com/nfnt/resize" + +[prune] + go-tests = true + unused-packages = true diff --git a/vendor/github.com/corona10/goimagehash/LICENSE b/vendor/github.com/corona10/goimagehash/LICENSE new file mode 100644 index 000000000..37b5a9609 --- /dev/null +++ b/vendor/github.com/corona10/goimagehash/LICENSE @@ -0,0 +1,25 @@ +BSD 2-Clause License + +Copyright (c) 2017, Dong-hee Na +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. 
+ +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/corona10/goimagehash/README.md b/vendor/github.com/corona10/goimagehash/README.md new file mode 100644 index 000000000..07bab1435 --- /dev/null +++ b/vendor/github.com/corona10/goimagehash/README.md @@ -0,0 +1,93 @@ +![GitHub Action](https://github.com/corona10/goimagehash/workflows/goimagehash%20workflow/badge.svg) +[![GoDoc](https://godoc.org/github.com/corona10/goimagehash?status.svg)](https://godoc.org/github.com/corona10/goimagehash) +[![Go Report Card](https://goreportcard.com/badge/github.com/corona10/goimagehash)](https://goreportcard.com/report/github.com/corona10/goimagehash) + +# goimagehash +> Inspired by [imagehash](https://github.com/JohannesBuchner/imagehash) + +A image hashing library written in Go. 
ImageHash supports: +* [Average hashing](http://www.hackerfactor.com/blog/index.php?/archives/432-Looks-Like-It.html) +* [Difference hashing](http://www.hackerfactor.com/blog/index.php?/archives/529-Kind-of-Like-That.html) +* [Perception hashing](http://www.hackerfactor.com/blog/index.php?/archives/432-Looks-Like-It.html) +* [Wavelet hashing](https://fullstackml.com/wavelet-image-hash-in-python-3504fdd282b5) [TODO] + +## Installation +``` +go get github.com/corona10/goimagehash +``` +## Special thanks to +* [Haeun Kim](https://github.com/haeungun/) + +## Usage + +``` Go +func main() { + file1, _ := os.Open("sample1.jpg") + file2, _ := os.Open("sample2.jpg") + defer file1.Close() + defer file2.Close() + + img1, _ := jpeg.Decode(file1) + img2, _ := jpeg.Decode(file2) + hash1, _ := goimagehash.AverageHash(img1) + hash2, _ := goimagehash.AverageHash(img2) + distance, _ := hash1.Distance(hash2) + fmt.Printf("Distance between images: %v\n", distance) + + hash1, _ = goimagehash.DifferenceHash(img1) + hash2, _ = goimagehash.DifferenceHash(img2) + distance, _ = hash1.Distance(hash2) + fmt.Printf("Distance between images: %v\n", distance) + width, height := 8, 8 + hash3, _ = goimagehash.ExtAverageHash(img1, width, height) + hash4, _ = goimagehash.ExtAverageHash(img2, width, height) + distance, _ = hash3.Distance(hash4) + fmt.Printf("Distance between images: %v\n", distance) + fmt.Printf("hash3 bit size: %v\n", hash3.Bits()) + fmt.Printf("hash4 bit size: %v\n", hash4.Bits()) + + var b bytes.Buffer + foo := bufio.NewWriter(&b) + _ = hash4.Dump(foo) + foo.Flush() + bar := bufio.NewReader(&b) + hash5, _ := goimagehash.LoadExtImageHash(bar) +} +``` + +## Release Note +### v1.0.3 +- Add workflow for GithubAction +- Fix typo on the GoDoc for LoadImageHash + +### v1.0.2 +- go.mod is now used for install goimagehash + +### v1.0.1 +- Perception/ExtPerception hash creation times are reduced + +### v1.0.0 +**IMPORTANT** +goimagehash v1.0.0 does not have compatible with the before 
version for future features + +- More flexible extended hash APIs are provided ([ExtAverageHash](https://godoc.org/github.com/corona10/goimagehash#ExtAverageHash), [ExtPerceptionHash](https://godoc.org/github.com/corona10/goimagehash#ExtPerceptionHash), [ExtDifferenceHash](https://godoc.org/github.com/corona10/goimagehash#ExtDifferenceHash)) +- New serialization APIs are provided([ImageHash.Dump](https://godoc.org/github.com/corona10/goimagehash#ImageHash.Dump), [ExtImageHash.Dump](https://godoc.org/github.com/corona10/goimagehash#ExtImageHash.Dump)) +- [ExtImageHashFromString](https://godoc.org/github.com/corona10/goimagehash#ExtImageHashFromString), [ImageHashFromString](https://godoc.org/github.com/corona10/goimagehash#ImageHashFromString) is deprecated and will be removed +- New deserialization APIs are provided([LoadImageHash](https://godoc.org/github.com/corona10/goimagehash#LoadImageHash), [LoadExtImageHash](https://godoc.org/github.com/corona10/goimagehash#LoadExtImageHash)) +- Bits APIs are provided to measure actual bit size of hash + +### v0.3.0 +- Support DifferenceHashExtend. +- Support AverageHashExtend. +- Support PerceptionHashExtend by @TokyoWolFrog. + +### v0.2.0 +- Perception Hash is updated. +- Fix a critical bug of finding median value. + +### v0.1.0 +- Support Average hashing +- Support Difference hashing +- Support Perception hashing +- Use bits.OnesCount64 for computing Hamming distance by @dominikh +- Support hex serialization methods to ImageHash by @brunoro diff --git a/vendor/github.com/corona10/goimagehash/doc.go b/vendor/github.com/corona10/goimagehash/doc.go new file mode 100644 index 000000000..39e655de0 --- /dev/null +++ b/vendor/github.com/corona10/goimagehash/doc.go @@ -0,0 +1,5 @@ +// Copyright 2017 The goimagehash Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package goimagehash diff --git a/vendor/github.com/corona10/goimagehash/etcs/doc.go b/vendor/github.com/corona10/goimagehash/etcs/doc.go new file mode 100644 index 000000000..0c1485dbc --- /dev/null +++ b/vendor/github.com/corona10/goimagehash/etcs/doc.go @@ -0,0 +1,5 @@ +// Copyright 2017 The goimagehash Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package etcs diff --git a/vendor/github.com/corona10/goimagehash/etcs/utils.go b/vendor/github.com/corona10/goimagehash/etcs/utils.go new file mode 100644 index 000000000..795c75fa6 --- /dev/null +++ b/vendor/github.com/corona10/goimagehash/etcs/utils.go @@ -0,0 +1,61 @@ +// Copyright 2017 The goimagehash Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package etcs + +// MeanOfPixels function returns a mean of pixels. +func MeanOfPixels(pixels []float64) float64 { + m := 0.0 + lens := len(pixels) + if lens == 0 { + return 0 + } + + for _, p := range pixels { + m += p + } + + return m / float64(lens) +} + +// MedianOfPixels function returns a median value of pixels. +// It uses quick selection algorithm. 
+func MedianOfPixels(pixels []float64) float64 { + tmp := make([]float64, len(pixels)) + copy(tmp, pixels) + l := len(tmp) + pos := l / 2 + v := quickSelectMedian(tmp, 0, l-1, pos) + return v +} + +func quickSelectMedian(sequence []float64, low int, hi int, k int) float64 { + if low == hi { + return sequence[k] + } + + for low < hi { + pivot := low/2 + hi/2 + pivotValue := sequence[pivot] + storeIdx := low + sequence[pivot], sequence[hi] = sequence[hi], sequence[pivot] + for i := low; i < hi; i++ { + if sequence[i] < pivotValue { + sequence[storeIdx], sequence[i] = sequence[i], sequence[storeIdx] + storeIdx++ + } + } + sequence[hi], sequence[storeIdx] = sequence[storeIdx], sequence[hi] + if k <= storeIdx { + hi = storeIdx + } else { + low = storeIdx + 1 + } + } + + if len(sequence)%2 == 0 { + return sequence[k-1]/2 + sequence[k]/2 + } + return sequence[k] +} diff --git a/vendor/github.com/corona10/goimagehash/go.mod b/vendor/github.com/corona10/goimagehash/go.mod new file mode 100644 index 000000000..681fdc2bf --- /dev/null +++ b/vendor/github.com/corona10/goimagehash/go.mod @@ -0,0 +1,3 @@ +module github.com/corona10/goimagehash + +require github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 diff --git a/vendor/github.com/corona10/goimagehash/go.sum b/vendor/github.com/corona10/goimagehash/go.sum new file mode 100644 index 000000000..96adbed66 --- /dev/null +++ b/vendor/github.com/corona10/goimagehash/go.sum @@ -0,0 +1,2 @@ +github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 h1:zYyBkD/k9seD2A7fsi6Oo2LfFZAehjjQMERAvZLEDnQ= +github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8= diff --git a/vendor/github.com/corona10/goimagehash/hashcompute.go b/vendor/github.com/corona10/goimagehash/hashcompute.go new file mode 100644 index 000000000..9b1fcbfe3 --- /dev/null +++ b/vendor/github.com/corona10/goimagehash/hashcompute.go @@ -0,0 +1,183 @@ +// Copyright 2017 The goimagehash Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package goimagehash + +import ( + "errors" + "image" + + "github.com/corona10/goimagehash/etcs" + "github.com/corona10/goimagehash/transforms" + "github.com/nfnt/resize" +) + +// AverageHash fuction returns a hash computation of average hash. +// Implementation follows +// http://www.hackerfactor.com/blog/index.php?/archives/432-Looks-Like-It.html +func AverageHash(img image.Image) (*ImageHash, error) { + if img == nil { + return nil, errors.New("Image object can not be nil") + } + + // Create 64bits hash. + ahash := NewImageHash(0, AHash) + resized := resize.Resize(8, 8, img, resize.Bilinear) + pixels := transforms.Rgb2Gray(resized) + flattens := transforms.FlattenPixels(pixels, 8, 8) + avg := etcs.MeanOfPixels(flattens) + + for idx, p := range flattens { + if p > avg { + ahash.leftShiftSet(len(flattens) - idx - 1) + } + } + + return ahash, nil +} + +// DifferenceHash function returns a hash computation of difference hash. +// Implementation follows +// http://www.hackerfactor.com/blog/?/archives/529-Kind-of-Like-That.html +func DifferenceHash(img image.Image) (*ImageHash, error) { + if img == nil { + return nil, errors.New("Image object can not be nil") + } + + dhash := NewImageHash(0, DHash) + resized := resize.Resize(9, 8, img, resize.Bilinear) + pixels := transforms.Rgb2Gray(resized) + idx := 0 + for i := 0; i < len(pixels); i++ { + for j := 0; j < len(pixels[i])-1; j++ { + if pixels[i][j] < pixels[i][j+1] { + dhash.leftShiftSet(64 - idx - 1) + } + idx++ + } + } + + return dhash, nil +} + +// PerceptionHash function returns a hash computation of phash. 
+// Implementation follows +// http://www.hackerfactor.com/blog/index.php?/archives/432-Looks-Like-It.html +func PerceptionHash(img image.Image) (*ImageHash, error) { + if img == nil { + return nil, errors.New("Image object can not be nil") + } + + phash := NewImageHash(0, PHash) + resized := resize.Resize(64, 64, img, resize.Bilinear) + pixels := transforms.Rgb2Gray(resized) + dct := transforms.DCT2D(pixels, 64, 64) + flattens := transforms.FlattenPixels(dct, 8, 8) + median := etcs.MedianOfPixels(flattens) + + for idx, p := range flattens { + if p > median { + phash.leftShiftSet(len(flattens) - idx - 1) + } + } + return phash, nil +} + +// ExtPerceptionHash function returns phash of which the size can be set larger than uint64 +// Some variable name refer to https://github.com/JohannesBuchner/imagehash/blob/master/imagehash/__init__.py +// Support 64bits phash (width=8, height=8) and 256bits phash (width=16, height=16) +// Important: width * height should be the power of 2 +func ExtPerceptionHash(img image.Image, width, height int) (*ExtImageHash, error) { + imgSize := width * height + if img == nil { + return nil, errors.New("Image object can not be nil") + } + if imgSize <= 0 || imgSize&(imgSize-1) != 0 { + return nil, errors.New("width * height should be power of 2") + } + var phash []uint64 + resized := resize.Resize(uint(imgSize), uint(imgSize), img, resize.Bilinear) + pixels := transforms.Rgb2Gray(resized) + dct := transforms.DCT2D(pixels, imgSize, imgSize) + flattens := transforms.FlattenPixels(dct, width, height) + median := etcs.MedianOfPixels(flattens) + + lenOfUnit := 64 + if imgSize%lenOfUnit == 0 { + phash = make([]uint64, imgSize/lenOfUnit) + } else { + phash = make([]uint64, imgSize/lenOfUnit+1) + } + for idx, p := range flattens { + indexOfArray := idx / lenOfUnit + indexOfBit := lenOfUnit - idx%lenOfUnit - 1 + if p > median { + phash[indexOfArray] |= 1 << uint(indexOfBit) + } + } + return NewExtImageHash(phash, PHash, imgSize), nil +} + +// 
ExtAverageHash function returns ahash of which the size can be set larger than uint64 +// Support 64bits ahash (width=8, height=8) and 256bits ahash (width=16, height=16) +func ExtAverageHash(img image.Image, width, height int) (*ExtImageHash, error) { + if img == nil { + return nil, errors.New("Image object can not be nil") + } + var ahash []uint64 + imgSize := width * height + + resized := resize.Resize(uint(width), uint(height), img, resize.Bilinear) + pixels := transforms.Rgb2Gray(resized) + flattens := transforms.FlattenPixels(pixels, width, height) + avg := etcs.MeanOfPixels(flattens) + + lenOfUnit := 64 + if imgSize%lenOfUnit == 0 { + ahash = make([]uint64, imgSize/lenOfUnit) + } else { + ahash = make([]uint64, imgSize/lenOfUnit+1) + } + for idx, p := range flattens { + indexOfArray := idx / lenOfUnit + indexOfBit := lenOfUnit - idx%lenOfUnit - 1 + if p > avg { + ahash[indexOfArray] |= 1 << uint(indexOfBit) + } + } + return NewExtImageHash(ahash, AHash, imgSize), nil +} + +// ExtDifferenceHash function returns dhash of which the size can be set larger than uint64 +// Support 64bits dhash (width=8, height=8) and 256bits dhash (width=16, height=16) +func ExtDifferenceHash(img image.Image, width, height int) (*ExtImageHash, error) { + if img == nil { + return nil, errors.New("Image object can not be nil") + } + + var dhash []uint64 + imgSize := width * height + + resized := resize.Resize(uint(width)+1, uint(height), img, resize.Bilinear) + pixels := transforms.Rgb2Gray(resized) + + lenOfUnit := 64 + if imgSize%lenOfUnit == 0 { + dhash = make([]uint64, imgSize/lenOfUnit) + } else { + dhash = make([]uint64, imgSize/lenOfUnit+1) + } + idx := 0 + for i := 0; i < len(pixels); i++ { + for j := 0; j < len(pixels[i])-1; j++ { + indexOfArray := idx / lenOfUnit + indexOfBit := lenOfUnit - idx%lenOfUnit - 1 + if pixels[i][j] < pixels[i][j+1] { + dhash[indexOfArray] |= 1 << uint(indexOfBit) + } + idx++ + } + } + return NewExtImageHash(dhash, DHash, imgSize), nil +} diff 
--git a/vendor/github.com/corona10/goimagehash/imagehash.go b/vendor/github.com/corona10/goimagehash/imagehash.go new file mode 100644 index 000000000..9cc384a4e --- /dev/null +++ b/vendor/github.com/corona10/goimagehash/imagehash.go @@ -0,0 +1,294 @@ +// Copyright 2017 The goimagehash Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package goimagehash + +import ( + "encoding/binary" + "encoding/gob" + "encoding/hex" + "errors" + "fmt" + "io" +) + +// Kind describes the kinds of hash. +type Kind int + +// ImageHash is a struct of hash computation. +type ImageHash struct { + hash uint64 + kind Kind +} + +// ExtImageHash is a struct of big hash computation. +type ExtImageHash struct { + hash []uint64 + kind Kind + bits int +} + +const ( + // Unknown is a enum value of the unknown hash. + Unknown Kind = iota + // AHash is a enum value of the average hash. + AHash + //PHash is a enum value of the perceptual hash. + PHash + // DHash is a enum value of the difference hash. + DHash + // WHash is a enum value of the wavelet hash. + WHash +) + +// NewImageHash function creates a new image hash. +func NewImageHash(hash uint64, kind Kind) *ImageHash { + return &ImageHash{hash: hash, kind: kind} +} + +// Bits method returns an actual hash bit size +func (h *ImageHash) Bits() int { + return 64 +} + +// Distance method returns a distance between two hashes. +func (h *ImageHash) Distance(other *ImageHash) (int, error) { + if h.GetKind() != other.GetKind() { + return -1, errors.New("Image hashes's kind should be identical") + } + + lhash := h.GetHash() + rhash := other.GetHash() + + hamming := lhash ^ rhash + return popcnt(hamming), nil +} + +// GetHash method returns a 64bits hash value. +func (h *ImageHash) GetHash() uint64 { + return h.hash +} + +// GetKind method returns a kind of image hash. 
+func (h *ImageHash) GetKind() Kind { + return h.kind +} + +func (h *ImageHash) leftShiftSet(idx int) { + h.hash |= 1 << uint(idx) +} + +const strFmt = "%1s:%016x" + +// Dump method writes a binary serialization into w io.Writer. +func (h *ImageHash) Dump(w io.Writer) error { + type D struct { + Hash uint64 + Kind Kind + } + enc := gob.NewEncoder(w) + err := enc.Encode(D{Hash: h.hash, Kind: h.kind}) + if err != nil { + return err + } + return nil +} + +// LoadImageHash method loads a ImageHash from io.Reader. +func LoadImageHash(b io.Reader) (*ImageHash, error) { + type E struct { + Hash uint64 + Kind Kind + } + var e E + dec := gob.NewDecoder(b) + err := dec.Decode(&e) + if err != nil { + return nil, err + } + return &ImageHash{hash: e.Hash, kind: e.Kind}, nil +} + +// ImageHashFromString returns an image hash from a hex representation +// +// Deprecated: Use goimagehash.LoadImageHash instead. +func ImageHashFromString(s string) (*ImageHash, error) { + var kindStr string + var hash uint64 + _, err := fmt.Sscanf(s, strFmt, &kindStr, &hash) + if err != nil { + return nil, errors.New("Couldn't parse string " + s) + } + + kind := Unknown + switch kindStr { + case "a": + kind = AHash + case "p": + kind = PHash + case "d": + kind = DHash + case "w": + kind = WHash + } + return NewImageHash(hash, kind), nil +} + +// ToString returns a hex representation of the hash +func (h *ImageHash) ToString() string { + kindStr := "" + switch h.kind { + case AHash: + kindStr = "a" + case PHash: + kindStr = "p" + case DHash: + kindStr = "d" + case WHash: + kindStr = "w" + } + return fmt.Sprintf(strFmt, kindStr, h.hash) +} + +// NewExtImageHash function creates a new big hash +func NewExtImageHash(hash []uint64, kind Kind, bits int) *ExtImageHash { + return &ExtImageHash{hash: hash, kind: kind, bits: bits} +} + +// Bits method returns an actual hash bit size +func (h *ExtImageHash) Bits() int { + return h.bits +} + +// Distance method returns a distance between two big hashes +func (h 
*ExtImageHash) Distance(other *ExtImageHash) (int, error) { + if h.GetKind() != other.GetKind() { + return -1, errors.New("Extended Image hashes's kind should be identical") + } + + if h.Bits() != other.Bits() { + msg := fmt.Sprintf("Extended image hash should has an identical bit size but got %v vs %v", h.Bits(), other.Bits()) + return -1, errors.New(msg) + } + + lHash := h.GetHash() + rHash := other.GetHash() + if len(lHash) != len(rHash) { + return -1, errors.New("Extended Image hashes's size should be identical") + } + + distance := 0 + for idx, lh := range lHash { + rh := rHash[idx] + hamming := lh ^ rh + distance += popcnt(hamming) + } + return distance, nil +} + +// GetHash method returns a big hash value +func (h *ExtImageHash) GetHash() []uint64 { + return h.hash +} + +// GetKind method returns a kind of big hash +func (h *ExtImageHash) GetKind() Kind { + return h.kind +} + +// Dump method writes a binary serialization into w io.Writer. +func (h *ExtImageHash) Dump(w io.Writer) error { + type D struct { + Hash []uint64 + Kind Kind + Bits int + } + enc := gob.NewEncoder(w) + err := enc.Encode(D{Hash: h.hash, Kind: h.kind, Bits: h.bits}) + if err != nil { + return err + } + return nil +} + +// LoadExtImageHash method loads a ExtImageHash from io.Reader. +func LoadExtImageHash(b io.Reader) (*ExtImageHash, error) { + type E struct { + Hash []uint64 + Kind Kind + Bits int + } + var e E + dec := gob.NewDecoder(b) + err := dec.Decode(&e) + if err != nil { + return nil, err + } + return &ExtImageHash{hash: e.Hash, kind: e.Kind, bits: e.Bits}, nil +} + +const extStrFmt = "%1s:%s" + +// ExtImageHashFromString returns a big hash from a hex representation +// +// Deprecated: Use goimagehash.LoadExtImageHash instead. 
+func ExtImageHashFromString(s string) (*ExtImageHash, error) { + var kindStr string + var hashStr string + _, err := fmt.Sscanf(s, extStrFmt, &kindStr, &hashStr) + if err != nil { + return nil, errors.New("Couldn't parse string " + s) + } + + hexBytes, err := hex.DecodeString(hashStr) + if err != nil { + return nil, err + } + + var hash []uint64 + lenOfByte := 8 + for i := 0; i < len(hexBytes)/lenOfByte; i++ { + startIndex := i * lenOfByte + endIndex := startIndex + lenOfByte + hashUint64 := binary.BigEndian.Uint64(hexBytes[startIndex:endIndex]) + hash = append(hash, hashUint64) + } + + kind := Unknown + switch kindStr { + case "a": + kind = AHash + case "p": + kind = PHash + case "d": + kind = DHash + case "w": + kind = WHash + } + return NewExtImageHash(hash, kind, len(hash)*64), nil +} + +// ToString returns a hex representation of big hash +func (h *ExtImageHash) ToString() string { + var hexBytes []byte + for _, hash := range h.hash { + hashBytes := make([]byte, 8) + binary.BigEndian.PutUint64(hashBytes, hash) + hexBytes = append(hexBytes, hashBytes...) 
+ } + hexStr := hex.EncodeToString(hexBytes) + + kindStr := "" + switch h.kind { + case AHash: + kindStr = "a" + case PHash: + kindStr = "p" + case DHash: + kindStr = "d" + case WHash: + kindStr = "w" + } + return fmt.Sprintf(extStrFmt, kindStr, hexStr) +} diff --git a/vendor/github.com/corona10/goimagehash/imagehash18.go b/vendor/github.com/corona10/goimagehash/imagehash18.go new file mode 100644 index 000000000..e8d3fd62a --- /dev/null +++ b/vendor/github.com/corona10/goimagehash/imagehash18.go @@ -0,0 +1,13 @@ +// +build !go1.9 + +package goimagehash + +func popcnt(x uint64) int { + diff := 0 + for x != 0 { + diff += int(x & 1) + x >>= 1 + } + + return diff +} diff --git a/vendor/github.com/corona10/goimagehash/imagehash19.go b/vendor/github.com/corona10/goimagehash/imagehash19.go new file mode 100644 index 000000000..c1d47be36 --- /dev/null +++ b/vendor/github.com/corona10/goimagehash/imagehash19.go @@ -0,0 +1,9 @@ +// +build go1.9 + +package goimagehash + +import ( + "math/bits" +) + +func popcnt(x uint64) int { return bits.OnesCount64(x) } diff --git a/vendor/github.com/corona10/goimagehash/transforms/dct.go b/vendor/github.com/corona10/goimagehash/transforms/dct.go new file mode 100644 index 000000000..b0976a3bc --- /dev/null +++ b/vendor/github.com/corona10/goimagehash/transforms/dct.go @@ -0,0 +1,75 @@ +// Copyright 2017 The goimagehash Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package transforms + +import ( + "math" + "sync" +) + +// DCT1D function returns result of DCT-II. +// DCT type II, unscaled. Algorithm by Byeong Gi Lee, 1984. 
+func DCT1D(input []float64) []float64 { + temp := make([]float64, len(input)) + forwardTransform(input, temp, len(input)) + return input +} + +func forwardTransform(input, temp []float64, Len int) { + if Len == 1 { + return + } + + halfLen := Len / 2 + for i := 0; i < halfLen; i++ { + x, y := input[i], input[Len-1-i] + temp[i] = x + y + temp[i+halfLen] = (x - y) / (math.Cos((float64(i)+0.5)*math.Pi/float64(Len)) * 2) + } + forwardTransform(temp, input, halfLen) + forwardTransform(temp[halfLen:], input, halfLen) + for i := 0; i < halfLen-1; i++ { + input[i*2+0] = temp[i] + input[i*2+1] = temp[i+halfLen] + temp[i+halfLen+1] + } + input[Len-2], input[Len-1] = temp[halfLen-1], temp[Len-1] +} + +// DCT2D function returns a result of DCT2D by using the seperable property. +func DCT2D(input [][]float64, w int, h int) [][]float64 { + output := make([][]float64, h) + for i := range output { + output[i] = make([]float64, w) + } + + wg := new(sync.WaitGroup) + for i := 0; i < h; i++ { + wg.Add(1) + go func(i int) { + cols := DCT1D(input[i]) + output[i] = cols + wg.Done() + }(i) + } + + wg.Wait() + for i := 0; i < w; i++ { + wg.Add(1) + in := make([]float64, h) + go func(i int) { + for j := 0; j < h; j++ { + in[j] = output[j][i] + } + rows := DCT1D(in) + for j := 0; j < len(rows); j++ { + output[j][i] = rows[j] + } + wg.Done() + }(i) + } + + wg.Wait() + return output +} diff --git a/vendor/github.com/corona10/goimagehash/transforms/doc.go b/vendor/github.com/corona10/goimagehash/transforms/doc.go new file mode 100644 index 000000000..01bd0e839 --- /dev/null +++ b/vendor/github.com/corona10/goimagehash/transforms/doc.go @@ -0,0 +1,5 @@ +// Copyright 2017 The goimagehash Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package transforms diff --git a/vendor/github.com/corona10/goimagehash/transforms/pixels.go b/vendor/github.com/corona10/goimagehash/transforms/pixels.go new file mode 100644 index 000000000..378e1b559 --- /dev/null +++ b/vendor/github.com/corona10/goimagehash/transforms/pixels.go @@ -0,0 +1,39 @@ +// Copyright 2017 The goimagehash Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package transforms + +import ( + "image" +) + +// Rgb2Gray function converts RGB to a gray scale array. +func Rgb2Gray(colorImg image.Image) [][]float64 { + bounds := colorImg.Bounds() + w, h := bounds.Max.X-bounds.Min.X, bounds.Max.Y-bounds.Min.Y + pixels := make([][]float64, h) + + for i := range pixels { + pixels[i] = make([]float64, w) + for j := range pixels[i] { + color := colorImg.At(j, i) + r, g, b, _ := color.RGBA() + lum := 0.299*float64(r/257) + 0.587*float64(g/257) + 0.114*float64(b/256) + pixels[i][j] = lum + } + } + + return pixels +} + +// FlattenPixels function flattens 2d array into 1d array. 
+func FlattenPixels(pixels [][]float64, x int, y int) []float64 { + flattens := make([]float64, x*y) + for i := 0; i < y; i++ { + for j := 0; j < x; j++ { + flattens[y*i+j] = pixels[i][j] + } + } + return flattens +} diff --git a/vendor/github.com/nfnt/resize/.travis.yml b/vendor/github.com/nfnt/resize/.travis.yml new file mode 100644 index 000000000..5ff08e7e4 --- /dev/null +++ b/vendor/github.com/nfnt/resize/.travis.yml @@ -0,0 +1,7 @@ +language: go + +go: + - "1.x" + - "1.1" + - "1.4" + - "1.10" diff --git a/vendor/github.com/nfnt/resize/LICENSE b/vendor/github.com/nfnt/resize/LICENSE new file mode 100644 index 000000000..7836cad5f --- /dev/null +++ b/vendor/github.com/nfnt/resize/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2012, Jan Schlicht + +Permission to use, copy, modify, and/or distribute this software for any purpose +with or without fee is hereby granted, provided that the above copyright notice +and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF +THIS SOFTWARE. diff --git a/vendor/github.com/nfnt/resize/README.md b/vendor/github.com/nfnt/resize/README.md new file mode 100644 index 000000000..372777d2e --- /dev/null +++ b/vendor/github.com/nfnt/resize/README.md @@ -0,0 +1,151 @@ +# This package is no longer being updated! Please look for alternatives if that bothers you. + +Resize +====== + +Image resizing for the [Go programming language](http://golang.org) with common interpolation methods. 
+ +[![Build Status](https://travis-ci.org/nfnt/resize.svg)](https://travis-ci.org/nfnt/resize) + +Installation +------------ + +```bash +$ go get github.com/nfnt/resize +``` + +It's that easy! + +Usage +----- + +This package needs at least Go 1.1. Import package with + +```go +import "github.com/nfnt/resize" +``` + +The resize package provides 2 functions: + +* `resize.Resize` creates a scaled image with new dimensions (`width`, `height`) using the interpolation function `interp`. + If either `width` or `height` is set to 0, it will be set to an aspect ratio preserving value. +* `resize.Thumbnail` downscales an image preserving its aspect ratio to the maximum dimensions (`maxWidth`, `maxHeight`). + It will return the original image if original sizes are smaller than the provided dimensions. + +```go +resize.Resize(width, height uint, img image.Image, interp resize.InterpolationFunction) image.Image +resize.Thumbnail(maxWidth, maxHeight uint, img image.Image, interp resize.InterpolationFunction) image.Image +``` + +The provided interpolation functions are (from fast to slow execution time) + +- `NearestNeighbor`: [Nearest-neighbor interpolation](http://en.wikipedia.org/wiki/Nearest-neighbor_interpolation) +- `Bilinear`: [Bilinear interpolation](http://en.wikipedia.org/wiki/Bilinear_interpolation) +- `Bicubic`: [Bicubic interpolation](http://en.wikipedia.org/wiki/Bicubic_interpolation) +- `MitchellNetravali`: [Mitchell-Netravali interpolation](http://dl.acm.org/citation.cfm?id=378514) +- `Lanczos2`: [Lanczos resampling](http://en.wikipedia.org/wiki/Lanczos_resampling) with a=2 +- `Lanczos3`: [Lanczos resampling](http://en.wikipedia.org/wiki/Lanczos_resampling) with a=3 + +Which of these methods gives the best results depends on your use case. 
+ +Sample usage: + +```go +package main + +import ( + "github.com/nfnt/resize" + "image/jpeg" + "log" + "os" +) + +func main() { + // open "test.jpg" + file, err := os.Open("test.jpg") + if err != nil { + log.Fatal(err) + } + + // decode jpeg into image.Image + img, err := jpeg.Decode(file) + if err != nil { + log.Fatal(err) + } + file.Close() + + // resize to width 1000 using Lanczos resampling + // and preserve aspect ratio + m := resize.Resize(1000, 0, img, resize.Lanczos3) + + out, err := os.Create("test_resized.jpg") + if err != nil { + log.Fatal(err) + } + defer out.Close() + + // write new image to file + jpeg.Encode(out, m, nil) +} +``` + +Caveats +------- + +* Optimized access routines are used for `image.RGBA`, `image.NRGBA`, `image.RGBA64`, `image.NRGBA64`, `image.YCbCr`, `image.Gray`, and `image.Gray16` types. All other image types are accessed in a generic way that will result in slow processing speed. +* JPEG images are stored in `image.YCbCr`. This image format stores data in a way that will decrease processing speed. A resize may be up to 2 times slower than with `image.RGBA`. + + +Downsizing Samples +------- + +Downsizing is not as simple as it might look like. Images have to be filtered before they are scaled down, otherwise aliasing might occur. +Filtering is highly subjective: Applying too much will blur the whole image, too little will make aliasing become apparent. +Resize tries to provide sane defaults that should suffice in most cases. + +### Artificial sample + +Original image +![Rings](http://nfnt.github.com/img/rings_lg_orig.png) + + + + + + + + + + + + + + +

Nearest-Neighbor

Bilinear

Bicubic

Mitchell-Netravali

Lanczos2

Lanczos3
+ +### Real-Life sample + +Original image +![Original](http://nfnt.github.com/img/IMG_3694_720.jpg) + + + + + + + + + + + + + + +

Nearest-Neighbor

Bilinear

Bicubic

Mitchell-Netravali

Lanczos2

Lanczos3
+ + +License +------- + +Copyright (c) 2012 Jan Schlicht +Resize is released under a MIT style license. diff --git a/vendor/github.com/nfnt/resize/converter.go b/vendor/github.com/nfnt/resize/converter.go new file mode 100644 index 000000000..f9c520d09 --- /dev/null +++ b/vendor/github.com/nfnt/resize/converter.go @@ -0,0 +1,438 @@ +/* +Copyright (c) 2012, Jan Schlicht + +Permission to use, copy, modify, and/or distribute this software for any purpose +with or without fee is hereby granted, provided that the above copyright notice +and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF +THIS SOFTWARE. +*/ + +package resize + +import "image" + +// Keep value in [0,255] range. +func clampUint8(in int32) uint8 { + // casting a negative int to an uint will result in an overflown + // large uint. this behavior will be exploited here and in other functions + // to achieve a higher performance. + if uint32(in) < 256 { + return uint8(in) + } + if in > 255 { + return 255 + } + return 0 +} + +// Keep value in [0,65535] range. 
+func clampUint16(in int64) uint16 { + if uint64(in) < 65536 { + return uint16(in) + } + if in > 65535 { + return 65535 + } + return 0 +} + +func resizeGeneric(in image.Image, out *image.RGBA64, scale float64, coeffs []int32, offset []int, filterLength int) { + newBounds := out.Bounds() + maxX := in.Bounds().Dx() - 1 + + for x := newBounds.Min.X; x < newBounds.Max.X; x++ { + for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ { + var rgba [4]int64 + var sum int64 + start := offset[y] + ci := y * filterLength + for i := 0; i < filterLength; i++ { + coeff := coeffs[ci+i] + if coeff != 0 { + xi := start + i + switch { + case xi < 0: + xi = 0 + case xi >= maxX: + xi = maxX + } + + r, g, b, a := in.At(xi+in.Bounds().Min.X, x+in.Bounds().Min.Y).RGBA() + + rgba[0] += int64(coeff) * int64(r) + rgba[1] += int64(coeff) * int64(g) + rgba[2] += int64(coeff) * int64(b) + rgba[3] += int64(coeff) * int64(a) + sum += int64(coeff) + } + } + + offset := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*8 + + value := clampUint16(rgba[0] / sum) + out.Pix[offset+0] = uint8(value >> 8) + out.Pix[offset+1] = uint8(value) + value = clampUint16(rgba[1] / sum) + out.Pix[offset+2] = uint8(value >> 8) + out.Pix[offset+3] = uint8(value) + value = clampUint16(rgba[2] / sum) + out.Pix[offset+4] = uint8(value >> 8) + out.Pix[offset+5] = uint8(value) + value = clampUint16(rgba[3] / sum) + out.Pix[offset+6] = uint8(value >> 8) + out.Pix[offset+7] = uint8(value) + } + } +} + +func resizeRGBA(in *image.RGBA, out *image.RGBA, scale float64, coeffs []int16, offset []int, filterLength int) { + newBounds := out.Bounds() + maxX := in.Bounds().Dx() - 1 + + for x := newBounds.Min.X; x < newBounds.Max.X; x++ { + row := in.Pix[x*in.Stride:] + for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ { + var rgba [4]int32 + var sum int32 + start := offset[y] + ci := y * filterLength + for i := 0; i < filterLength; i++ { + coeff := coeffs[ci+i] + if coeff != 0 { + xi := start + i + switch { + case uint(xi) < 
uint(maxX): + xi *= 4 + case xi >= maxX: + xi = 4 * maxX + default: + xi = 0 + } + + rgba[0] += int32(coeff) * int32(row[xi+0]) + rgba[1] += int32(coeff) * int32(row[xi+1]) + rgba[2] += int32(coeff) * int32(row[xi+2]) + rgba[3] += int32(coeff) * int32(row[xi+3]) + sum += int32(coeff) + } + } + + xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*4 + + out.Pix[xo+0] = clampUint8(rgba[0] / sum) + out.Pix[xo+1] = clampUint8(rgba[1] / sum) + out.Pix[xo+2] = clampUint8(rgba[2] / sum) + out.Pix[xo+3] = clampUint8(rgba[3] / sum) + } + } +} + +func resizeNRGBA(in *image.NRGBA, out *image.RGBA, scale float64, coeffs []int16, offset []int, filterLength int) { + newBounds := out.Bounds() + maxX := in.Bounds().Dx() - 1 + + for x := newBounds.Min.X; x < newBounds.Max.X; x++ { + row := in.Pix[x*in.Stride:] + for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ { + var rgba [4]int32 + var sum int32 + start := offset[y] + ci := y * filterLength + for i := 0; i < filterLength; i++ { + coeff := coeffs[ci+i] + if coeff != 0 { + xi := start + i + switch { + case uint(xi) < uint(maxX): + xi *= 4 + case xi >= maxX: + xi = 4 * maxX + default: + xi = 0 + } + + // Forward alpha-premultiplication + a := int32(row[xi+3]) + r := int32(row[xi+0]) * a + r /= 0xff + g := int32(row[xi+1]) * a + g /= 0xff + b := int32(row[xi+2]) * a + b /= 0xff + + rgba[0] += int32(coeff) * r + rgba[1] += int32(coeff) * g + rgba[2] += int32(coeff) * b + rgba[3] += int32(coeff) * a + sum += int32(coeff) + } + } + + xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*4 + + out.Pix[xo+0] = clampUint8(rgba[0] / sum) + out.Pix[xo+1] = clampUint8(rgba[1] / sum) + out.Pix[xo+2] = clampUint8(rgba[2] / sum) + out.Pix[xo+3] = clampUint8(rgba[3] / sum) + } + } +} + +func resizeRGBA64(in *image.RGBA64, out *image.RGBA64, scale float64, coeffs []int32, offset []int, filterLength int) { + newBounds := out.Bounds() + maxX := in.Bounds().Dx() - 1 + + for x := newBounds.Min.X; x < newBounds.Max.X; x++ { + row := 
in.Pix[x*in.Stride:] + for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ { + var rgba [4]int64 + var sum int64 + start := offset[y] + ci := y * filterLength + for i := 0; i < filterLength; i++ { + coeff := coeffs[ci+i] + if coeff != 0 { + xi := start + i + switch { + case uint(xi) < uint(maxX): + xi *= 8 + case xi >= maxX: + xi = 8 * maxX + default: + xi = 0 + } + + rgba[0] += int64(coeff) * (int64(row[xi+0])<<8 | int64(row[xi+1])) + rgba[1] += int64(coeff) * (int64(row[xi+2])<<8 | int64(row[xi+3])) + rgba[2] += int64(coeff) * (int64(row[xi+4])<<8 | int64(row[xi+5])) + rgba[3] += int64(coeff) * (int64(row[xi+6])<<8 | int64(row[xi+7])) + sum += int64(coeff) + } + } + + xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*8 + + value := clampUint16(rgba[0] / sum) + out.Pix[xo+0] = uint8(value >> 8) + out.Pix[xo+1] = uint8(value) + value = clampUint16(rgba[1] / sum) + out.Pix[xo+2] = uint8(value >> 8) + out.Pix[xo+3] = uint8(value) + value = clampUint16(rgba[2] / sum) + out.Pix[xo+4] = uint8(value >> 8) + out.Pix[xo+5] = uint8(value) + value = clampUint16(rgba[3] / sum) + out.Pix[xo+6] = uint8(value >> 8) + out.Pix[xo+7] = uint8(value) + } + } +} + +func resizeNRGBA64(in *image.NRGBA64, out *image.RGBA64, scale float64, coeffs []int32, offset []int, filterLength int) { + newBounds := out.Bounds() + maxX := in.Bounds().Dx() - 1 + + for x := newBounds.Min.X; x < newBounds.Max.X; x++ { + row := in.Pix[x*in.Stride:] + for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ { + var rgba [4]int64 + var sum int64 + start := offset[y] + ci := y * filterLength + for i := 0; i < filterLength; i++ { + coeff := coeffs[ci+i] + if coeff != 0 { + xi := start + i + switch { + case uint(xi) < uint(maxX): + xi *= 8 + case xi >= maxX: + xi = 8 * maxX + default: + xi = 0 + } + + // Forward alpha-premultiplication + a := int64(uint16(row[xi+6])<<8 | uint16(row[xi+7])) + r := int64(uint16(row[xi+0])<<8|uint16(row[xi+1])) * a + r /= 0xffff + g := 
int64(uint16(row[xi+2])<<8|uint16(row[xi+3])) * a + g /= 0xffff + b := int64(uint16(row[xi+4])<<8|uint16(row[xi+5])) * a + b /= 0xffff + + rgba[0] += int64(coeff) * r + rgba[1] += int64(coeff) * g + rgba[2] += int64(coeff) * b + rgba[3] += int64(coeff) * a + sum += int64(coeff) + } + } + + xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*8 + + value := clampUint16(rgba[0] / sum) + out.Pix[xo+0] = uint8(value >> 8) + out.Pix[xo+1] = uint8(value) + value = clampUint16(rgba[1] / sum) + out.Pix[xo+2] = uint8(value >> 8) + out.Pix[xo+3] = uint8(value) + value = clampUint16(rgba[2] / sum) + out.Pix[xo+4] = uint8(value >> 8) + out.Pix[xo+5] = uint8(value) + value = clampUint16(rgba[3] / sum) + out.Pix[xo+6] = uint8(value >> 8) + out.Pix[xo+7] = uint8(value) + } + } +} + +func resizeGray(in *image.Gray, out *image.Gray, scale float64, coeffs []int16, offset []int, filterLength int) { + newBounds := out.Bounds() + maxX := in.Bounds().Dx() - 1 + + for x := newBounds.Min.X; x < newBounds.Max.X; x++ { + row := in.Pix[(x-newBounds.Min.X)*in.Stride:] + for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ { + var gray int32 + var sum int32 + start := offset[y] + ci := y * filterLength + for i := 0; i < filterLength; i++ { + coeff := coeffs[ci+i] + if coeff != 0 { + xi := start + i + switch { + case xi < 0: + xi = 0 + case xi >= maxX: + xi = maxX + } + gray += int32(coeff) * int32(row[xi]) + sum += int32(coeff) + } + } + + offset := (y-newBounds.Min.Y)*out.Stride + (x - newBounds.Min.X) + out.Pix[offset] = clampUint8(gray / sum) + } + } +} + +func resizeGray16(in *image.Gray16, out *image.Gray16, scale float64, coeffs []int32, offset []int, filterLength int) { + newBounds := out.Bounds() + maxX := in.Bounds().Dx() - 1 + + for x := newBounds.Min.X; x < newBounds.Max.X; x++ { + row := in.Pix[x*in.Stride:] + for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ { + var gray int64 + var sum int64 + start := offset[y] + ci := y * filterLength + for i := 0; i < filterLength; i++ { 
+ coeff := coeffs[ci+i] + if coeff != 0 { + xi := start + i + switch { + case uint(xi) < uint(maxX): + xi *= 2 + case xi >= maxX: + xi = 2 * maxX + default: + xi = 0 + } + gray += int64(coeff) * int64(uint16(row[xi+0])<<8|uint16(row[xi+1])) + sum += int64(coeff) + } + } + + offset := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*2 + value := clampUint16(gray / sum) + out.Pix[offset+0] = uint8(value >> 8) + out.Pix[offset+1] = uint8(value) + } + } +} + +func resizeYCbCr(in *ycc, out *ycc, scale float64, coeffs []int16, offset []int, filterLength int) { + newBounds := out.Bounds() + maxX := in.Bounds().Dx() - 1 + + for x := newBounds.Min.X; x < newBounds.Max.X; x++ { + row := in.Pix[x*in.Stride:] + for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ { + var p [3]int32 + var sum int32 + start := offset[y] + ci := y * filterLength + for i := 0; i < filterLength; i++ { + coeff := coeffs[ci+i] + if coeff != 0 { + xi := start + i + switch { + case uint(xi) < uint(maxX): + xi *= 3 + case xi >= maxX: + xi = 3 * maxX + default: + xi = 0 + } + p[0] += int32(coeff) * int32(row[xi+0]) + p[1] += int32(coeff) * int32(row[xi+1]) + p[2] += int32(coeff) * int32(row[xi+2]) + sum += int32(coeff) + } + } + + xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*3 + out.Pix[xo+0] = clampUint8(p[0] / sum) + out.Pix[xo+1] = clampUint8(p[1] / sum) + out.Pix[xo+2] = clampUint8(p[2] / sum) + } + } +} + +func nearestYCbCr(in *ycc, out *ycc, scale float64, coeffs []bool, offset []int, filterLength int) { + newBounds := out.Bounds() + maxX := in.Bounds().Dx() - 1 + + for x := newBounds.Min.X; x < newBounds.Max.X; x++ { + row := in.Pix[x*in.Stride:] + for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ { + var p [3]float32 + var sum float32 + start := offset[y] + ci := y * filterLength + for i := 0; i < filterLength; i++ { + if coeffs[ci+i] { + xi := start + i + switch { + case uint(xi) < uint(maxX): + xi *= 3 + case xi >= maxX: + xi = 3 * maxX + default: + xi = 0 + } + p[0] += 
float32(row[xi+0]) + p[1] += float32(row[xi+1]) + p[2] += float32(row[xi+2]) + sum++ + } + } + + xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*3 + out.Pix[xo+0] = floatToUint8(p[0] / sum) + out.Pix[xo+1] = floatToUint8(p[1] / sum) + out.Pix[xo+2] = floatToUint8(p[2] / sum) + } + } +} diff --git a/vendor/github.com/nfnt/resize/filters.go b/vendor/github.com/nfnt/resize/filters.go new file mode 100644 index 000000000..4ce04e389 --- /dev/null +++ b/vendor/github.com/nfnt/resize/filters.go @@ -0,0 +1,143 @@ +/* +Copyright (c) 2012, Jan Schlicht + +Permission to use, copy, modify, and/or distribute this software for any purpose +with or without fee is hereby granted, provided that the above copyright notice +and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF +THIS SOFTWARE. 
+*/ + +package resize + +import ( + "math" +) + +func nearest(in float64) float64 { + if in >= -0.5 && in < 0.5 { + return 1 + } + return 0 +} + +func linear(in float64) float64 { + in = math.Abs(in) + if in <= 1 { + return 1 - in + } + return 0 +} + +func cubic(in float64) float64 { + in = math.Abs(in) + if in <= 1 { + return in*in*(1.5*in-2.5) + 1.0 + } + if in <= 2 { + return in*(in*(2.5-0.5*in)-4.0) + 2.0 + } + return 0 +} + +func mitchellnetravali(in float64) float64 { + in = math.Abs(in) + if in <= 1 { + return (7.0*in*in*in - 12.0*in*in + 5.33333333333) * 0.16666666666 + } + if in <= 2 { + return (-2.33333333333*in*in*in + 12.0*in*in - 20.0*in + 10.6666666667) * 0.16666666666 + } + return 0 +} + +func sinc(x float64) float64 { + x = math.Abs(x) * math.Pi + if x >= 1.220703e-4 { + return math.Sin(x) / x + } + return 1 +} + +func lanczos2(in float64) float64 { + if in > -2 && in < 2 { + return sinc(in) * sinc(in*0.5) + } + return 0 +} + +func lanczos3(in float64) float64 { + if in > -3 && in < 3 { + return sinc(in) * sinc(in*0.3333333333333333) + } + return 0 +} + +// range [-256,256] +func createWeights8(dy, filterLength int, blur, scale float64, kernel func(float64) float64) ([]int16, []int, int) { + filterLength = filterLength * int(math.Max(math.Ceil(blur*scale), 1)) + filterFactor := math.Min(1./(blur*scale), 1) + + coeffs := make([]int16, dy*filterLength) + start := make([]int, dy) + for y := 0; y < dy; y++ { + interpX := scale*(float64(y)+0.5) - 0.5 + start[y] = int(interpX) - filterLength/2 + 1 + interpX -= float64(start[y]) + for i := 0; i < filterLength; i++ { + in := (interpX - float64(i)) * filterFactor + coeffs[y*filterLength+i] = int16(kernel(in) * 256) + } + } + + return coeffs, start, filterLength +} + +// range [-65536,65536] +func createWeights16(dy, filterLength int, blur, scale float64, kernel func(float64) float64) ([]int32, []int, int) { + filterLength = filterLength * int(math.Max(math.Ceil(blur*scale), 1)) + filterFactor := 
math.Min(1./(blur*scale), 1) + + coeffs := make([]int32, dy*filterLength) + start := make([]int, dy) + for y := 0; y < dy; y++ { + interpX := scale*(float64(y)+0.5) - 0.5 + start[y] = int(interpX) - filterLength/2 + 1 + interpX -= float64(start[y]) + for i := 0; i < filterLength; i++ { + in := (interpX - float64(i)) * filterFactor + coeffs[y*filterLength+i] = int32(kernel(in) * 65536) + } + } + + return coeffs, start, filterLength +} + +func createWeightsNearest(dy, filterLength int, blur, scale float64) ([]bool, []int, int) { + filterLength = filterLength * int(math.Max(math.Ceil(blur*scale), 1)) + filterFactor := math.Min(1./(blur*scale), 1) + + coeffs := make([]bool, dy*filterLength) + start := make([]int, dy) + for y := 0; y < dy; y++ { + interpX := scale*(float64(y)+0.5) - 0.5 + start[y] = int(interpX) - filterLength/2 + 1 + interpX -= float64(start[y]) + for i := 0; i < filterLength; i++ { + in := (interpX - float64(i)) * filterFactor + if in >= -0.5 && in < 0.5 { + coeffs[y*filterLength+i] = true + } else { + coeffs[y*filterLength+i] = false + } + } + } + + return coeffs, start, filterLength +} diff --git a/vendor/github.com/nfnt/resize/nearest.go b/vendor/github.com/nfnt/resize/nearest.go new file mode 100644 index 000000000..888039d85 --- /dev/null +++ b/vendor/github.com/nfnt/resize/nearest.go @@ -0,0 +1,318 @@ +/* +Copyright (c) 2014, Charlie Vieth + +Permission to use, copy, modify, and/or distribute this software for any purpose +with or without fee is hereby granted, provided that the above copyright notice +and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF +THIS SOFTWARE. +*/ + +package resize + +import "image" + +func floatToUint8(x float32) uint8 { + // Nearest-neighbor values are always + // positive no need to check lower-bound. + if x > 0xfe { + return 0xff + } + return uint8(x) +} + +func floatToUint16(x float32) uint16 { + if x > 0xfffe { + return 0xffff + } + return uint16(x) +} + +func nearestGeneric(in image.Image, out *image.RGBA64, scale float64, coeffs []bool, offset []int, filterLength int) { + newBounds := out.Bounds() + maxX := in.Bounds().Dx() - 1 + + for x := newBounds.Min.X; x < newBounds.Max.X; x++ { + for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ { + var rgba [4]float32 + var sum float32 + start := offset[y] + ci := y * filterLength + for i := 0; i < filterLength; i++ { + if coeffs[ci+i] { + xi := start + i + switch { + case xi < 0: + xi = 0 + case xi >= maxX: + xi = maxX + } + r, g, b, a := in.At(xi+in.Bounds().Min.X, x+in.Bounds().Min.Y).RGBA() + rgba[0] += float32(r) + rgba[1] += float32(g) + rgba[2] += float32(b) + rgba[3] += float32(a) + sum++ + } + } + + offset := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*8 + value := floatToUint16(rgba[0] / sum) + out.Pix[offset+0] = uint8(value >> 8) + out.Pix[offset+1] = uint8(value) + value = floatToUint16(rgba[1] / sum) + out.Pix[offset+2] = uint8(value >> 8) + out.Pix[offset+3] = uint8(value) + value = floatToUint16(rgba[2] / sum) + out.Pix[offset+4] = uint8(value >> 8) + out.Pix[offset+5] = uint8(value) + value = floatToUint16(rgba[3] / sum) + out.Pix[offset+6] = uint8(value >> 8) + out.Pix[offset+7] = uint8(value) + } + } +} + +func nearestRGBA(in *image.RGBA, out *image.RGBA, scale float64, coeffs []bool, offset []int, filterLength 
int) { + newBounds := out.Bounds() + maxX := in.Bounds().Dx() - 1 + + for x := newBounds.Min.X; x < newBounds.Max.X; x++ { + row := in.Pix[x*in.Stride:] + for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ { + var rgba [4]float32 + var sum float32 + start := offset[y] + ci := y * filterLength + for i := 0; i < filterLength; i++ { + if coeffs[ci+i] { + xi := start + i + switch { + case uint(xi) < uint(maxX): + xi *= 4 + case xi >= maxX: + xi = 4 * maxX + default: + xi = 0 + } + rgba[0] += float32(row[xi+0]) + rgba[1] += float32(row[xi+1]) + rgba[2] += float32(row[xi+2]) + rgba[3] += float32(row[xi+3]) + sum++ + } + } + + xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*4 + out.Pix[xo+0] = floatToUint8(rgba[0] / sum) + out.Pix[xo+1] = floatToUint8(rgba[1] / sum) + out.Pix[xo+2] = floatToUint8(rgba[2] / sum) + out.Pix[xo+3] = floatToUint8(rgba[3] / sum) + } + } +} + +func nearestNRGBA(in *image.NRGBA, out *image.NRGBA, scale float64, coeffs []bool, offset []int, filterLength int) { + newBounds := out.Bounds() + maxX := in.Bounds().Dx() - 1 + + for x := newBounds.Min.X; x < newBounds.Max.X; x++ { + row := in.Pix[x*in.Stride:] + for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ { + var rgba [4]float32 + var sum float32 + start := offset[y] + ci := y * filterLength + for i := 0; i < filterLength; i++ { + if coeffs[ci+i] { + xi := start + i + switch { + case uint(xi) < uint(maxX): + xi *= 4 + case xi >= maxX: + xi = 4 * maxX + default: + xi = 0 + } + rgba[0] += float32(row[xi+0]) + rgba[1] += float32(row[xi+1]) + rgba[2] += float32(row[xi+2]) + rgba[3] += float32(row[xi+3]) + sum++ + } + } + + xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*4 + out.Pix[xo+0] = floatToUint8(rgba[0] / sum) + out.Pix[xo+1] = floatToUint8(rgba[1] / sum) + out.Pix[xo+2] = floatToUint8(rgba[2] / sum) + out.Pix[xo+3] = floatToUint8(rgba[3] / sum) + } + } +} + +func nearestRGBA64(in *image.RGBA64, out *image.RGBA64, scale float64, coeffs []bool, offset []int, filterLength 
int) { + newBounds := out.Bounds() + maxX := in.Bounds().Dx() - 1 + + for x := newBounds.Min.X; x < newBounds.Max.X; x++ { + row := in.Pix[x*in.Stride:] + for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ { + var rgba [4]float32 + var sum float32 + start := offset[y] + ci := y * filterLength + for i := 0; i < filterLength; i++ { + if coeffs[ci+i] { + xi := start + i + switch { + case uint(xi) < uint(maxX): + xi *= 8 + case xi >= maxX: + xi = 8 * maxX + default: + xi = 0 + } + rgba[0] += float32(uint16(row[xi+0])<<8 | uint16(row[xi+1])) + rgba[1] += float32(uint16(row[xi+2])<<8 | uint16(row[xi+3])) + rgba[2] += float32(uint16(row[xi+4])<<8 | uint16(row[xi+5])) + rgba[3] += float32(uint16(row[xi+6])<<8 | uint16(row[xi+7])) + sum++ + } + } + + xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*8 + value := floatToUint16(rgba[0] / sum) + out.Pix[xo+0] = uint8(value >> 8) + out.Pix[xo+1] = uint8(value) + value = floatToUint16(rgba[1] / sum) + out.Pix[xo+2] = uint8(value >> 8) + out.Pix[xo+3] = uint8(value) + value = floatToUint16(rgba[2] / sum) + out.Pix[xo+4] = uint8(value >> 8) + out.Pix[xo+5] = uint8(value) + value = floatToUint16(rgba[3] / sum) + out.Pix[xo+6] = uint8(value >> 8) + out.Pix[xo+7] = uint8(value) + } + } +} + +func nearestNRGBA64(in *image.NRGBA64, out *image.NRGBA64, scale float64, coeffs []bool, offset []int, filterLength int) { + newBounds := out.Bounds() + maxX := in.Bounds().Dx() - 1 + + for x := newBounds.Min.X; x < newBounds.Max.X; x++ { + row := in.Pix[x*in.Stride:] + for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ { + var rgba [4]float32 + var sum float32 + start := offset[y] + ci := y * filterLength + for i := 0; i < filterLength; i++ { + if coeffs[ci+i] { + xi := start + i + switch { + case uint(xi) < uint(maxX): + xi *= 8 + case xi >= maxX: + xi = 8 * maxX + default: + xi = 0 + } + rgba[0] += float32(uint16(row[xi+0])<<8 | uint16(row[xi+1])) + rgba[1] += float32(uint16(row[xi+2])<<8 | uint16(row[xi+3])) + rgba[2] += 
float32(uint16(row[xi+4])<<8 | uint16(row[xi+5])) + rgba[3] += float32(uint16(row[xi+6])<<8 | uint16(row[xi+7])) + sum++ + } + } + + xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*8 + value := floatToUint16(rgba[0] / sum) + out.Pix[xo+0] = uint8(value >> 8) + out.Pix[xo+1] = uint8(value) + value = floatToUint16(rgba[1] / sum) + out.Pix[xo+2] = uint8(value >> 8) + out.Pix[xo+3] = uint8(value) + value = floatToUint16(rgba[2] / sum) + out.Pix[xo+4] = uint8(value >> 8) + out.Pix[xo+5] = uint8(value) + value = floatToUint16(rgba[3] / sum) + out.Pix[xo+6] = uint8(value >> 8) + out.Pix[xo+7] = uint8(value) + } + } +} + +func nearestGray(in *image.Gray, out *image.Gray, scale float64, coeffs []bool, offset []int, filterLength int) { + newBounds := out.Bounds() + maxX := in.Bounds().Dx() - 1 + + for x := newBounds.Min.X; x < newBounds.Max.X; x++ { + row := in.Pix[x*in.Stride:] + for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ { + var gray float32 + var sum float32 + start := offset[y] + ci := y * filterLength + for i := 0; i < filterLength; i++ { + if coeffs[ci+i] { + xi := start + i + switch { + case xi < 0: + xi = 0 + case xi >= maxX: + xi = maxX + } + gray += float32(row[xi]) + sum++ + } + } + + offset := (y-newBounds.Min.Y)*out.Stride + (x - newBounds.Min.X) + out.Pix[offset] = floatToUint8(gray / sum) + } + } +} + +func nearestGray16(in *image.Gray16, out *image.Gray16, scale float64, coeffs []bool, offset []int, filterLength int) { + newBounds := out.Bounds() + maxX := in.Bounds().Dx() - 1 + + for x := newBounds.Min.X; x < newBounds.Max.X; x++ { + row := in.Pix[x*in.Stride:] + for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ { + var gray float32 + var sum float32 + start := offset[y] + ci := y * filterLength + for i := 0; i < filterLength; i++ { + if coeffs[ci+i] { + xi := start + i + switch { + case uint(xi) < uint(maxX): + xi *= 2 + case xi >= maxX: + xi = 2 * maxX + default: + xi = 0 + } + gray += float32(uint16(row[xi+0])<<8 | uint16(row[xi+1])) + 
sum++ + } + } + + offset := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*2 + value := floatToUint16(gray / sum) + out.Pix[offset+0] = uint8(value >> 8) + out.Pix[offset+1] = uint8(value) + } + } +} diff --git a/vendor/github.com/nfnt/resize/resize.go b/vendor/github.com/nfnt/resize/resize.go new file mode 100644 index 000000000..0d7fbf69a --- /dev/null +++ b/vendor/github.com/nfnt/resize/resize.go @@ -0,0 +1,620 @@ +/* +Copyright (c) 2012, Jan Schlicht + +Permission to use, copy, modify, and/or distribute this software for any purpose +with or without fee is hereby granted, provided that the above copyright notice +and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF +THIS SOFTWARE. +*/ + +// Package resize implements various image resizing methods. +// +// The package works with the Image interface described in the image package. +// Various interpolation methods are provided and multiple processors may be +// utilized in the computations. +// +// Example: +// imgResized := resize.Resize(1000, 0, imgOld, resize.MitchellNetravali) +package resize + +import ( + "image" + "runtime" + "sync" +) + +// An InterpolationFunction provides the parameters that describe an +// interpolation kernel. It returns the number of samples to take +// and the kernel function to use for sampling. 
+type InterpolationFunction int + +// InterpolationFunction constants +const ( + // Nearest-neighbor interpolation + NearestNeighbor InterpolationFunction = iota + // Bilinear interpolation + Bilinear + // Bicubic interpolation (with cubic hermite spline) + Bicubic + // Mitchell-Netravali interpolation + MitchellNetravali + // Lanczos interpolation (a=2) + Lanczos2 + // Lanczos interpolation (a=3) + Lanczos3 +) + +// kernal, returns an InterpolationFunctions taps and kernel. +func (i InterpolationFunction) kernel() (int, func(float64) float64) { + switch i { + case Bilinear: + return 2, linear + case Bicubic: + return 4, cubic + case MitchellNetravali: + return 4, mitchellnetravali + case Lanczos2: + return 4, lanczos2 + case Lanczos3: + return 6, lanczos3 + default: + // Default to NearestNeighbor. + return 2, nearest + } +} + +// values <1 will sharpen the image +var blur = 1.0 + +// Resize scales an image to new width and height using the interpolation function interp. +// A new image with the given dimensions will be returned. +// If one of the parameters width or height is set to 0, its size will be calculated so that +// the aspect ratio is that of the originating image. +// The resizing algorithm uses channels for parallel computation. +// If the input image has width or height of 0, it is returned unchanged. 
+func Resize(width, height uint, img image.Image, interp InterpolationFunction) image.Image { + scaleX, scaleY := calcFactors(width, height, float64(img.Bounds().Dx()), float64(img.Bounds().Dy())) + if width == 0 { + width = uint(0.7 + float64(img.Bounds().Dx())/scaleX) + } + if height == 0 { + height = uint(0.7 + float64(img.Bounds().Dy())/scaleY) + } + + // Trivial case: return input image + if int(width) == img.Bounds().Dx() && int(height) == img.Bounds().Dy() { + return img + } + + // Input image has no pixels + if img.Bounds().Dx() <= 0 || img.Bounds().Dy() <= 0 { + return img + } + + if interp == NearestNeighbor { + return resizeNearest(width, height, scaleX, scaleY, img, interp) + } + + taps, kernel := interp.kernel() + cpus := runtime.GOMAXPROCS(0) + wg := sync.WaitGroup{} + + // Generic access to image.Image is slow in tight loops. + // The optimal access has to be determined from the concrete image type. + switch input := img.(type) { + case *image.RGBA: + // 8-bit precision + temp := image.NewRGBA(image.Rect(0, 0, input.Bounds().Dy(), int(width))) + result := image.NewRGBA(image.Rect(0, 0, int(width), int(height))) + + // horizontal filter, results in transposed temporary image + coeffs, offset, filterLength := createWeights8(temp.Bounds().Dy(), taps, blur, scaleX, kernel) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(temp, i, cpus).(*image.RGBA) + go func() { + defer wg.Done() + resizeRGBA(input, slice, scaleX, coeffs, offset, filterLength) + }() + } + wg.Wait() + + // horizontal filter on transposed image, result is not transposed + coeffs, offset, filterLength = createWeights8(result.Bounds().Dy(), taps, blur, scaleY, kernel) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(result, i, cpus).(*image.RGBA) + go func() { + defer wg.Done() + resizeRGBA(temp, slice, scaleY, coeffs, offset, filterLength) + }() + } + wg.Wait() + return result + case *image.NRGBA: + // 8-bit precision + temp := image.NewRGBA(image.Rect(0, 
0, input.Bounds().Dy(), int(width))) + result := image.NewRGBA(image.Rect(0, 0, int(width), int(height))) + + // horizontal filter, results in transposed temporary image + coeffs, offset, filterLength := createWeights8(temp.Bounds().Dy(), taps, blur, scaleX, kernel) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(temp, i, cpus).(*image.RGBA) + go func() { + defer wg.Done() + resizeNRGBA(input, slice, scaleX, coeffs, offset, filterLength) + }() + } + wg.Wait() + + // horizontal filter on transposed image, result is not transposed + coeffs, offset, filterLength = createWeights8(result.Bounds().Dy(), taps, blur, scaleY, kernel) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(result, i, cpus).(*image.RGBA) + go func() { + defer wg.Done() + resizeRGBA(temp, slice, scaleY, coeffs, offset, filterLength) + }() + } + wg.Wait() + return result + + case *image.YCbCr: + // 8-bit precision + // accessing the YCbCr arrays in a tight loop is slow. + // converting the image to ycc increases performance by 2x. 
+ temp := newYCC(image.Rect(0, 0, input.Bounds().Dy(), int(width)), input.SubsampleRatio) + result := newYCC(image.Rect(0, 0, int(width), int(height)), image.YCbCrSubsampleRatio444) + + coeffs, offset, filterLength := createWeights8(temp.Bounds().Dy(), taps, blur, scaleX, kernel) + in := imageYCbCrToYCC(input) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(temp, i, cpus).(*ycc) + go func() { + defer wg.Done() + resizeYCbCr(in, slice, scaleX, coeffs, offset, filterLength) + }() + } + wg.Wait() + + coeffs, offset, filterLength = createWeights8(result.Bounds().Dy(), taps, blur, scaleY, kernel) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(result, i, cpus).(*ycc) + go func() { + defer wg.Done() + resizeYCbCr(temp, slice, scaleY, coeffs, offset, filterLength) + }() + } + wg.Wait() + return result.YCbCr() + case *image.RGBA64: + // 16-bit precision + temp := image.NewRGBA64(image.Rect(0, 0, input.Bounds().Dy(), int(width))) + result := image.NewRGBA64(image.Rect(0, 0, int(width), int(height))) + + // horizontal filter, results in transposed temporary image + coeffs, offset, filterLength := createWeights16(temp.Bounds().Dy(), taps, blur, scaleX, kernel) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(temp, i, cpus).(*image.RGBA64) + go func() { + defer wg.Done() + resizeRGBA64(input, slice, scaleX, coeffs, offset, filterLength) + }() + } + wg.Wait() + + // horizontal filter on transposed image, result is not transposed + coeffs, offset, filterLength = createWeights16(result.Bounds().Dy(), taps, blur, scaleY, kernel) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(result, i, cpus).(*image.RGBA64) + go func() { + defer wg.Done() + resizeRGBA64(temp, slice, scaleY, coeffs, offset, filterLength) + }() + } + wg.Wait() + return result + case *image.NRGBA64: + // 16-bit precision + temp := image.NewRGBA64(image.Rect(0, 0, input.Bounds().Dy(), int(width))) + result := image.NewRGBA64(image.Rect(0, 0, 
int(width), int(height))) + + // horizontal filter, results in transposed temporary image + coeffs, offset, filterLength := createWeights16(temp.Bounds().Dy(), taps, blur, scaleX, kernel) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(temp, i, cpus).(*image.RGBA64) + go func() { + defer wg.Done() + resizeNRGBA64(input, slice, scaleX, coeffs, offset, filterLength) + }() + } + wg.Wait() + + // horizontal filter on transposed image, result is not transposed + coeffs, offset, filterLength = createWeights16(result.Bounds().Dy(), taps, blur, scaleY, kernel) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(result, i, cpus).(*image.RGBA64) + go func() { + defer wg.Done() + resizeRGBA64(temp, slice, scaleY, coeffs, offset, filterLength) + }() + } + wg.Wait() + return result + case *image.Gray: + // 8-bit precision + temp := image.NewGray(image.Rect(0, 0, input.Bounds().Dy(), int(width))) + result := image.NewGray(image.Rect(0, 0, int(width), int(height))) + + // horizontal filter, results in transposed temporary image + coeffs, offset, filterLength := createWeights8(temp.Bounds().Dy(), taps, blur, scaleX, kernel) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(temp, i, cpus).(*image.Gray) + go func() { + defer wg.Done() + resizeGray(input, slice, scaleX, coeffs, offset, filterLength) + }() + } + wg.Wait() + + // horizontal filter on transposed image, result is not transposed + coeffs, offset, filterLength = createWeights8(result.Bounds().Dy(), taps, blur, scaleY, kernel) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(result, i, cpus).(*image.Gray) + go func() { + defer wg.Done() + resizeGray(temp, slice, scaleY, coeffs, offset, filterLength) + }() + } + wg.Wait() + return result + case *image.Gray16: + // 16-bit precision + temp := image.NewGray16(image.Rect(0, 0, input.Bounds().Dy(), int(width))) + result := image.NewGray16(image.Rect(0, 0, int(width), int(height))) + + // horizontal filter, results 
in transposed temporary image + coeffs, offset, filterLength := createWeights16(temp.Bounds().Dy(), taps, blur, scaleX, kernel) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(temp, i, cpus).(*image.Gray16) + go func() { + defer wg.Done() + resizeGray16(input, slice, scaleX, coeffs, offset, filterLength) + }() + } + wg.Wait() + + // horizontal filter on transposed image, result is not transposed + coeffs, offset, filterLength = createWeights16(result.Bounds().Dy(), taps, blur, scaleY, kernel) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(result, i, cpus).(*image.Gray16) + go func() { + defer wg.Done() + resizeGray16(temp, slice, scaleY, coeffs, offset, filterLength) + }() + } + wg.Wait() + return result + default: + // 16-bit precision + temp := image.NewRGBA64(image.Rect(0, 0, img.Bounds().Dy(), int(width))) + result := image.NewRGBA64(image.Rect(0, 0, int(width), int(height))) + + // horizontal filter, results in transposed temporary image + coeffs, offset, filterLength := createWeights16(temp.Bounds().Dy(), taps, blur, scaleX, kernel) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(temp, i, cpus).(*image.RGBA64) + go func() { + defer wg.Done() + resizeGeneric(img, slice, scaleX, coeffs, offset, filterLength) + }() + } + wg.Wait() + + // horizontal filter on transposed image, result is not transposed + coeffs, offset, filterLength = createWeights16(result.Bounds().Dy(), taps, blur, scaleY, kernel) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(result, i, cpus).(*image.RGBA64) + go func() { + defer wg.Done() + resizeRGBA64(temp, slice, scaleY, coeffs, offset, filterLength) + }() + } + wg.Wait() + return result + } +} + +func resizeNearest(width, height uint, scaleX, scaleY float64, img image.Image, interp InterpolationFunction) image.Image { + taps, _ := interp.kernel() + cpus := runtime.GOMAXPROCS(0) + wg := sync.WaitGroup{} + + switch input := img.(type) { + case *image.RGBA: + // 8-bit 
precision + temp := image.NewRGBA(image.Rect(0, 0, input.Bounds().Dy(), int(width))) + result := image.NewRGBA(image.Rect(0, 0, int(width), int(height))) + + // horizontal filter, results in transposed temporary image + coeffs, offset, filterLength := createWeightsNearest(temp.Bounds().Dy(), taps, blur, scaleX) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(temp, i, cpus).(*image.RGBA) + go func() { + defer wg.Done() + nearestRGBA(input, slice, scaleX, coeffs, offset, filterLength) + }() + } + wg.Wait() + + // horizontal filter on transposed image, result is not transposed + coeffs, offset, filterLength = createWeightsNearest(result.Bounds().Dy(), taps, blur, scaleY) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(result, i, cpus).(*image.RGBA) + go func() { + defer wg.Done() + nearestRGBA(temp, slice, scaleY, coeffs, offset, filterLength) + }() + } + wg.Wait() + return result + case *image.NRGBA: + // 8-bit precision + temp := image.NewNRGBA(image.Rect(0, 0, input.Bounds().Dy(), int(width))) + result := image.NewNRGBA(image.Rect(0, 0, int(width), int(height))) + + // horizontal filter, results in transposed temporary image + coeffs, offset, filterLength := createWeightsNearest(temp.Bounds().Dy(), taps, blur, scaleX) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(temp, i, cpus).(*image.NRGBA) + go func() { + defer wg.Done() + nearestNRGBA(input, slice, scaleX, coeffs, offset, filterLength) + }() + } + wg.Wait() + + // horizontal filter on transposed image, result is not transposed + coeffs, offset, filterLength = createWeightsNearest(result.Bounds().Dy(), taps, blur, scaleY) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(result, i, cpus).(*image.NRGBA) + go func() { + defer wg.Done() + nearestNRGBA(temp, slice, scaleY, coeffs, offset, filterLength) + }() + } + wg.Wait() + return result + case *image.YCbCr: + // 8-bit precision + // accessing the YCbCr arrays in a tight loop is slow. 
+ // converting the image to ycc increases performance by 2x. + temp := newYCC(image.Rect(0, 0, input.Bounds().Dy(), int(width)), input.SubsampleRatio) + result := newYCC(image.Rect(0, 0, int(width), int(height)), image.YCbCrSubsampleRatio444) + + coeffs, offset, filterLength := createWeightsNearest(temp.Bounds().Dy(), taps, blur, scaleX) + in := imageYCbCrToYCC(input) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(temp, i, cpus).(*ycc) + go func() { + defer wg.Done() + nearestYCbCr(in, slice, scaleX, coeffs, offset, filterLength) + }() + } + wg.Wait() + + coeffs, offset, filterLength = createWeightsNearest(result.Bounds().Dy(), taps, blur, scaleY) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(result, i, cpus).(*ycc) + go func() { + defer wg.Done() + nearestYCbCr(temp, slice, scaleY, coeffs, offset, filterLength) + }() + } + wg.Wait() + return result.YCbCr() + case *image.RGBA64: + // 16-bit precision + temp := image.NewRGBA64(image.Rect(0, 0, input.Bounds().Dy(), int(width))) + result := image.NewRGBA64(image.Rect(0, 0, int(width), int(height))) + + // horizontal filter, results in transposed temporary image + coeffs, offset, filterLength := createWeightsNearest(temp.Bounds().Dy(), taps, blur, scaleX) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(temp, i, cpus).(*image.RGBA64) + go func() { + defer wg.Done() + nearestRGBA64(input, slice, scaleX, coeffs, offset, filterLength) + }() + } + wg.Wait() + + // horizontal filter on transposed image, result is not transposed + coeffs, offset, filterLength = createWeightsNearest(result.Bounds().Dy(), taps, blur, scaleY) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(result, i, cpus).(*image.RGBA64) + go func() { + defer wg.Done() + nearestRGBA64(temp, slice, scaleY, coeffs, offset, filterLength) + }() + } + wg.Wait() + return result + case *image.NRGBA64: + // 16-bit precision + temp := image.NewNRGBA64(image.Rect(0, 0, input.Bounds().Dy(), 
int(width))) + result := image.NewNRGBA64(image.Rect(0, 0, int(width), int(height))) + + // horizontal filter, results in transposed temporary image + coeffs, offset, filterLength := createWeightsNearest(temp.Bounds().Dy(), taps, blur, scaleX) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(temp, i, cpus).(*image.NRGBA64) + go func() { + defer wg.Done() + nearestNRGBA64(input, slice, scaleX, coeffs, offset, filterLength) + }() + } + wg.Wait() + + // horizontal filter on transposed image, result is not transposed + coeffs, offset, filterLength = createWeightsNearest(result.Bounds().Dy(), taps, blur, scaleY) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(result, i, cpus).(*image.NRGBA64) + go func() { + defer wg.Done() + nearestNRGBA64(temp, slice, scaleY, coeffs, offset, filterLength) + }() + } + wg.Wait() + return result + case *image.Gray: + // 8-bit precision + temp := image.NewGray(image.Rect(0, 0, input.Bounds().Dy(), int(width))) + result := image.NewGray(image.Rect(0, 0, int(width), int(height))) + + // horizontal filter, results in transposed temporary image + coeffs, offset, filterLength := createWeightsNearest(temp.Bounds().Dy(), taps, blur, scaleX) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(temp, i, cpus).(*image.Gray) + go func() { + defer wg.Done() + nearestGray(input, slice, scaleX, coeffs, offset, filterLength) + }() + } + wg.Wait() + + // horizontal filter on transposed image, result is not transposed + coeffs, offset, filterLength = createWeightsNearest(result.Bounds().Dy(), taps, blur, scaleY) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(result, i, cpus).(*image.Gray) + go func() { + defer wg.Done() + nearestGray(temp, slice, scaleY, coeffs, offset, filterLength) + }() + } + wg.Wait() + return result + case *image.Gray16: + // 16-bit precision + temp := image.NewGray16(image.Rect(0, 0, input.Bounds().Dy(), int(width))) + result := image.NewGray16(image.Rect(0, 0, 
int(width), int(height))) + + // horizontal filter, results in transposed temporary image + coeffs, offset, filterLength := createWeightsNearest(temp.Bounds().Dy(), taps, blur, scaleX) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(temp, i, cpus).(*image.Gray16) + go func() { + defer wg.Done() + nearestGray16(input, slice, scaleX, coeffs, offset, filterLength) + }() + } + wg.Wait() + + // horizontal filter on transposed image, result is not transposed + coeffs, offset, filterLength = createWeightsNearest(result.Bounds().Dy(), taps, blur, scaleY) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(result, i, cpus).(*image.Gray16) + go func() { + defer wg.Done() + nearestGray16(temp, slice, scaleY, coeffs, offset, filterLength) + }() + } + wg.Wait() + return result + default: + // 16-bit precision + temp := image.NewRGBA64(image.Rect(0, 0, img.Bounds().Dy(), int(width))) + result := image.NewRGBA64(image.Rect(0, 0, int(width), int(height))) + + // horizontal filter, results in transposed temporary image + coeffs, offset, filterLength := createWeightsNearest(temp.Bounds().Dy(), taps, blur, scaleX) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(temp, i, cpus).(*image.RGBA64) + go func() { + defer wg.Done() + nearestGeneric(img, slice, scaleX, coeffs, offset, filterLength) + }() + } + wg.Wait() + + // horizontal filter on transposed image, result is not transposed + coeffs, offset, filterLength = createWeightsNearest(result.Bounds().Dy(), taps, blur, scaleY) + wg.Add(cpus) + for i := 0; i < cpus; i++ { + slice := makeSlice(result, i, cpus).(*image.RGBA64) + go func() { + defer wg.Done() + nearestRGBA64(temp, slice, scaleY, coeffs, offset, filterLength) + }() + } + wg.Wait() + return result + } + +} + +// Calculates scaling factors using old and new image dimensions. 
+func calcFactors(width, height uint, oldWidth, oldHeight float64) (scaleX, scaleY float64) { + if width == 0 { + if height == 0 { + scaleX = 1.0 + scaleY = 1.0 + } else { + scaleY = oldHeight / float64(height) + scaleX = scaleY + } + } else { + scaleX = oldWidth / float64(width) + if height == 0 { + scaleY = scaleX + } else { + scaleY = oldHeight / float64(height) + } + } + return +} + +type imageWithSubImage interface { + image.Image + SubImage(image.Rectangle) image.Image +} + +func makeSlice(img imageWithSubImage, i, n int) image.Image { + return img.SubImage(image.Rect(img.Bounds().Min.X, img.Bounds().Min.Y+i*img.Bounds().Dy()/n, img.Bounds().Max.X, img.Bounds().Min.Y+(i+1)*img.Bounds().Dy()/n)) +} diff --git a/vendor/github.com/nfnt/resize/thumbnail.go b/vendor/github.com/nfnt/resize/thumbnail.go new file mode 100644 index 000000000..9efc246be --- /dev/null +++ b/vendor/github.com/nfnt/resize/thumbnail.go @@ -0,0 +1,55 @@ +/* +Copyright (c) 2012, Jan Schlicht + +Permission to use, copy, modify, and/or distribute this software for any purpose +with or without fee is hereby granted, provided that the above copyright notice +and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF +THIS SOFTWARE. +*/ + +package resize + +import ( + "image" +) + +// Thumbnail will downscale provided image to max width and height preserving +// original aspect ratio and using the interpolation function interp. 
+// It will return original image, without processing it, if original sizes +// are already smaller than provided constraints. +func Thumbnail(maxWidth, maxHeight uint, img image.Image, interp InterpolationFunction) image.Image { + origBounds := img.Bounds() + origWidth := uint(origBounds.Dx()) + origHeight := uint(origBounds.Dy()) + newWidth, newHeight := origWidth, origHeight + + // Return original image if it have same or smaller size as constraints + if maxWidth >= origWidth && maxHeight >= origHeight { + return img + } + + // Preserve aspect ratio + if origWidth > maxWidth { + newHeight = uint(origHeight * maxWidth / origWidth) + if newHeight < 1 { + newHeight = 1 + } + newWidth = maxWidth + } + + if newHeight > maxHeight { + newWidth = uint(newWidth * maxHeight / newHeight) + if newWidth < 1 { + newWidth = 1 + } + newHeight = maxHeight + } + return Resize(newWidth, newHeight, img, interp) +} diff --git a/vendor/github.com/nfnt/resize/ycc.go b/vendor/github.com/nfnt/resize/ycc.go new file mode 100644 index 000000000..143e4d06a --- /dev/null +++ b/vendor/github.com/nfnt/resize/ycc.go @@ -0,0 +1,387 @@ +/* +Copyright (c) 2014, Charlie Vieth + +Permission to use, copy, modify, and/or distribute this software for any purpose +with or without fee is hereby granted, provided that the above copyright notice +and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF +THIS SOFTWARE. +*/ + +package resize + +import ( + "image" + "image/color" +) + +// ycc is an in memory YCbCr image. 
The Y, Cb and Cr samples are held in a +// single slice to increase resizing performance. +type ycc struct { + // Pix holds the image's pixels, in Y, Cb, Cr order. The pixel at + // (x, y) starts at Pix[(y-Rect.Min.Y)*Stride + (x-Rect.Min.X)*3]. + Pix []uint8 + // Stride is the Pix stride (in bytes) between vertically adjacent pixels. + Stride int + // Rect is the image's bounds. + Rect image.Rectangle + // SubsampleRatio is the subsample ratio of the original YCbCr image. + SubsampleRatio image.YCbCrSubsampleRatio +} + +// PixOffset returns the index of the first element of Pix that corresponds to +// the pixel at (x, y). +func (p *ycc) PixOffset(x, y int) int { + return (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*3 +} + +func (p *ycc) Bounds() image.Rectangle { + return p.Rect +} + +func (p *ycc) ColorModel() color.Model { + return color.YCbCrModel +} + +func (p *ycc) At(x, y int) color.Color { + if !(image.Point{x, y}.In(p.Rect)) { + return color.YCbCr{} + } + i := p.PixOffset(x, y) + return color.YCbCr{ + p.Pix[i+0], + p.Pix[i+1], + p.Pix[i+2], + } +} + +func (p *ycc) Opaque() bool { + return true +} + +// SubImage returns an image representing the portion of the image p visible +// through r. The returned value shares pixels with the original image. +func (p *ycc) SubImage(r image.Rectangle) image.Image { + r = r.Intersect(p.Rect) + if r.Empty() { + return &ycc{SubsampleRatio: p.SubsampleRatio} + } + i := p.PixOffset(r.Min.X, r.Min.Y) + return &ycc{ + Pix: p.Pix[i:], + Stride: p.Stride, + Rect: r, + SubsampleRatio: p.SubsampleRatio, + } +} + +// newYCC returns a new ycc with the given bounds and subsample ratio. +func newYCC(r image.Rectangle, s image.YCbCrSubsampleRatio) *ycc { + w, h := r.Dx(), r.Dy() + buf := make([]uint8, 3*w*h) + return &ycc{Pix: buf, Stride: 3 * w, Rect: r, SubsampleRatio: s} +} + +// Copy of image.YCbCrSubsampleRatio constants - this allows us to support +// older versions of Go where these constants are not defined (i.e. 
Go 1.4) +const ( + ycbcrSubsampleRatio444 image.YCbCrSubsampleRatio = iota + ycbcrSubsampleRatio422 + ycbcrSubsampleRatio420 + ycbcrSubsampleRatio440 + ycbcrSubsampleRatio411 + ycbcrSubsampleRatio410 +) + +// YCbCr converts ycc to a YCbCr image with the same subsample ratio +// as the YCbCr image that ycc was generated from. +func (p *ycc) YCbCr() *image.YCbCr { + ycbcr := image.NewYCbCr(p.Rect, p.SubsampleRatio) + switch ycbcr.SubsampleRatio { + case ycbcrSubsampleRatio422: + return p.ycbcr422(ycbcr) + case ycbcrSubsampleRatio420: + return p.ycbcr420(ycbcr) + case ycbcrSubsampleRatio440: + return p.ycbcr440(ycbcr) + case ycbcrSubsampleRatio444: + return p.ycbcr444(ycbcr) + case ycbcrSubsampleRatio411: + return p.ycbcr411(ycbcr) + case ycbcrSubsampleRatio410: + return p.ycbcr410(ycbcr) + } + return ycbcr +} + +// imageYCbCrToYCC converts a YCbCr image to a ycc image for resizing. +func imageYCbCrToYCC(in *image.YCbCr) *ycc { + w, h := in.Rect.Dx(), in.Rect.Dy() + p := ycc{ + Pix: make([]uint8, 3*w*h), + Stride: 3 * w, + Rect: image.Rect(0, 0, w, h), + SubsampleRatio: in.SubsampleRatio, + } + switch in.SubsampleRatio { + case ycbcrSubsampleRatio422: + return convertToYCC422(in, &p) + case ycbcrSubsampleRatio420: + return convertToYCC420(in, &p) + case ycbcrSubsampleRatio440: + return convertToYCC440(in, &p) + case ycbcrSubsampleRatio444: + return convertToYCC444(in, &p) + case ycbcrSubsampleRatio411: + return convertToYCC411(in, &p) + case ycbcrSubsampleRatio410: + return convertToYCC410(in, &p) + } + return &p +} + +func (p *ycc) ycbcr422(ycbcr *image.YCbCr) *image.YCbCr { + var off int + Pix := p.Pix + Y := ycbcr.Y + Cb := ycbcr.Cb + Cr := ycbcr.Cr + for y := 0; y < ycbcr.Rect.Max.Y-ycbcr.Rect.Min.Y; y++ { + yy := y * ycbcr.YStride + cy := y * ycbcr.CStride + for x := 0; x < ycbcr.Rect.Max.X-ycbcr.Rect.Min.X; x++ { + ci := cy + x/2 + Y[yy+x] = Pix[off+0] + Cb[ci] = Pix[off+1] + Cr[ci] = Pix[off+2] + off += 3 + } + } + return ycbcr +} + +func (p *ycc) 
ycbcr420(ycbcr *image.YCbCr) *image.YCbCr { + var off int + Pix := p.Pix + Y := ycbcr.Y + Cb := ycbcr.Cb + Cr := ycbcr.Cr + for y := 0; y < ycbcr.Rect.Max.Y-ycbcr.Rect.Min.Y; y++ { + yy := y * ycbcr.YStride + cy := (y / 2) * ycbcr.CStride + for x := 0; x < ycbcr.Rect.Max.X-ycbcr.Rect.Min.X; x++ { + ci := cy + x/2 + Y[yy+x] = Pix[off+0] + Cb[ci] = Pix[off+1] + Cr[ci] = Pix[off+2] + off += 3 + } + } + return ycbcr +} + +func (p *ycc) ycbcr440(ycbcr *image.YCbCr) *image.YCbCr { + var off int + Pix := p.Pix + Y := ycbcr.Y + Cb := ycbcr.Cb + Cr := ycbcr.Cr + for y := 0; y < ycbcr.Rect.Max.Y-ycbcr.Rect.Min.Y; y++ { + yy := y * ycbcr.YStride + cy := (y / 2) * ycbcr.CStride + for x := 0; x < ycbcr.Rect.Max.X-ycbcr.Rect.Min.X; x++ { + ci := cy + x + Y[yy+x] = Pix[off+0] + Cb[ci] = Pix[off+1] + Cr[ci] = Pix[off+2] + off += 3 + } + } + return ycbcr +} + +func (p *ycc) ycbcr444(ycbcr *image.YCbCr) *image.YCbCr { + var off int + Pix := p.Pix + Y := ycbcr.Y + Cb := ycbcr.Cb + Cr := ycbcr.Cr + for y := 0; y < ycbcr.Rect.Max.Y-ycbcr.Rect.Min.Y; y++ { + yy := y * ycbcr.YStride + cy := y * ycbcr.CStride + for x := 0; x < ycbcr.Rect.Max.X-ycbcr.Rect.Min.X; x++ { + ci := cy + x + Y[yy+x] = Pix[off+0] + Cb[ci] = Pix[off+1] + Cr[ci] = Pix[off+2] + off += 3 + } + } + return ycbcr +} + +func (p *ycc) ycbcr411(ycbcr *image.YCbCr) *image.YCbCr { + var off int + Pix := p.Pix + Y := ycbcr.Y + Cb := ycbcr.Cb + Cr := ycbcr.Cr + for y := 0; y < ycbcr.Rect.Max.Y-ycbcr.Rect.Min.Y; y++ { + yy := y * ycbcr.YStride + cy := y * ycbcr.CStride + for x := 0; x < ycbcr.Rect.Max.X-ycbcr.Rect.Min.X; x++ { + ci := cy + x/4 + Y[yy+x] = Pix[off+0] + Cb[ci] = Pix[off+1] + Cr[ci] = Pix[off+2] + off += 3 + } + } + return ycbcr +} + +func (p *ycc) ycbcr410(ycbcr *image.YCbCr) *image.YCbCr { + var off int + Pix := p.Pix + Y := ycbcr.Y + Cb := ycbcr.Cb + Cr := ycbcr.Cr + for y := 0; y < ycbcr.Rect.Max.Y-ycbcr.Rect.Min.Y; y++ { + yy := y * ycbcr.YStride + cy := (y / 2) * ycbcr.CStride + for x := 0; x < 
ycbcr.Rect.Max.X-ycbcr.Rect.Min.X; x++ { + ci := cy + x/4 + Y[yy+x] = Pix[off+0] + Cb[ci] = Pix[off+1] + Cr[ci] = Pix[off+2] + off += 3 + } + } + return ycbcr +} + +func convertToYCC422(in *image.YCbCr, p *ycc) *ycc { + var off int + Pix := p.Pix + Y := in.Y + Cb := in.Cb + Cr := in.Cr + for y := 0; y < in.Rect.Max.Y-in.Rect.Min.Y; y++ { + yy := y * in.YStride + cy := y * in.CStride + for x := 0; x < in.Rect.Max.X-in.Rect.Min.X; x++ { + ci := cy + x/2 + Pix[off+0] = Y[yy+x] + Pix[off+1] = Cb[ci] + Pix[off+2] = Cr[ci] + off += 3 + } + } + return p +} + +func convertToYCC420(in *image.YCbCr, p *ycc) *ycc { + var off int + Pix := p.Pix + Y := in.Y + Cb := in.Cb + Cr := in.Cr + for y := 0; y < in.Rect.Max.Y-in.Rect.Min.Y; y++ { + yy := y * in.YStride + cy := (y / 2) * in.CStride + for x := 0; x < in.Rect.Max.X-in.Rect.Min.X; x++ { + ci := cy + x/2 + Pix[off+0] = Y[yy+x] + Pix[off+1] = Cb[ci] + Pix[off+2] = Cr[ci] + off += 3 + } + } + return p +} + +func convertToYCC440(in *image.YCbCr, p *ycc) *ycc { + var off int + Pix := p.Pix + Y := in.Y + Cb := in.Cb + Cr := in.Cr + for y := 0; y < in.Rect.Max.Y-in.Rect.Min.Y; y++ { + yy := y * in.YStride + cy := (y / 2) * in.CStride + for x := 0; x < in.Rect.Max.X-in.Rect.Min.X; x++ { + ci := cy + x + Pix[off+0] = Y[yy+x] + Pix[off+1] = Cb[ci] + Pix[off+2] = Cr[ci] + off += 3 + } + } + return p +} + +func convertToYCC444(in *image.YCbCr, p *ycc) *ycc { + var off int + Pix := p.Pix + Y := in.Y + Cb := in.Cb + Cr := in.Cr + for y := 0; y < in.Rect.Max.Y-in.Rect.Min.Y; y++ { + yy := y * in.YStride + cy := y * in.CStride + for x := 0; x < in.Rect.Max.X-in.Rect.Min.X; x++ { + ci := cy + x + Pix[off+0] = Y[yy+x] + Pix[off+1] = Cb[ci] + Pix[off+2] = Cr[ci] + off += 3 + } + } + return p +} + +func convertToYCC411(in *image.YCbCr, p *ycc) *ycc { + var off int + Pix := p.Pix + Y := in.Y + Cb := in.Cb + Cr := in.Cr + for y := 0; y < in.Rect.Max.Y-in.Rect.Min.Y; y++ { + yy := y * in.YStride + cy := y * in.CStride + for x := 0; x < 
in.Rect.Max.X-in.Rect.Min.X; x++ { + ci := cy + x/4 + Pix[off+0] = Y[yy+x] + Pix[off+1] = Cb[ci] + Pix[off+2] = Cr[ci] + off += 3 + } + } + return p +} + +func convertToYCC410(in *image.YCbCr, p *ycc) *ycc { + var off int + Pix := p.Pix + Y := in.Y + Cb := in.Cb + Cr := in.Cr + for y := 0; y < in.Rect.Max.Y-in.Rect.Min.Y; y++ { + yy := y * in.YStride + cy := (y / 2) * in.CStride + for x := 0; x < in.Rect.Max.X-in.Rect.Min.X; x++ { + ci := cy + x/4 + Pix[off+0] = Y[yy+x] + Pix[off+1] = Cb[ci] + Pix[off+2] = Cr[ci] + off += 3 + } + } + return p +} diff --git a/vendor/modules.txt b/vendor/modules.txt index b9ea5e2e0..27c01e28e 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -90,6 +90,10 @@ github.com/chromedp/cdproto/webauthn github.com/chromedp/chromedp github.com/chromedp/chromedp/device github.com/chromedp/chromedp/kb +# github.com/corona10/goimagehash v1.0.3 +github.com/corona10/goimagehash +github.com/corona10/goimagehash/etcs +github.com/corona10/goimagehash/transforms # github.com/cpuguy83/go-md2man/v2 v2.0.0 github.com/cpuguy83/go-md2man/v2/md2man # github.com/davecgh/go-spew v1.1.1 @@ -228,6 +232,8 @@ github.com/modern-go/concurrent github.com/modern-go/reflect2 # github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007 github.com/natefinch/pie +# github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 +github.com/nfnt/resize # github.com/pelletier/go-toml v1.2.0 github.com/pelletier/go-toml # github.com/pkg/errors v0.9.1