Add phash generation and dupe checking (#1158)

This commit is contained in:
InfiniteTF 2021-04-12 01:04:40 +02:00 committed by GitHub
parent a2582047ca
commit c38660d209
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
70 changed files with 4342 additions and 214 deletions

1
go.mod
View file

@ -6,6 +6,7 @@ require (
github.com/antchfx/htmlquery v1.2.3 github.com/antchfx/htmlquery v1.2.3
github.com/chromedp/cdproto v0.0.0-20200608134039-8a80cdaf865c github.com/chromedp/cdproto v0.0.0-20200608134039-8a80cdaf865c
github.com/chromedp/chromedp v0.5.3 github.com/chromedp/chromedp v0.5.3
github.com/corona10/goimagehash v1.0.3
github.com/dgrijalva/jwt-go v3.2.0+incompatible github.com/dgrijalva/jwt-go v3.2.0+incompatible
github.com/disintegration/imaging v1.6.0 github.com/disintegration/imaging v1.6.0
github.com/fvbommel/sortorder v1.0.2 github.com/fvbommel/sortorder v1.0.2

4
go.sum
View file

@ -83,6 +83,8 @@ github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3Ee
github.com/coreos/go-systemd v0.0.0-20181012123002-c6f51f82210d/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/go-systemd v0.0.0-20181012123002-c6f51f82210d/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
github.com/corona10/goimagehash v1.0.3 h1:NZM518aKLmoNluluhfHGxT3LGOnrojrxhGn63DR/CZA=
github.com/corona10/goimagehash v1.0.3/go.mod h1:VkvE0mLn84L4aF8vCb6mafVajEb6QYMHl2ZJLn0mOGI=
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM= github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM=
github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
@ -573,6 +575,8 @@ github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007 h1:Ohgj9L0EYOgXxkDp+
github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007/go.mod h1:wKCOWMb6iNlvKiOToY2cNuaovSXvIiv1zDi9QDR7aGQ= github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007/go.mod h1:wKCOWMb6iNlvKiOToY2cNuaovSXvIiv1zDi9QDR7aGQ=
github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86/go.mod h1:kHJEU3ofeGjhHklVoIGuVj85JJwZ6kWPaJwCIxgnFmo= github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86/go.mod h1:kHJEU3ofeGjhHklVoIGuVj85JJwZ6kWPaJwCIxgnFmo=
github.com/neelance/sourcemap v0.0.0-20151028013722-8c68805598ab/go.mod h1:Qr6/a/Q4r9LP1IltGz7tA7iOK1WonHEYhu1HRBA7ZiM= github.com/neelance/sourcemap v0.0.0-20151028013722-8c68805598ab/go.mod h1:Qr6/a/Q4r9LP1IltGz7tA7iOK1WonHEYhu1HRBA7ZiM=
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 h1:zYyBkD/k9seD2A7fsi6Oo2LfFZAehjjQMERAvZLEDnQ=
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8=
github.com/nicksnyder/go-i18n v1.10.0/go.mod h1:HrK7VCrbOvQoUAQ7Vpy7i87N7JZZZ7R2xBGjv0j365Q= github.com/nicksnyder/go-i18n v1.10.0/go.mod h1:HrK7VCrbOvQoUAQ7Vpy7i87N7JZZZ7R2xBGjv0j365Q=
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=

View file

@ -10,6 +10,7 @@ fragment SlimSceneData on Scene {
o_counter o_counter
organized organized
path path
phash
file { file {
size size
@ -29,6 +30,7 @@ fragment SlimSceneData on Scene {
webp webp
vtt vtt
chapters_vtt chapters_vtt
sprite
} }
scene_markers { scene_markers {

View file

@ -10,6 +10,7 @@ fragment SceneData on Scene {
o_counter o_counter
organized organized
path path
phash
file { file {
size size

View file

@ -16,6 +16,12 @@ query FindScenesByPathRegex($filter: FindFilterType) {
} }
} }
query FindDuplicateScenes($distance: Int) {
findDuplicateScenes(distance: $distance) {
...SlimSceneData
}
}
query FindScene($id: ID!, $checksum: String) { query FindScene($id: ID!, $checksum: String) {
findScene(id: $id, checksum: $checksum) { findScene(id: $id, checksum: $checksum) {
...SceneData ...SceneData

View file

@ -9,6 +9,9 @@ type Query {
findScenesByPathRegex(filter: FindFilterType): FindScenesResultType! findScenesByPathRegex(filter: FindFilterType): FindScenesResultType!
""" Returns any groups of scenes that are perceptual duplicates within the queried distance """
findDuplicateScenes(distance: Int): [[Scene!]!]!
"""Return valid stream paths""" """Return valid stream paths"""
sceneStreams(id: ID): [SceneStreamEndpoint!]! sceneStreams(id: ID): [SceneStreamEndpoint!]!

View file

@ -7,6 +7,7 @@ input GenerateMetadataInput {
previewOptions: GeneratePreviewOptionsInput previewOptions: GeneratePreviewOptionsInput
markers: Boolean! markers: Boolean!
transcodes: Boolean! transcodes: Boolean!
phashes: Boolean!
"""scene ids to generate for""" """scene ids to generate for"""
sceneIDs: [ID!] sceneIDs: [ID!]
@ -42,6 +43,8 @@ input ScanMetadataInput {
scanGenerateImagePreviews: Boolean scanGenerateImagePreviews: Boolean
"""Generate sprites during scan""" """Generate sprites during scan"""
scanGenerateSprites: Boolean scanGenerateSprites: Boolean
"""Generate phashes during scan"""
scanGeneratePhashes: Boolean
} }
input CleanMetadataInput { input CleanMetadataInput {

View file

@ -16,6 +16,7 @@ type ScenePathsType {
webp: String # Resolver webp: String # Resolver
vtt: String # Resolver vtt: String # Resolver
chapters_vtt: String # Resolver chapters_vtt: String # Resolver
sprite: String # Resolver
} }
type SceneMovie { type SceneMovie {
@ -35,6 +36,7 @@ type Scene {
organized: Boolean! organized: Boolean!
o_counter: Int o_counter: Int
path: String! path: String!
phash: String
file: SceneFileType! # Resolver file: SceneFileType! # Resolver
paths: ScenePathsType! # Resolver paths: ScenePathsType! # Resolver

View file

@ -83,6 +83,7 @@ func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.S
streamPath := builder.GetStreamURL() streamPath := builder.GetStreamURL()
webpPath := builder.GetStreamPreviewImageURL() webpPath := builder.GetStreamPreviewImageURL()
vttPath := builder.GetSpriteVTTURL() vttPath := builder.GetSpriteVTTURL()
spritePath := builder.GetSpriteURL()
chaptersVttPath := builder.GetChaptersVTTURL() chaptersVttPath := builder.GetChaptersVTTURL()
return &models.ScenePathsType{ return &models.ScenePathsType{
Screenshot: &screenshotPath, Screenshot: &screenshotPath,
@ -91,6 +92,7 @@ func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.S
Webp: &webpPath, Webp: &webpPath,
Vtt: &vttPath, Vtt: &vttPath,
ChaptersVtt: &chaptersVttPath, ChaptersVtt: &chaptersVttPath,
Sprite: &spritePath,
}, nil }, nil
} }
@ -200,3 +202,11 @@ func (r *sceneResolver) StashIds(ctx context.Context, obj *models.Scene) (ret []
return ret, nil return ret, nil
} }
// Phash returns the scene's perceptual hash rendered as a hex string via
// utils.PhashToString, or nil when no phash has been generated yet.
func (r *sceneResolver) Phash(ctx context.Context, obj *models.Scene) (*string, error) {
	if !obj.Phash.Valid {
		return nil, nil
	}
	encoded := utils.PhashToString(obj.Phash.Int64)
	return &encoded, nil
}

View file

@ -151,3 +151,18 @@ func (r *queryResolver) ParseSceneFilenames(ctx context.Context, filter *models.
return ret, nil return ret, nil
} }
// FindDuplicateScenes returns groups of scenes whose perceptual hashes lie
// within the requested hamming distance of each other. A nil distance is
// treated as 0 (exact phash match).
func (r *queryResolver) FindDuplicateScenes(ctx context.Context, distance *int) (ret [][]*models.Scene, err error) {
	var dist int
	if distance != nil {
		dist = *distance
	}

	txnErr := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
		var findErr error
		ret, findErr = repo.Scene().FindDuplicates(dist)
		return findErr
	})
	if txnErr != nil {
		return nil, txnErr
	}

	return ret, nil
}

View file

@ -33,6 +33,10 @@ func (b SceneURLBuilder) GetSpriteVTTURL() string {
return b.BaseURL + "/scene/" + b.SceneID + "_thumbs.vtt" return b.BaseURL + "/scene/" + b.SceneID + "_thumbs.vtt"
} }
// GetSpriteURL returns the URL serving the scene's sprite image
// (the "<id>_sprite.jpg" path under /scene).
func (b SceneURLBuilder) GetSpriteURL() string {
	scenePath := "/scene/" + b.SceneID + "_sprite.jpg"
	return b.BaseURL + scenePath
}
func (b SceneURLBuilder) GetScreenshotURL(updateTime time.Time) string { func (b SceneURLBuilder) GetScreenshotURL(updateTime time.Time) string {
return b.BaseURL + "/scene/" + b.SceneID + "/screenshot?" + strconv.FormatInt(updateTime.Unix(), 10) return b.BaseURL + "/scene/" + b.SceneID + "/screenshot?" + strconv.FormatInt(updateTime.Unix(), 10)
} }

View file

@ -23,7 +23,7 @@ import (
var DB *sqlx.DB var DB *sqlx.DB
var WriteMu *sync.Mutex var WriteMu *sync.Mutex
var dbPath string var dbPath string
var appSchemaVersion uint = 19 var appSchemaVersion uint = 20
var databaseSchemaVersion uint var databaseSchemaVersion uint
const sqlite3Driver = "sqlite3ex" const sqlite3Driver = "sqlite3ex"

View file

@ -0,0 +1 @@
-- Schema migration 20: add a perceptual hash (phash) column to scenes,
-- used for duplicate-scene detection. Populated by the phash generator;
-- NULL until a phash has been generated for the scene.
ALTER TABLE `scenes` ADD COLUMN `phash` blob;

View file

@ -0,0 +1,121 @@
package manager
import (
"fmt"
"image"
"image/color"
"math"
"os"
"sort"
"github.com/corona10/goimagehash"
"github.com/disintegration/imaging"
"github.com/fvbommel/sortorder"
"github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/utils"
)
// PhashGenerator computes a perceptual hash for a video file by sampling a
// grid of frames into a single sprite image and hashing that image.
type PhashGenerator struct {
	// Info holds the probe/generation info for the source video.
	Info *GeneratorInfo
	// VideoChecksum is used to name the temporary thumbnail files so
	// concurrent generations for different scenes do not collide.
	VideoChecksum string
	// Columns and Rows define the sprite grid dimensions (both 5 by default).
	Columns int
	Rows    int
}
// NewPhashGenerator creates a PhashGenerator for the given video file.
// checksum is used to name temporary thumbnail files during generation.
// It returns an error when the video file does not exist or cannot be
// probed for generator info.
func NewPhashGenerator(videoFile ffmpeg.VideoFile, checksum string) (*PhashGenerator, error) {
	exists, err := utils.FileExists(videoFile.Path)
	if err != nil {
		return nil, err
	}
	if !exists {
		// The original returned (nil, err) here with a possibly-nil err,
		// which callers would then dereference as a generator; report a
		// concrete error instead.
		return nil, fmt.Errorf("video file not found: %s", videoFile.Path)
	}

	generator, err := newGeneratorInfo(videoFile)
	if err != nil {
		return nil, err
	}

	return &PhashGenerator{
		Info:          generator,
		VideoChecksum: checksum,
		Columns:       5,
		Rows:          5,
	}, nil
}
// Generate computes the perceptual hash for the generator's video: it
// builds a thumbnail sprite, runs goimagehash's perception hash over it,
// and returns the resulting 64-bit hash value.
func (g *PhashGenerator) Generate() (*uint64, error) {
	encoder := ffmpeg.NewEncoder(instance.FFMPEGPath)

	sprite, spriteErr := g.generateSprite(&encoder)
	if spriteErr != nil {
		return nil, spriteErr
	}

	perceptionHash, hashErr := goimagehash.PerceptionHash(sprite)
	if hashErr != nil {
		return nil, hashErr
	}

	result := perceptionHash.GetHash()
	return &result, nil
}
// generateSprite extracts Columns*Rows thumbnails from the video (skipping
// the first and last 5% of its duration to avoid intros/outros), combines
// them into a single montage image, and returns that image for hashing.
// Temporary thumbnail files are written to the generated tmp dir and
// removed before returning, including on error paths.
func (g *PhashGenerator) generateSprite(encoder *ffmpeg.Encoder) (image.Image, error) {
	logger.Infof("[generator] generating phash sprite for %s", g.Info.VideoFile.Path)

	// Generate sprite image offset by 5% on each end to avoid intro/outros
	chunkCount := g.Columns * g.Rows
	offset := 0.05 * g.Info.VideoFile.Duration
	stepSize := (0.9 * g.Info.VideoFile.Duration) / float64(chunkCount)
	for i := 0; i < chunkCount; i++ {
		time := offset + (float64(i) * stepSize)
		num := fmt.Sprintf("%.3d", i)
		filename := "phash_" + g.VideoChecksum + "_" + num + ".bmp"
		options := ffmpeg.ScreenshotOptions{
			OutputPath: instance.Paths.Generated.GetTmpPath(filename),
			Time:       time,
			Width:      160,
		}
		if err := encoder.Screenshot(g.Info.VideoFile, options); err != nil {
			return nil, err
		}
	}

	// Combine all of the thumbnails into a sprite image
	pattern := fmt.Sprintf("phash_%s_.+\\.bmp$", g.VideoChecksum)
	imagePaths, err := utils.MatchEntries(instance.Paths.Generated.Tmp, pattern)
	if err != nil {
		return nil, err
	}
	// Clean up the temporary thumbnails even when an error path below is
	// taken; the original only removed them on success, leaking tmp files
	// on failure. Removal stays best-effort.
	defer func() {
		for _, imagePath := range imagePaths {
			os.Remove(imagePath)
		}
	}()

	sort.Sort(sortorder.Natural(imagePaths))
	var images []image.Image
	for _, imagePath := range imagePaths {
		img, openErr := imaging.Open(imagePath)
		if openErr != nil {
			return nil, openErr
		}
		images = append(images, img)
	}

	if len(images) == 0 {
		return nil, fmt.Errorf("images slice is empty, failed to generate phash sprite for %s", g.Info.VideoFile.Path)
	}

	width := images[0].Bounds().Size().X
	height := images[0].Bounds().Size().Y
	canvasWidth := width * g.Columns
	canvasHeight := height * g.Rows
	montage := imaging.New(canvasWidth, canvasHeight, color.NRGBA{})
	for index, img := range images {
		// Row-major layout: column = index % Columns, row = index / Columns.
		// The original divided by Rows here, which only worked because
		// Rows == Columns.
		x := width * (index % g.Columns)
		y := height * (index / g.Columns)
		montage = imaging.Paste(montage, img, image.Pt(x, y))
	}

	return montage, nil
}

View file

@ -39,6 +39,7 @@ type Scene struct {
Title string `json:"title,omitempty"` Title string `json:"title,omitempty"`
Checksum string `json:"checksum,omitempty"` Checksum string `json:"checksum,omitempty"`
OSHash string `json:"oshash,omitempty"` OSHash string `json:"oshash,omitempty"`
Phash string `json:"phash,omitempty"`
Studio string `json:"studio,omitempty"` Studio string `json:"studio,omitempty"`
URL string `json:"url,omitempty"` URL string `json:"url,omitempty"`
Date string `json:"date,omitempty"` Date string `json:"date,omitempty"`

View file

@ -222,6 +222,7 @@ func (s *singleton) Scan(input models.ScanMetadataInput) {
GeneratePreview: utils.IsTrue(input.ScanGeneratePreviews), GeneratePreview: utils.IsTrue(input.ScanGeneratePreviews),
GenerateImagePreview: utils.IsTrue(input.ScanGenerateImagePreviews), GenerateImagePreview: utils.IsTrue(input.ScanGenerateImagePreviews),
GenerateSprite: utils.IsTrue(input.ScanGenerateSprites), GenerateSprite: utils.IsTrue(input.ScanGenerateSprites),
GeneratePhash: utils.IsTrue(input.ScanGeneratePhashes),
} }
go task.Start(&wg) go task.Start(&wg)
@ -427,7 +428,7 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) {
logger.Infof("Taking too long to count content. Skipping...") logger.Infof("Taking too long to count content. Skipping...")
logger.Infof("Generating content") logger.Infof("Generating content")
} else { } else {
logger.Infof("Generating %d sprites %d previews %d image previews %d markers %d transcodes", totalsNeeded.sprites, totalsNeeded.previews, totalsNeeded.imagePreviews, totalsNeeded.markers, totalsNeeded.transcodes) logger.Infof("Generating %d sprites %d previews %d image previews %d markers %d transcodes %d phashes", totalsNeeded.sprites, totalsNeeded.previews, totalsNeeded.imagePreviews, totalsNeeded.markers, totalsNeeded.transcodes, totalsNeeded.phashes)
} }
fileNamingAlgo := config.GetVideoFileNamingAlgorithm() fileNamingAlgo := config.GetVideoFileNamingAlgorithm()
@ -501,6 +502,16 @@ func (s *singleton) Generate(input models.GenerateMetadataInput) {
} }
go task.Start(&wg) go task.Start(&wg)
} }
if input.Phashes {
task := GeneratePhashTask{
Scene: *scene,
fileNamingAlgorithm: fileNamingAlgo,
txnManager: s.TxnManager,
}
wg.Add()
go task.Start(&wg)
}
} }
wg.Wait() wg.Wait()
@ -992,6 +1003,7 @@ type totalsGenerate struct {
imagePreviews int64 imagePreviews int64
markers int64 markers int64
transcodes int64 transcodes int64
phashes int64
} }
func (s *singleton) neededGenerate(scenes []*models.Scene, input models.GenerateMetadataInput) *totalsGenerate { func (s *singleton) neededGenerate(scenes []*models.Scene, input models.GenerateMetadataInput) *totalsGenerate {
@ -1065,6 +1077,17 @@ func (s *singleton) neededGenerate(scenes []*models.Scene, input models.Generate
totals.transcodes++ totals.transcodes++
} }
} }
if input.Phashes {
task := GeneratePhashTask{
Scene: *scene,
fileNamingAlgorithm: fileNamingAlgo,
}
if task.shouldGenerate() {
totals.phashes++
}
}
} }
//check for timeout //check for timeout
select { select {

View file

@ -0,0 +1,62 @@
package manager
import (
"github.com/remeh/sizedwaitgroup"
"context"
"database/sql"
"github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
)
// GeneratePhashTask generates and persists a perceptual hash for a single
// scene. It is run as a goroutine from the scan and generate jobs.
type GeneratePhashTask struct {
	// Scene is the scene to hash; its Phash field determines whether
	// generation is needed (see shouldGenerate).
	Scene models.Scene
	// fileNamingAlgorithm selects which hash (MD5/oshash) names the
	// scene's generated files.
	fileNamingAlgorithm models.HashAlgorithm
	// txnManager is used to write the resulting phash back to the scene.
	txnManager models.TransactionManager
}
// Start generates a phash for the task's scene and stores it on the scene
// row. Intended to run as a goroutine; the wait group is marked done when
// the task finishes. Errors are logged rather than returned.
func (t *GeneratePhashTask) Start(wg *sizedwaitgroup.SizedWaitGroup) {
	defer wg.Done()

	if !t.shouldGenerate() {
		return
	}

	videoFile, err := ffmpeg.NewVideoFile(instance.FFProbePath, t.Scene.Path, false)
	if err != nil {
		logger.Errorf("error reading video file: %s", err.Error())
		return
	}

	sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm)
	generator, err := NewPhashGenerator(*videoFile, sceneHash)
	if err != nil {
		logger.Errorf("error creating phash generator: %s", err.Error())
		return
	}

	hash, err := generator.Generate()
	if err != nil {
		logger.Errorf("error generating phash: %s", err.Error())
		return
	}

	// Persist the hash on the scene via a partial update.
	updateErr := t.txnManager.WithTxn(context.TODO(), func(r models.Repository) error {
		value := sql.NullInt64{Int64: int64(*hash), Valid: true}
		partial := models.ScenePartial{
			ID:    t.Scene.ID,
			Phash: &value,
		}
		_, txErr := r.Scene().Update(partial)
		return txErr
	})
	if updateErr != nil {
		logger.Error(updateErr.Error())
	}
}
// shouldGenerate reports whether the scene still needs a phash computed
// (i.e. no valid phash is stored yet).
func (t *GeneratePhashTask) shouldGenerate() bool {
	alreadyHashed := t.Scene.Phash.Valid
	return !alreadyHashed
}

View file

@ -31,6 +31,7 @@ type ScanTask struct {
calculateMD5 bool calculateMD5 bool
fileNamingAlgorithm models.HashAlgorithm fileNamingAlgorithm models.HashAlgorithm
GenerateSprite bool GenerateSprite bool
GeneratePhash bool
GeneratePreview bool GeneratePreview bool
GenerateImagePreview bool GenerateImagePreview bool
zipGallery *models.Gallery zipGallery *models.Gallery
@ -55,6 +56,16 @@ func (t *ScanTask) Start(wg *sizedwaitgroup.SizedWaitGroup) {
go taskSprite.Start(&iwg) go taskSprite.Start(&iwg)
} }
if t.GeneratePhash {
iwg.Add()
taskPhash := GeneratePhashTask{
Scene: *s,
fileNamingAlgorithm: t.fileNamingAlgorithm,
txnManager: t.TxnManager,
}
go taskPhash.Start(&iwg)
}
if t.GeneratePreview { if t.GeneratePreview {
iwg.Add() iwg.Add()

View file

@ -438,6 +438,30 @@ func (_m *SceneReaderWriter) FindMany(ids []int) ([]*models.Scene, error) {
return r0, r1 return r0, r1
} }
// FindDuplicates provides a mock function with given fields: distance
func (_m *SceneReaderWriter) FindDuplicates(distance int) ([][]*models.Scene, error) {
	ret := _m.Called(distance)

	// First return value: either produced by a registered function or
	// taken directly from the configured return.
	var r0 [][]*models.Scene
	if fn, ok := ret.Get(0).(func(int) [][]*models.Scene); ok {
		r0 = fn(distance)
	} else if v := ret.Get(0); v != nil {
		r0 = v.([][]*models.Scene)
	}

	// Second return value: the error, possibly computed from distance.
	var r1 error
	if fn, ok := ret.Get(1).(func(int) error); ok {
		r1 = fn(distance)
	} else {
		r1 = ret.Error(1)
	}

	return r0, r1
}
// GetCover provides a mock function with given fields: sceneID // GetCover provides a mock function with given fields: sceneID
func (_m *SceneReaderWriter) GetCover(sceneID int) ([]byte, error) { func (_m *SceneReaderWriter) GetCover(sceneID int) ([]byte, error) {
ret := _m.Called(sceneID) ret := _m.Called(sceneID)

View file

@ -29,6 +29,7 @@ type Scene struct {
Bitrate sql.NullInt64 `db:"bitrate" json:"bitrate"` Bitrate sql.NullInt64 `db:"bitrate" json:"bitrate"`
StudioID sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"` StudioID sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"`
FileModTime NullSQLiteTimestamp `db:"file_mod_time" json:"file_mod_time"` FileModTime NullSQLiteTimestamp `db:"file_mod_time" json:"file_mod_time"`
Phash sql.NullInt64 `db:"phash,omitempty" json:"phash"`
CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"` CreatedAt SQLiteTimestamp `db:"created_at" json:"created_at"`
UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"` UpdatedAt SQLiteTimestamp `db:"updated_at" json:"updated_at"`
} }
@ -58,6 +59,7 @@ type ScenePartial struct {
StudioID *sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"` StudioID *sql.NullInt64 `db:"studio_id,omitempty" json:"studio_id"`
MovieID *sql.NullInt64 `db:"movie_id,omitempty" json:"movie_id"` MovieID *sql.NullInt64 `db:"movie_id,omitempty" json:"movie_id"`
FileModTime *NullSQLiteTimestamp `db:"file_mod_time" json:"file_mod_time"` FileModTime *NullSQLiteTimestamp `db:"file_mod_time" json:"file_mod_time"`
Phash *sql.NullInt64 `db:"phash,omitempty" json:"phash"`
CreatedAt *SQLiteTimestamp `db:"created_at" json:"created_at"` CreatedAt *SQLiteTimestamp `db:"created_at" json:"created_at"`
UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"` UpdatedAt *SQLiteTimestamp `db:"updated_at" json:"updated_at"`
} }

View file

@ -8,6 +8,7 @@ type SceneReader interface {
FindByPath(path string) (*Scene, error) FindByPath(path string) (*Scene, error)
FindByPerformerID(performerID int) ([]*Scene, error) FindByPerformerID(performerID int) ([]*Scene, error)
FindByGalleryID(performerID int) ([]*Scene, error) FindByGalleryID(performerID int) ([]*Scene, error)
FindDuplicates(distance int) ([][]*Scene, error)
CountByPerformerID(performerID int) (int, error) CountByPerformerID(performerID int) (int, error)
// FindByStudioID(studioID int) ([]*Scene, error) // FindByStudioID(studioID int) ([]*Scene, error)
FindByMovieID(movieID int) ([]*Scene, error) FindByMovieID(movieID int) ([]*Scene, error)

View file

@ -27,6 +27,10 @@ func ToBasicJSON(reader models.SceneReader, scene *models.Scene) (*jsonschema.Sc
newSceneJSON.OSHash = scene.OSHash.String newSceneJSON.OSHash = scene.OSHash.String
} }
if scene.Phash.Valid {
newSceneJSON.Phash = utils.PhashToString(scene.Phash.Int64)
}
if scene.Title.Valid { if scene.Title.Valid {
newSceneJSON.Title = scene.Title.String newSceneJSON.Title = scene.Title.String
} }

View file

@ -7,6 +7,7 @@ import (
"github.com/stashapp/stash/pkg/manager/jsonschema" "github.com/stashapp/stash/pkg/manager/jsonschema"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks" "github.com/stashapp/stash/pkg/models/mocks"
"github.com/stashapp/stash/pkg/utils"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"testing" "testing"
@ -43,6 +44,7 @@ const (
checksum = "checksum" checksum = "checksum"
oshash = "oshash" oshash = "oshash"
title = "title" title = "title"
phash = -3846826108889195
date = "2001-01-01" date = "2001-01-01"
rating = 5 rating = 5
ocounter = 2 ocounter = 2
@ -112,6 +114,7 @@ func createFullScene(id int) models.Scene {
Height: models.NullInt64(height), Height: models.NullInt64(height),
OCounter: ocounter, OCounter: ocounter,
OSHash: models.NullString(oshash), OSHash: models.NullString(oshash),
Phash: models.NullInt64(phash),
Rating: models.NullInt64(rating), Rating: models.NullInt64(rating),
Organized: organized, Organized: organized,
Size: models.NullString(size), Size: models.NullString(size),
@ -147,6 +150,7 @@ func createFullJSONScene(image string) *jsonschema.Scene {
Details: details, Details: details,
OCounter: ocounter, OCounter: ocounter,
OSHash: oshash, OSHash: oshash,
Phash: utils.PhashToString(phash),
Rating: rating, Rating: rating,
Organized: organized, Organized: organized,
URL: url, URL: url,

View file

@ -73,6 +73,11 @@ func (i *Importer) sceneJSONToScene(sceneJSON jsonschema.Scene) models.Scene {
Path: i.Path, Path: i.Path,
} }
if sceneJSON.Phash != "" {
hash, err := strconv.ParseUint(sceneJSON.Phash, 16, 64)
newScene.Phash = sql.NullInt64{Int64: int64(hash), Valid: err == nil}
}
if sceneJSON.Title != "" { if sceneJSON.Title != "" {
newScene.Title = sql.NullString{String: sceneJSON.Title, Valid: true} newScene.Title = sql.NullString{String: sceneJSON.Title, Valid: true}
} }

View file

@ -18,56 +18,67 @@ func NewClient(cli *http.Client, baseURL string, options ...client.HTTPRequestOp
} }
type Query struct { type Query struct {
FindPerformer *Performer "json:\"findPerformer\" graphql:\"findPerformer\"" FindPerformer *Performer "json:\"findPerformer\" graphql:\"findPerformer\""
QueryPerformers QueryPerformersResultType "json:\"queryPerformers\" graphql:\"queryPerformers\"" QueryPerformers QueryPerformersResultType "json:\"queryPerformers\" graphql:\"queryPerformers\""
FindStudio *Studio "json:\"findStudio\" graphql:\"findStudio\"" FindStudio *Studio "json:\"findStudio\" graphql:\"findStudio\""
QueryStudios QueryStudiosResultType "json:\"queryStudios\" graphql:\"queryStudios\"" QueryStudios QueryStudiosResultType "json:\"queryStudios\" graphql:\"queryStudios\""
FindTag *Tag "json:\"findTag\" graphql:\"findTag\"" FindTag *Tag "json:\"findTag\" graphql:\"findTag\""
QueryTags QueryTagsResultType "json:\"queryTags\" graphql:\"queryTags\"" QueryTags QueryTagsResultType "json:\"queryTags\" graphql:\"queryTags\""
FindScene *Scene "json:\"findScene\" graphql:\"findScene\"" FindTagCategory *TagCategory "json:\"findTagCategory\" graphql:\"findTagCategory\""
FindSceneByFingerprint []*Scene "json:\"findSceneByFingerprint\" graphql:\"findSceneByFingerprint\"" QueryTagCategories QueryTagCategoriesResultType "json:\"queryTagCategories\" graphql:\"queryTagCategories\""
FindScenesByFingerprints []*Scene "json:\"findScenesByFingerprints\" graphql:\"findScenesByFingerprints\"" FindScene *Scene "json:\"findScene\" graphql:\"findScene\""
QueryScenes QueryScenesResultType "json:\"queryScenes\" graphql:\"queryScenes\"" FindSceneByFingerprint []*Scene "json:\"findSceneByFingerprint\" graphql:\"findSceneByFingerprint\""
FindEdit *Edit "json:\"findEdit\" graphql:\"findEdit\"" FindScenesByFingerprints []*Scene "json:\"findScenesByFingerprints\" graphql:\"findScenesByFingerprints\""
QueryEdits QueryEditsResultType "json:\"queryEdits\" graphql:\"queryEdits\"" QueryScenes QueryScenesResultType "json:\"queryScenes\" graphql:\"queryScenes\""
FindUser *User "json:\"findUser\" graphql:\"findUser\"" FindEdit *Edit "json:\"findEdit\" graphql:\"findEdit\""
QueryUsers QueryUsersResultType "json:\"queryUsers\" graphql:\"queryUsers\"" QueryEdits QueryEditsResultType "json:\"queryEdits\" graphql:\"queryEdits\""
Me *User "json:\"me\" graphql:\"me\"" FindUser *User "json:\"findUser\" graphql:\"findUser\""
SearchPerformer []*Performer "json:\"searchPerformer\" graphql:\"searchPerformer\"" QueryUsers QueryUsersResultType "json:\"queryUsers\" graphql:\"queryUsers\""
SearchScene []*Scene "json:\"searchScene\" graphql:\"searchScene\"" Me *User "json:\"me\" graphql:\"me\""
Version Version "json:\"version\" graphql:\"version\"" SearchPerformer []*Performer "json:\"searchPerformer\" graphql:\"searchPerformer\""
SearchScene []*Scene "json:\"searchScene\" graphql:\"searchScene\""
Version Version "json:\"version\" graphql:\"version\""
} }
type Mutation struct { type Mutation struct {
SceneCreate *Scene "json:\"sceneCreate\" graphql:\"sceneCreate\"" SceneCreate *Scene "json:\"sceneCreate\" graphql:\"sceneCreate\""
SceneUpdate *Scene "json:\"sceneUpdate\" graphql:\"sceneUpdate\"" SceneUpdate *Scene "json:\"sceneUpdate\" graphql:\"sceneUpdate\""
SceneDestroy bool "json:\"sceneDestroy\" graphql:\"sceneDestroy\"" SceneDestroy bool "json:\"sceneDestroy\" graphql:\"sceneDestroy\""
PerformerCreate *Performer "json:\"performerCreate\" graphql:\"performerCreate\"" PerformerCreate *Performer "json:\"performerCreate\" graphql:\"performerCreate\""
PerformerUpdate *Performer "json:\"performerUpdate\" graphql:\"performerUpdate\"" PerformerUpdate *Performer "json:\"performerUpdate\" graphql:\"performerUpdate\""
PerformerDestroy bool "json:\"performerDestroy\" graphql:\"performerDestroy\"" PerformerDestroy bool "json:\"performerDestroy\" graphql:\"performerDestroy\""
StudioCreate *Studio "json:\"studioCreate\" graphql:\"studioCreate\"" StudioCreate *Studio "json:\"studioCreate\" graphql:\"studioCreate\""
StudioUpdate *Studio "json:\"studioUpdate\" graphql:\"studioUpdate\"" StudioUpdate *Studio "json:\"studioUpdate\" graphql:\"studioUpdate\""
StudioDestroy bool "json:\"studioDestroy\" graphql:\"studioDestroy\"" StudioDestroy bool "json:\"studioDestroy\" graphql:\"studioDestroy\""
TagCreate *Tag "json:\"tagCreate\" graphql:\"tagCreate\"" TagCreate *Tag "json:\"tagCreate\" graphql:\"tagCreate\""
TagUpdate *Tag "json:\"tagUpdate\" graphql:\"tagUpdate\"" TagUpdate *Tag "json:\"tagUpdate\" graphql:\"tagUpdate\""
TagDestroy bool "json:\"tagDestroy\" graphql:\"tagDestroy\"" TagDestroy bool "json:\"tagDestroy\" graphql:\"tagDestroy\""
UserCreate *User "json:\"userCreate\" graphql:\"userCreate\"" UserCreate *User "json:\"userCreate\" graphql:\"userCreate\""
UserUpdate *User "json:\"userUpdate\" graphql:\"userUpdate\"" UserUpdate *User "json:\"userUpdate\" graphql:\"userUpdate\""
UserDestroy bool "json:\"userDestroy\" graphql:\"userDestroy\"" UserDestroy bool "json:\"userDestroy\" graphql:\"userDestroy\""
ImageCreate *Image "json:\"imageCreate\" graphql:\"imageCreate\"" ImageCreate *Image "json:\"imageCreate\" graphql:\"imageCreate\""
ImageUpdate *Image "json:\"imageUpdate\" graphql:\"imageUpdate\"" ImageDestroy bool "json:\"imageDestroy\" graphql:\"imageDestroy\""
ImageDestroy bool "json:\"imageDestroy\" graphql:\"imageDestroy\"" NewUser *string "json:\"newUser\" graphql:\"newUser\""
RegenerateAPIKey string "json:\"regenerateAPIKey\" graphql:\"regenerateAPIKey\"" ActivateNewUser *User "json:\"activateNewUser\" graphql:\"activateNewUser\""
ChangePassword bool "json:\"changePassword\" graphql:\"changePassword\"" GenerateInviteCode string "json:\"generateInviteCode\" graphql:\"generateInviteCode\""
SceneEdit Edit "json:\"sceneEdit\" graphql:\"sceneEdit\"" RescindInviteCode bool "json:\"rescindInviteCode\" graphql:\"rescindInviteCode\""
PerformerEdit Edit "json:\"performerEdit\" graphql:\"performerEdit\"" GrantInvite int "json:\"grantInvite\" graphql:\"grantInvite\""
StudioEdit Edit "json:\"studioEdit\" graphql:\"studioEdit\"" RevokeInvite int "json:\"revokeInvite\" graphql:\"revokeInvite\""
TagEdit Edit "json:\"tagEdit\" graphql:\"tagEdit\"" TagCategoryCreate *TagCategory "json:\"tagCategoryCreate\" graphql:\"tagCategoryCreate\""
EditVote Edit "json:\"editVote\" graphql:\"editVote\"" TagCategoryUpdate *TagCategory "json:\"tagCategoryUpdate\" graphql:\"tagCategoryUpdate\""
EditComment Edit "json:\"editComment\" graphql:\"editComment\"" TagCategoryDestroy bool "json:\"tagCategoryDestroy\" graphql:\"tagCategoryDestroy\""
ApplyEdit Edit "json:\"applyEdit\" graphql:\"applyEdit\"" RegenerateAPIKey string "json:\"regenerateAPIKey\" graphql:\"regenerateAPIKey\""
CancelEdit Edit "json:\"cancelEdit\" graphql:\"cancelEdit\"" ResetPassword bool "json:\"resetPassword\" graphql:\"resetPassword\""
SubmitFingerprint bool "json:\"submitFingerprint\" graphql:\"submitFingerprint\"" ChangePassword bool "json:\"changePassword\" graphql:\"changePassword\""
SceneEdit Edit "json:\"sceneEdit\" graphql:\"sceneEdit\""
PerformerEdit Edit "json:\"performerEdit\" graphql:\"performerEdit\""
StudioEdit Edit "json:\"studioEdit\" graphql:\"studioEdit\""
TagEdit Edit "json:\"tagEdit\" graphql:\"tagEdit\""
EditVote Edit "json:\"editVote\" graphql:\"editVote\""
EditComment Edit "json:\"editComment\" graphql:\"editComment\""
ApplyEdit Edit "json:\"applyEdit\" graphql:\"applyEdit\""
CancelEdit Edit "json:\"cancelEdit\" graphql:\"cancelEdit\""
SubmitFingerprint bool "json:\"submitFingerprint\" graphql:\"submitFingerprint\""
} }
type URLFragment struct { type URLFragment struct {
URL string "json:\"url\" graphql:\"url\"" URL string "json:\"url\" graphql:\"url\""
@ -76,8 +87,8 @@ type URLFragment struct {
type ImageFragment struct { type ImageFragment struct {
ID string "json:\"id\" graphql:\"id\"" ID string "json:\"id\" graphql:\"id\""
URL string "json:\"url\" graphql:\"url\"" URL string "json:\"url\" graphql:\"url\""
Width *int "json:\"width\" graphql:\"width\"" Width int "json:\"width\" graphql:\"width\""
Height *int "json:\"height\" graphql:\"height\"" Height int "json:\"height\" graphql:\"height\""
} }
type StudioFragment struct { type StudioFragment struct {
Name string "json:\"name\" graphql:\"name\"" Name string "json:\"name\" graphql:\"name\""
@ -189,9 +200,21 @@ fragment SceneFragment on Scene {
... FingerprintFragment ... FingerprintFragment
} }
} }
fragment URLFragment on URL { fragment ImageFragment on Image {
id
url url
type width
height
}
fragment StudioFragment on Studio {
name
id
urls {
... URLFragment
}
images {
... ImageFragment
}
} }
fragment PerformerAppearanceFragment on PerformerAppearance { fragment PerformerAppearanceFragment on PerformerAppearance {
as as
@ -199,6 +222,24 @@ fragment PerformerAppearanceFragment on PerformerAppearance {
... PerformerFragment ... PerformerFragment
} }
} }
fragment FuzzyDateFragment on FuzzyDate {
date
accuracy
}
fragment MeasurementsFragment on Measurements {
band_size
cup_size
waist
hip
}
fragment URLFragment on URL {
url
type
}
fragment TagFragment on Tag {
name
id
}
fragment PerformerFragment on Performer { fragment PerformerFragment on Performer {
id id
name name
@ -232,45 +273,15 @@ fragment PerformerFragment on Performer {
... BodyModificationFragment ... BodyModificationFragment
} }
} }
fragment FuzzyDateFragment on FuzzyDate { fragment BodyModificationFragment on BodyModification {
date location
accuracy description
} }
fragment FingerprintFragment on Fingerprint { fragment FingerprintFragment on Fingerprint {
algorithm algorithm
hash hash
duration duration
} }
fragment ImageFragment on Image {
id
url
width
height
}
fragment StudioFragment on Studio {
name
id
urls {
... URLFragment
}
images {
... ImageFragment
}
}
fragment TagFragment on Tag {
name
id
}
fragment MeasurementsFragment on Measurements {
band_size
cup_size
waist
hip
}
fragment BodyModificationFragment on BodyModification {
location
description
}
` `
func (c *Client) FindSceneByFingerprint(ctx context.Context, fingerprint FingerprintQueryInput, httpRequestOptions ...client.HTTPRequestOption) (*FindSceneByFingerprint, error) { func (c *Client) FindSceneByFingerprint(ctx context.Context, fingerprint FingerprintQueryInput, httpRequestOptions ...client.HTTPRequestOption) (*FindSceneByFingerprint, error) {
@ -291,11 +302,29 @@ const FindScenesByFingerprintsQuery = `query FindScenesByFingerprints ($fingerpr
... SceneFragment ... SceneFragment
} }
} }
fragment PerformerAppearanceFragment on PerformerAppearance { fragment BodyModificationFragment on BodyModification {
as location
performer { description
... PerformerFragment }
fragment ImageFragment on Image {
id
url
width
height
}
fragment StudioFragment on Studio {
name
id
urls {
... URLFragment
} }
images {
... ImageFragment
}
}
fragment TagFragment on Tag {
name
id
} }
fragment PerformerFragment on Performer { fragment PerformerFragment on Performer {
id id
@ -336,11 +365,6 @@ fragment MeasurementsFragment on Measurements {
waist waist
hip hip
} }
fragment FingerprintFragment on Fingerprint {
algorithm
hash
duration
}
fragment SceneFragment on Scene { fragment SceneFragment on Scene {
id id
title title
@ -370,33 +394,20 @@ fragment URLFragment on URL {
url url
type type
} }
fragment ImageFragment on Image { fragment PerformerAppearanceFragment on PerformerAppearance {
id as
url performer {
width ... PerformerFragment
height
}
fragment TagFragment on Tag {
name
id
}
fragment StudioFragment on Studio {
name
id
urls {
... URLFragment
}
images {
... ImageFragment
} }
} }
fragment FuzzyDateFragment on FuzzyDate { fragment FuzzyDateFragment on FuzzyDate {
date date
accuracy accuracy
} }
fragment BodyModificationFragment on BodyModification { fragment FingerprintFragment on Fingerprint {
location algorithm
description hash
duration
} }
` `
@ -418,16 +429,50 @@ const SearchSceneQuery = `query SearchScene ($term: String!) {
... SceneFragment ... SceneFragment
} }
} }
fragment PerformerAppearanceFragment on PerformerAppearance {
as
performer {
... PerformerFragment
}
}
fragment BodyModificationFragment on BodyModification {
location
description
}
fragment FingerprintFragment on Fingerprint {
algorithm
hash
duration
}
fragment SceneFragment on Scene {
id
title
details
duration
date
urls {
... URLFragment
}
images {
... ImageFragment
}
studio {
... StudioFragment
}
tags {
... TagFragment
}
performers {
... PerformerAppearanceFragment
}
fingerprints {
... FingerprintFragment
}
}
fragment URLFragment on URL { fragment URLFragment on URL {
url url
type type
} }
fragment ImageFragment on Image {
id
url
width
height
}
fragment TagFragment on Tag { fragment TagFragment on Tag {
name name
id id
@ -475,30 +520,11 @@ fragment MeasurementsFragment on Measurements {
waist waist
hip hip
} }
fragment SceneFragment on Scene { fragment ImageFragment on Image {
id id
title url
details width
duration height
date
urls {
... URLFragment
}
images {
... ImageFragment
}
studio {
... StudioFragment
}
tags {
... TagFragment
}
performers {
... PerformerAppearanceFragment
}
fingerprints {
... FingerprintFragment
}
} }
fragment StudioFragment on Studio { fragment StudioFragment on Studio {
name name
@ -510,21 +536,6 @@ fragment StudioFragment on Studio {
... ImageFragment ... ImageFragment
} }
} }
fragment PerformerAppearanceFragment on PerformerAppearance {
as
performer {
... PerformerFragment
}
}
fragment BodyModificationFragment on BodyModification {
location
description
}
fragment FingerprintFragment on Fingerprint {
algorithm
hash
duration
}
` `
func (c *Client) SearchScene(ctx context.Context, term string, httpRequestOptions ...client.HTTPRequestOption) (*SearchScene, error) { func (c *Client) SearchScene(ctx context.Context, term string, httpRequestOptions ...client.HTTPRequestOption) (*SearchScene, error) {

View file

@ -7,6 +7,8 @@ import (
"io" "io"
"strconv" "strconv"
"time" "time"
"github.com/99designs/gqlgen/graphql"
) )
type EditDetails interface { type EditDetails interface {
@ -17,6 +19,13 @@ type EditTarget interface {
IsEditTarget() IsEditTarget()
} }
type ActivateNewUserInput struct {
Name string `json:"name"`
Email string `json:"email"`
ActivationKey string `json:"activation_key"`
Password string `json:"password"`
}
type ApplyEditInput struct { type ApplyEditInput struct {
ID string `json:"id"` ID string `json:"id"`
} }
@ -58,11 +67,15 @@ type Edit struct {
Target EditTarget `json:"target"` Target EditTarget `json:"target"`
TargetType TargetTypeEnum `json:"target_type"` TargetType TargetTypeEnum `json:"target_type"`
// Objects to merge with the target. Only applicable to merges // Objects to merge with the target. Only applicable to merges
MergeSources []EditTarget `json:"merge_sources"` MergeSources []EditTarget `json:"merge_sources"`
Operation OperationEnum `json:"operation"` Operation OperationEnum `json:"operation"`
Details EditDetails `json:"details"` Details EditDetails `json:"details"`
Comments []*EditComment `json:"comments"` // Previous state of fields being modified - null if operation is create or delete.
Votes []*VoteComment `json:"votes"` OldDetails EditDetails `json:"old_details"`
// Entity specific options
Options *PerformerEditOptions `json:"options"`
Comments []*EditComment `json:"comments"`
Votes []*VoteComment `json:"votes"`
// = Accepted - Rejected // = Accepted - Rejected
VoteCount int `json:"vote_count"` VoteCount int `json:"vote_count"`
Status VoteStatusEnum `json:"status"` Status VoteStatusEnum `json:"status"`
@ -115,11 +128,6 @@ type EditVoteInput struct {
Type VoteTypeEnum `json:"type"` Type VoteTypeEnum `json:"type"`
} }
type EthnicityCriterionInput struct {
Value *EthnicityEnum `json:"value"`
Modifier CriterionModifier `json:"modifier"`
}
type EyeColorCriterionInput struct { type EyeColorCriterionInput struct {
Value *EyeColorEnum `json:"value"` Value *EyeColorEnum `json:"value"`
Modifier CriterionModifier `json:"modifier"` Modifier CriterionModifier `json:"modifier"`
@ -157,6 +165,11 @@ type FuzzyDateInput struct {
Accuracy DateAccuracyEnum `json:"accuracy"` Accuracy DateAccuracyEnum `json:"accuracy"`
} }
type GrantInviteInput struct {
UserID string `json:"user_id"`
Amount int `json:"amount"`
}
type HairColorCriterionInput struct { type HairColorCriterionInput struct {
Value *HairColorEnum `json:"value"` Value *HairColorEnum `json:"value"`
Modifier CriterionModifier `json:"modifier"` Modifier CriterionModifier `json:"modifier"`
@ -170,12 +183,13 @@ type IDCriterionInput struct {
type Image struct { type Image struct {
ID string `json:"id"` ID string `json:"id"`
URL string `json:"url"` URL string `json:"url"`
Width *int `json:"width"` Width int `json:"width"`
Height *int `json:"height"` Height int `json:"height"`
} }
type ImageCreateInput struct { type ImageCreateInput struct {
URL string `json:"url"` URL *string `json:"url"`
File *graphql.Upload `json:"file"`
} }
type ImageDestroyInput struct { type ImageDestroyInput struct {
@ -183,8 +197,8 @@ type ImageDestroyInput struct {
} }
type ImageUpdateInput struct { type ImageUpdateInput struct {
ID string `json:"id"` ID string `json:"id"`
URL string `json:"url"` URL *string `json:"url"`
} }
type IntCriterionInput struct { type IntCriterionInput struct {
@ -211,6 +225,11 @@ type MultiIDCriterionInput struct {
Modifier CriterionModifier `json:"modifier"` Modifier CriterionModifier `json:"modifier"`
} }
type NewUserInput struct {
Email string `json:"email"`
InviteKey *string `json:"invite_key"`
}
type Performer struct { type Performer struct {
ID string `json:"id"` ID string `json:"id"`
Name string `json:"name"` Name string `json:"name"`
@ -234,6 +253,8 @@ type Performer struct {
Piercings []*BodyModification `json:"piercings"` Piercings []*BodyModification `json:"piercings"`
Images []*Image `json:"images"` Images []*Image `json:"images"`
Deleted bool `json:"deleted"` Deleted bool `json:"deleted"`
Edits []*Edit `json:"edits"`
SceneCount int `json:"scene_count"`
} }
func (Performer) IsEditTarget() {} func (Performer) IsEditTarget() {}
@ -276,21 +297,25 @@ type PerformerDestroyInput struct {
} }
type PerformerEdit struct { type PerformerEdit struct {
Name *string `json:"name"` Name *string `json:"name"`
Disambiguation *string `json:"disambiguation"` Disambiguation *string `json:"disambiguation"`
AddedAliases []string `json:"added_aliases"` AddedAliases []string `json:"added_aliases"`
RemovedAliases []string `json:"removed_aliases"` RemovedAliases []string `json:"removed_aliases"`
Gender *GenderEnum `json:"gender"` Gender *GenderEnum `json:"gender"`
AddedUrls []*URL `json:"added_urls"` AddedUrls []*URL `json:"added_urls"`
RemovedUrls []*URL `json:"removed_urls"` RemovedUrls []*URL `json:"removed_urls"`
Birthdate *FuzzyDate `json:"birthdate"` Birthdate *string `json:"birthdate"`
Ethnicity *EthnicityEnum `json:"ethnicity"` BirthdateAccuracy *string `json:"birthdate_accuracy"`
Country *string `json:"country"` Ethnicity *EthnicityEnum `json:"ethnicity"`
EyeColor *EyeColorEnum `json:"eye_color"` Country *string `json:"country"`
HairColor *HairColorEnum `json:"hair_color"` EyeColor *EyeColorEnum `json:"eye_color"`
HairColor *HairColorEnum `json:"hair_color"`
// Height in cm // Height in cm
Height *int `json:"height"` Height *int `json:"height"`
Measurements *Measurements `json:"measurements"` CupSize *string `json:"cup_size"`
BandSize *int `json:"band_size"`
WaistSize *int `json:"waist_size"`
HipSize *int `json:"hip_size"`
BreastType *BreastTypeEnum `json:"breast_type"` BreastType *BreastTypeEnum `json:"breast_type"`
CareerStartYear *int `json:"career_start_year"` CareerStartYear *int `json:"career_start_year"`
CareerEndYear *int `json:"career_end_year"` CareerEndYear *int `json:"career_end_year"`
@ -329,6 +354,22 @@ type PerformerEditInput struct {
Edit *EditInput `json:"edit"` Edit *EditInput `json:"edit"`
// Not required for destroy type // Not required for destroy type
Details *PerformerEditDetailsInput `json:"details"` Details *PerformerEditDetailsInput `json:"details"`
// Controls aliases modification for merges and name modifications
Options *PerformerEditOptionsInput `json:"options"`
}
type PerformerEditOptions struct {
// Set performer alias on scenes without alias to old name if name is changed
SetModifyAliases bool `json:"set_modify_aliases"`
// Set performer alias on scenes attached to merge sources to old name
SetMergeAliases bool `json:"set_merge_aliases"`
}
type PerformerEditOptionsInput struct {
// Set performer alias on scenes without alias to old name if name is changed
SetModifyAliases *bool `json:"set_modify_aliases"`
// Set performer alias on scenes attached to merge sources to old name
SetMergeAliases *bool `json:"set_merge_aliases"`
} }
type PerformerFilterType struct { type PerformerFilterType struct {
@ -339,13 +380,13 @@ type PerformerFilterType struct {
// Search aliases only - assumes like query unless quoted // Search aliases only - assumes like query unless quoted
Alias *string `json:"alias"` Alias *string `json:"alias"`
Disambiguation *StringCriterionInput `json:"disambiguation"` Disambiguation *StringCriterionInput `json:"disambiguation"`
Gender *GenderEnum `json:"gender"` Gender *GenderFilterEnum `json:"gender"`
// Filter to search urls - assumes like query unless quoted // Filter to search urls - assumes like query unless quoted
URL *string `json:"url"` URL *string `json:"url"`
Birthdate *DateCriterionInput `json:"birthdate"` Birthdate *DateCriterionInput `json:"birthdate"`
BirthYear *IntCriterionInput `json:"birth_year"` BirthYear *IntCriterionInput `json:"birth_year"`
Age *IntCriterionInput `json:"age"` Age *IntCriterionInput `json:"age"`
Ethnicity *EthnicityCriterionInput `json:"ethnicity"` Ethnicity *EthnicityFilterEnum `json:"ethnicity"`
Country *StringCriterionInput `json:"country"` Country *StringCriterionInput `json:"country"`
EyeColor *EyeColorCriterionInput `json:"eye_color"` EyeColor *EyeColorCriterionInput `json:"eye_color"`
HairColor *HairColorCriterionInput `json:"hair_color"` HairColor *HairColorCriterionInput `json:"hair_color"`
@ -410,6 +451,11 @@ type QueryStudiosResultType struct {
Studios []*Studio `json:"studios"` Studios []*Studio `json:"studios"`
} }
type QueryTagCategoriesResultType struct {
Count int `json:"count"`
TagCategories []*TagCategory `json:"tag_categories"`
}
type QueryTagsResultType struct { type QueryTagsResultType struct {
Count int `json:"count"` Count int `json:"count"`
Tags []*Tag `json:"tags"` Tags []*Tag `json:"tags"`
@ -420,6 +466,15 @@ type QueryUsersResultType struct {
Users []*User `json:"users"` Users []*User `json:"users"`
} }
type ResetPasswordInput struct {
Email string `json:"email"`
}
type RevokeInviteInput struct {
UserID string `json:"user_id"`
Amount int `json:"amount"`
}
type RoleCriterionInput struct { type RoleCriterionInput struct {
Value []RoleEnum `json:"value"` Value []RoleEnum `json:"value"`
Modifier CriterionModifier `json:"modifier"` Modifier CriterionModifier `json:"modifier"`
@ -515,6 +570,8 @@ type SceneFilterType struct {
Date *DateCriterionInput `json:"date"` Date *DateCriterionInput `json:"date"`
// Filter to only include scenes with this studio // Filter to only include scenes with this studio
Studios *MultiIDCriterionInput `json:"studios"` Studios *MultiIDCriterionInput `json:"studios"`
// Filter to only include scenes with this studio as primary or parent
ParentStudio *string `json:"parentStudio"`
// Filter to only include scenes with these tags // Filter to only include scenes with these tags
Tags *MultiIDCriterionInput `json:"tags"` Tags *MultiIDCriterionInput `json:"tags"`
// Filter to only include scenes with these performers // Filter to only include scenes with these performers
@ -598,9 +655,12 @@ type StudioEditInput struct {
type StudioFilterType struct { type StudioFilterType struct {
// Filter to search name - assumes like query unless quoted // Filter to search name - assumes like query unless quoted
Name *string `json:"name"` Name *string `json:"name"`
// Filter to search studio and parent studio name - assumes like query unless quoted
Names *string `json:"names"`
// Filter to search url - assumes like query unless quoted // Filter to search url - assumes like query unless quoted
URL *string `json:"url"` URL *string `json:"url"`
Parent *IDCriterionInput `json:"parent"` Parent *IDCriterionInput `json:"parent"`
HasParent *bool `json:"has_parent"`
} }
type StudioUpdateInput struct { type StudioUpdateInput struct {
@ -613,20 +673,46 @@ type StudioUpdateInput struct {
} }
type Tag struct { type Tag struct {
ID string `json:"id"` ID string `json:"id"`
Name string `json:"name"` Name string `json:"name"`
Description *string `json:"description"` Description *string `json:"description"`
Aliases []string `json:"aliases"` Aliases []string `json:"aliases"`
Deleted bool `json:"deleted"` Deleted bool `json:"deleted"`
Edits []*Edit `json:"edits"` Edits []*Edit `json:"edits"`
Category *TagCategory `json:"category"`
} }
func (Tag) IsEditTarget() {} func (Tag) IsEditTarget() {}
type TagCategory struct {
ID string `json:"id"`
Name string `json:"name"`
Group TagGroupEnum `json:"group"`
Description *string `json:"description"`
}
type TagCategoryCreateInput struct {
Name string `json:"name"`
Group TagGroupEnum `json:"group"`
Description *string `json:"description"`
}
type TagCategoryDestroyInput struct {
ID string `json:"id"`
}
type TagCategoryUpdateInput struct {
ID string `json:"id"`
Name *string `json:"name"`
Group *TagGroupEnum `json:"group"`
Description *string `json:"description"`
}
type TagCreateInput struct { type TagCreateInput struct {
Name string `json:"name"` Name string `json:"name"`
Description *string `json:"description"` Description *string `json:"description"`
Aliases []string `json:"aliases"` Aliases []string `json:"aliases"`
CategoryID *string `json:"category_id"`
} }
type TagDestroyInput struct { type TagDestroyInput struct {
@ -638,6 +724,7 @@ type TagEdit struct {
Description *string `json:"description"` Description *string `json:"description"`
AddedAliases []string `json:"added_aliases"` AddedAliases []string `json:"added_aliases"`
RemovedAliases []string `json:"removed_aliases"` RemovedAliases []string `json:"removed_aliases"`
CategoryID *string `json:"category_id"`
} }
func (TagEdit) IsEditDetails() {} func (TagEdit) IsEditDetails() {}
@ -646,6 +733,7 @@ type TagEditDetailsInput struct {
Name *string `json:"name"` Name *string `json:"name"`
Description *string `json:"description"` Description *string `json:"description"`
Aliases []string `json:"aliases"` Aliases []string `json:"aliases"`
CategoryID *string `json:"category_id"`
} }
type TagEditInput struct { type TagEditInput struct {
@ -661,6 +749,8 @@ type TagFilterType struct {
Names *string `json:"names"` Names *string `json:"names"`
// Filter to search name - assumes like query unless quoted // Filter to search name - assumes like query unless quoted
Name *string `json:"name"` Name *string `json:"name"`
// Filter to category ID
CategoryID *string `json:"category_id"`
} }
type TagUpdateInput struct { type TagUpdateInput struct {
@ -668,6 +758,7 @@ type TagUpdateInput struct {
Name *string `json:"name"` Name *string `json:"name"`
Description *string `json:"description"` Description *string `json:"description"`
Aliases []string `json:"aliases"` Aliases []string `json:"aliases"`
CategoryID *string `json:"category_id"`
} }
type URL struct { type URL struct {
@ -695,21 +786,26 @@ type User struct {
// Votes on unsuccessful edits // Votes on unsuccessful edits
UnsuccessfulVotes int `json:"unsuccessful_votes"` UnsuccessfulVotes int `json:"unsuccessful_votes"`
// Calls to the API from this user over a configurable time period // Calls to the API from this user over a configurable time period
APICalls int `json:"api_calls"` APICalls int `json:"api_calls"`
InvitedBy *User `json:"invited_by"`
InviteTokens *int `json:"invite_tokens"`
ActiveInviteCodes []string `json:"active_invite_codes"`
} }
type UserChangePasswordInput struct { type UserChangePasswordInput struct {
// Password in plain text // Password in plain text
ExistingPassword string `json:"existing_password"` ExistingPassword *string `json:"existing_password"`
NewPassword string `json:"new_password"` NewPassword string `json:"new_password"`
ResetKey *string `json:"reset_key"`
} }
type UserCreateInput struct { type UserCreateInput struct {
Name string `json:"name"` Name string `json:"name"`
// Password in plain text // Password in plain text
Password string `json:"password"` Password string `json:"password"`
Roles []RoleEnum `json:"roles"` Roles []RoleEnum `json:"roles"`
Email string `json:"email"` Email string `json:"email"`
InvitedByID *string `json:"invited_by_id"`
} }
type UserDestroyInput struct { type UserDestroyInput struct {
@ -735,6 +831,8 @@ type UserFilterType struct {
UnsuccessfulVotes *IntCriterionInput `json:"unsuccessful_votes"` UnsuccessfulVotes *IntCriterionInput `json:"unsuccessful_votes"`
// Filter by number of API calls // Filter by number of API calls
APICalls *IntCriterionInput `json:"api_calls"` APICalls *IntCriterionInput `json:"api_calls"`
// Filter by user that invited
InvitedBy *string `json:"invited_by"`
} }
type UserUpdateInput struct { type UserUpdateInput struct {
@ -960,6 +1058,61 @@ func (e EthnicityEnum) MarshalGQL(w io.Writer) {
fmt.Fprint(w, strconv.Quote(e.String())) fmt.Fprint(w, strconv.Quote(e.String()))
} }
type EthnicityFilterEnum string
const (
EthnicityFilterEnumUnknown EthnicityFilterEnum = "UNKNOWN"
EthnicityFilterEnumCaucasian EthnicityFilterEnum = "CAUCASIAN"
EthnicityFilterEnumBlack EthnicityFilterEnum = "BLACK"
EthnicityFilterEnumAsian EthnicityFilterEnum = "ASIAN"
EthnicityFilterEnumIndian EthnicityFilterEnum = "INDIAN"
EthnicityFilterEnumLatin EthnicityFilterEnum = "LATIN"
EthnicityFilterEnumMiddleEastern EthnicityFilterEnum = "MIDDLE_EASTERN"
EthnicityFilterEnumMixed EthnicityFilterEnum = "MIXED"
EthnicityFilterEnumOther EthnicityFilterEnum = "OTHER"
)
var AllEthnicityFilterEnum = []EthnicityFilterEnum{
EthnicityFilterEnumUnknown,
EthnicityFilterEnumCaucasian,
EthnicityFilterEnumBlack,
EthnicityFilterEnumAsian,
EthnicityFilterEnumIndian,
EthnicityFilterEnumLatin,
EthnicityFilterEnumMiddleEastern,
EthnicityFilterEnumMixed,
EthnicityFilterEnumOther,
}
func (e EthnicityFilterEnum) IsValid() bool {
switch e {
case EthnicityFilterEnumUnknown, EthnicityFilterEnumCaucasian, EthnicityFilterEnumBlack, EthnicityFilterEnumAsian, EthnicityFilterEnumIndian, EthnicityFilterEnumLatin, EthnicityFilterEnumMiddleEastern, EthnicityFilterEnumMixed, EthnicityFilterEnumOther:
return true
}
return false
}
func (e EthnicityFilterEnum) String() string {
return string(e)
}
func (e *EthnicityFilterEnum) UnmarshalGQL(v interface{}) error {
str, ok := v.(string)
if !ok {
return fmt.Errorf("enums must be strings")
}
*e = EthnicityFilterEnum(str)
if !e.IsValid() {
return fmt.Errorf("%s is not a valid EthnicityFilterEnum", str)
}
return nil
}
func (e EthnicityFilterEnum) MarshalGQL(w io.Writer) {
fmt.Fprint(w, strconv.Quote(e.String()))
}
type EyeColorEnum string type EyeColorEnum string
const ( const (
@ -1014,16 +1167,18 @@ type FingerprintAlgorithm string
const ( const (
FingerprintAlgorithmMd5 FingerprintAlgorithm = "MD5" FingerprintAlgorithmMd5 FingerprintAlgorithm = "MD5"
FingerprintAlgorithmOshash FingerprintAlgorithm = "OSHASH" FingerprintAlgorithmOshash FingerprintAlgorithm = "OSHASH"
FingerprintAlgorithmPhash FingerprintAlgorithm = "PHASH"
) )
var AllFingerprintAlgorithm = []FingerprintAlgorithm{ var AllFingerprintAlgorithm = []FingerprintAlgorithm{
FingerprintAlgorithmMd5, FingerprintAlgorithmMd5,
FingerprintAlgorithmOshash, FingerprintAlgorithmOshash,
FingerprintAlgorithmPhash,
} }
func (e FingerprintAlgorithm) IsValid() bool { func (e FingerprintAlgorithm) IsValid() bool {
switch e { switch e {
case FingerprintAlgorithmMd5, FingerprintAlgorithmOshash: case FingerprintAlgorithmMd5, FingerprintAlgorithmOshash, FingerprintAlgorithmPhash:
return true return true
} }
return false return false
@ -1097,6 +1252,55 @@ func (e GenderEnum) MarshalGQL(w io.Writer) {
fmt.Fprint(w, strconv.Quote(e.String())) fmt.Fprint(w, strconv.Quote(e.String()))
} }
type GenderFilterEnum string
const (
GenderFilterEnumUnknown GenderFilterEnum = "UNKNOWN"
GenderFilterEnumMale GenderFilterEnum = "MALE"
GenderFilterEnumFemale GenderFilterEnum = "FEMALE"
GenderFilterEnumTransgenderMale GenderFilterEnum = "TRANSGENDER_MALE"
GenderFilterEnumTransgenderFemale GenderFilterEnum = "TRANSGENDER_FEMALE"
GenderFilterEnumIntersex GenderFilterEnum = "INTERSEX"
)
var AllGenderFilterEnum = []GenderFilterEnum{
GenderFilterEnumUnknown,
GenderFilterEnumMale,
GenderFilterEnumFemale,
GenderFilterEnumTransgenderMale,
GenderFilterEnumTransgenderFemale,
GenderFilterEnumIntersex,
}
func (e GenderFilterEnum) IsValid() bool {
switch e {
case GenderFilterEnumUnknown, GenderFilterEnumMale, GenderFilterEnumFemale, GenderFilterEnumTransgenderMale, GenderFilterEnumTransgenderFemale, GenderFilterEnumIntersex:
return true
}
return false
}
func (e GenderFilterEnum) String() string {
return string(e)
}
func (e *GenderFilterEnum) UnmarshalGQL(v interface{}) error {
str, ok := v.(string)
if !ok {
return fmt.Errorf("enums must be strings")
}
*e = GenderFilterEnum(str)
if !e.IsValid() {
return fmt.Errorf("%s is not a valid GenderFilterEnum", str)
}
return nil
}
func (e GenderFilterEnum) MarshalGQL(w io.Writer) {
fmt.Fprint(w, strconv.Quote(e.String()))
}
type HairColorEnum string type HairColorEnum string
const ( const (
@ -1205,6 +1409,10 @@ const (
RoleEnumEdit RoleEnum = "EDIT" RoleEnumEdit RoleEnum = "EDIT"
RoleEnumModify RoleEnum = "MODIFY" RoleEnumModify RoleEnum = "MODIFY"
RoleEnumAdmin RoleEnum = "ADMIN" RoleEnumAdmin RoleEnum = "ADMIN"
// May generate invites without tokens
RoleEnumInvite RoleEnum = "INVITE"
// May grant and rescind invite tokens and resind invite keys
RoleEnumManageInvites RoleEnum = "MANAGE_INVITES"
) )
var AllRoleEnum = []RoleEnum{ var AllRoleEnum = []RoleEnum{
@ -1213,11 +1421,13 @@ var AllRoleEnum = []RoleEnum{
RoleEnumEdit, RoleEnumEdit,
RoleEnumModify, RoleEnumModify,
RoleEnumAdmin, RoleEnumAdmin,
RoleEnumInvite,
RoleEnumManageInvites,
} }
func (e RoleEnum) IsValid() bool { func (e RoleEnum) IsValid() bool {
switch e { switch e {
case RoleEnumRead, RoleEnumVote, RoleEnumEdit, RoleEnumModify, RoleEnumAdmin: case RoleEnumRead, RoleEnumVote, RoleEnumEdit, RoleEnumModify, RoleEnumAdmin, RoleEnumInvite, RoleEnumManageInvites:
return true return true
} }
return false return false
@ -1285,6 +1495,49 @@ func (e SortDirectionEnum) MarshalGQL(w io.Writer) {
fmt.Fprint(w, strconv.Quote(e.String())) fmt.Fprint(w, strconv.Quote(e.String()))
} }
type TagGroupEnum string
const (
TagGroupEnumPeople TagGroupEnum = "PEOPLE"
TagGroupEnumScene TagGroupEnum = "SCENE"
TagGroupEnumAction TagGroupEnum = "ACTION"
)
var AllTagGroupEnum = []TagGroupEnum{
TagGroupEnumPeople,
TagGroupEnumScene,
TagGroupEnumAction,
}
func (e TagGroupEnum) IsValid() bool {
switch e {
case TagGroupEnumPeople, TagGroupEnumScene, TagGroupEnumAction:
return true
}
return false
}
func (e TagGroupEnum) String() string {
return string(e)
}
func (e *TagGroupEnum) UnmarshalGQL(v interface{}) error {
str, ok := v.(string)
if !ok {
return fmt.Errorf("enums must be strings")
}
*e = TagGroupEnum(str)
if !e.IsValid() {
return fmt.Errorf("%s is not a valid TagGroupEnum", str)
}
return nil
}
func (e TagGroupEnum) MarshalGQL(w io.Writer) {
fmt.Fprint(w, strconv.Quote(e.String()))
}
type TargetTypeEnum string type TargetTypeEnum string
const ( const (

View file

@ -66,7 +66,7 @@ func (c Client) QueryStashBoxScene(queryStr string) ([]*models.ScrapedScene, err
} }
// FindStashBoxScenesByFingerprints queries stash-box for scenes using every // FindStashBoxScenesByFingerprints queries stash-box for scenes using every
// scene's MD5 checksum and/or oshash. // scene's MD5/OSHASH checksum, or PHash
func (c Client) FindStashBoxScenesByFingerprints(sceneIDs []string) ([]*models.ScrapedScene, error) { func (c Client) FindStashBoxScenesByFingerprints(sceneIDs []string) ([]*models.ScrapedScene, error) {
ids, err := utils.StringSliceToIntSlice(sceneIDs) ids, err := utils.StringSliceToIntSlice(sceneIDs)
if err != nil { if err != nil {
@ -95,6 +95,10 @@ func (c Client) FindStashBoxScenesByFingerprints(sceneIDs []string) ([]*models.S
if scene.OSHash.Valid { if scene.OSHash.Valid {
fingerprints = append(fingerprints, scene.OSHash.String) fingerprints = append(fingerprints, scene.OSHash.String)
} }
if scene.Phash.Valid {
fingerprints = append(fingerprints, utils.PhashToString(scene.Phash.Int64))
}
} }
return nil return nil
@ -189,6 +193,18 @@ func (c Client) SubmitStashBoxFingerprints(sceneIDs []string, endpoint string) (
Fingerprint: &fingerprint, Fingerprint: &fingerprint,
}) })
} }
if scene.Phash.Valid && scene.Duration.Valid {
fingerprint := graphql.FingerprintInput{
Hash: utils.PhashToString(scene.Phash.Int64),
Algorithm: graphql.FingerprintAlgorithmPhash,
Duration: int(scene.Duration.Float64),
}
fingerprints = append(fingerprints, graphql.FingerprintSubmission{
SceneID: sceneStashID,
Fingerprint: &fingerprint,
})
}
} }
} }

View file

@ -4,9 +4,11 @@ import (
"database/sql" "database/sql"
"fmt" "fmt"
"strconv" "strconv"
"strings"
"github.com/jmoiron/sqlx" "github.com/jmoiron/sqlx"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
) )
const sceneTable = "scenes" const sceneTable = "scenes"
@ -61,6 +63,20 @@ SELECT id FROM scenes
WHERE scenes.oshash is null WHERE scenes.oshash is null
` `
// findExactDuplicateQuery groups scenes by identical phash and returns a
// comma-separated id list (via GROUP_CONCAT) per group that has more than
// one member. Used for distance == 0 duplicate checking.
var findExactDuplicateQuery = `
SELECT GROUP_CONCAT(id) as ids
FROM scenes
WHERE phash IS NOT NULL
GROUP BY phash
HAVING COUNT(*) > 1;
`
// findAllPhashesQuery returns the id/phash pair of every scene that has a
// phash, for fuzzy (distance > 0) duplicate matching done in Go.
var findAllPhashesQuery = `
SELECT id, phash
FROM scenes
WHERE phash IS NOT NULL
`
type sceneQueryBuilder struct { type sceneQueryBuilder struct {
repository repository
} }
@ -824,3 +840,51 @@ func (qb *sceneQueryBuilder) GetStashIDs(sceneID int) ([]*models.StashID, error)
func (qb *sceneQueryBuilder) UpdateStashIDs(sceneID int, stashIDs []models.StashID) error { func (qb *sceneQueryBuilder) UpdateStashIDs(sceneID int, stashIDs []models.StashID) error {
return qb.stashIDRepository().replace(sceneID, stashIDs) return qb.stashIDRepository().replace(sceneID, stashIDs)
} }
// FindDuplicates returns groups of scenes whose phashes are within the
// given hamming distance of each other. A distance of 0 uses an exact
// SQL GROUP BY on the phash column; any other distance loads every phash
// and clusters them in memory via utils.FindDuplicates.
func (qb *sceneQueryBuilder) FindDuplicates(distance int) ([][]*models.Scene, error) {
	var dupeIDs [][]int
	if distance == 0 {
		// Exact matching: each row is a GROUP_CONCAT id list like "3,17,42".
		var ids []string
		if err := qb.tx.Select(&ids, findExactDuplicateQuery); err != nil {
			return nil, err
		}
		for _, id := range ids {
			var sceneIDs []int
			for _, strID := range strings.Split(id, ",") {
				// GROUP_CONCAT output is database-generated, so a parse
				// failure should not happen; skip malformed entries.
				if intID, err := strconv.Atoi(strID); err == nil {
					sceneIDs = append(sceneIDs, intID)
				}
			}
			dupeIDs = append(dupeIDs, sceneIDs)
		}
	} else {
		// Fuzzy matching: load all phashes and bucket them by distance.
		var hashes []*utils.Phash
		if err := qb.queryFunc(findAllPhashesQuery, nil, func(rows *sqlx.Rows) error {
			// Bucket -1 marks the hash as not yet assigned to a group.
			phash := utils.Phash{
				Bucket: -1,
			}
			if err := rows.StructScan(&phash); err != nil {
				return err
			}
			hashes = append(hashes, &phash)
			return nil
		}); err != nil {
			return nil, err
		}
		dupeIDs = utils.FindDuplicates(hashes, distance)
	}

	var duplicates [][]*models.Scene
	for _, sceneIDs := range dupeIDs {
		scenes, err := qb.FindMany(sceneIDs)
		// Previously this error was silently swallowed, which would have
		// dropped whole duplicate groups on a database failure.
		if err != nil {
			return nil, err
		}
		duplicates = append(duplicates, scenes)
	}
	return duplicates, nil
}

57
pkg/utils/phash.go Normal file
View file

@ -0,0 +1,57 @@
package utils
import (
"strconv"
"github.com/corona10/goimagehash"
)
// Phash pairs a scene's database id with its perceptual hash, plus the
// scratch state FindDuplicates uses while clustering similar scenes.
type Phash struct {
	SceneID int   `db:"id"`
	Hash    int64 `db:"phash"`
	// Neighbors holds indexes (into the slice passed to FindDuplicates)
	// of hashes within the distance threshold of this one.
	Neighbors []int
	// Bucket is the duplicate-group index assigned during clustering.
	// Callers must initialize it to -1 (meaning "unassigned").
	Bucket int
}
// FindDuplicates partitions scenes into buckets of perceptually similar
// phashes: two scenes are neighbors when the hamming distance between
// their hashes is <= distance, and a bucket is a transitively connected
// group of neighbors. Returns one slice of scene IDs per bucket. Every
// input Phash must have Bucket initialized to -1 by the caller.
func FindDuplicates(hashes []*Phash, distance int) [][]int {
	// Build each ImageHash wrapper once up front. The original constructed
	// the neighbor's wrapper inside the inner loop, allocating O(n^2)
	// wrappers instead of O(n).
	imageHashes := make([]*goimagehash.ImageHash, len(hashes))
	for i, scene := range hashes {
		imageHashes[i] = goimagehash.NewImageHash(uint64(scene.Hash), goimagehash.PHash)
	}

	// Pairwise comparison to record each scene's neighbors.
	for i, scene := range hashes {
		for j := range hashes {
			if i == j {
				continue
			}
			// Distance only errors when hash sizes differ, which cannot
			// happen here: all wrappers are 64-bit PHashes.
			neighborDistance, _ := imageHashes[i].Distance(imageHashes[j])
			if neighborDistance <= distance {
				scene.Neighbors = append(scene.Neighbors, j)
			}
		}
	}

	// Flood-fill connected components into buckets.
	var buckets [][]int
	for _, scene := range hashes {
		if len(scene.Neighbors) > 0 && scene.Bucket == -1 {
			bucket := len(buckets)
			scenes := []int{scene.SceneID}
			scene.Bucket = bucket
			findNeighbors(bucket, scene.Neighbors, hashes, &scenes)
			buckets = append(buckets, scenes)
		}
	}
	return buckets
}
// findNeighbors walks the neighbor graph depth-first, assigning every
// not-yet-bucketed hash reachable from neighbors into bucket and
// appending its scene ID to scenes.
func findNeighbors(bucket int, neighbors []int, hashes []*Phash, scenes *[]int) {
	for _, idx := range neighbors {
		candidate := hashes[idx]
		if candidate.Bucket != -1 {
			// Already claimed by this or another bucket; skip.
			continue
		}
		candidate.Bucket = bucket
		*scenes = append(*scenes, candidate.SceneID)
		findNeighbors(bucket, candidate.Neighbors, hashes, scenes)
	}
}
func PhashToString(phash int64) string {
return strconv.FormatUint(uint64(phash), 16)
}

View file

@ -1,4 +1,5 @@
### ✨ New Features ### ✨ New Features
* Added [perceptual dupe checker](/settings?tab=duplicates).
* Support access to system without logging in via API key. * Support access to system without logging in via API key.
* Added scene queue. * Added scene queue.

View file

@ -14,6 +14,7 @@ import Contributing from "src/docs/en/Contributing.md";
import SceneFilenameParser from "src/docs/en/SceneFilenameParser.md"; import SceneFilenameParser from "src/docs/en/SceneFilenameParser.md";
import KeyboardShortcuts from "src/docs/en/KeyboardShortcuts.md"; import KeyboardShortcuts from "src/docs/en/KeyboardShortcuts.md";
import Help from "src/docs/en/Help.md"; import Help from "src/docs/en/Help.md";
import Deduplication from "src/docs/en/Deduplication.md";
import { MarkdownPage } from "../Shared/MarkdownPage"; import { MarkdownPage } from "../Shared/MarkdownPage";
interface IManualProps { interface IManualProps {
@ -86,6 +87,11 @@ export const Manual: React.FC<IManualProps> = ({
title: "Scene Tagger", title: "Scene Tagger",
content: Tagger, content: Tagger,
}, },
{
key: "Deduplication.md",
title: "Dupe Checker",
content: Deduplication,
},
{ {
key: "KeyboardShortcuts.md", key: "KeyboardShortcuts.md",
title: "Keyboard Shortcuts", title: "Keyboard Shortcuts",

View file

@ -6,6 +6,14 @@
padding-left: 15px; padding-left: 15px;
padding-right: 15px; padding-right: 15px;
transition: none; transition: none;
&:first-child {
border-left: none;
}
&:last-child {
border-right: none;
}
} }
} }

View file

@ -219,10 +219,24 @@ export const SceneFileInfoPanel: React.FC<ISceneFileInfoPanelProps> = (
); );
} }
function renderPhash() {
if (props.scene.phash) {
return (
<div className="row">
<abbr className="col-4" title="Perceptual hash">
PHash
</abbr>
<TruncatedText className="col-8" text={props.scene.phash} />
</div>
);
}
}
return ( return (
<div className="container scene-file-info"> <div className="container scene-file-info">
{renderOSHash()} {renderOSHash()}
{renderChecksum()} {renderChecksum()}
{renderPhash()}
{renderPath()} {renderPath()}
{renderStream()} {renderStream()}
{renderFileSize()} {renderFileSize()}

View file

@ -19,6 +19,7 @@ export const SceneGenerateDialog: React.FC<ISceneGenerateDialogProps> = (
const { data, error, loading } = useConfiguration(); const { data, error, loading } = useConfiguration();
const [sprites, setSprites] = useState(true); const [sprites, setSprites] = useState(true);
const [phashes, setPhashes] = useState(true);
const [previews, setPreviews] = useState(true); const [previews, setPreviews] = useState(true);
const [markers, setMarkers] = useState(true); const [markers, setMarkers] = useState(true);
const [transcodes, setTranscodes] = useState(false); const [transcodes, setTranscodes] = useState(false);
@ -60,6 +61,7 @@ export const SceneGenerateDialog: React.FC<ISceneGenerateDialogProps> = (
try { try {
await mutateMetadataGenerate({ await mutateMetadataGenerate({
sprites, sprites,
phashes,
previews, previews,
imagePreviews: previews && imagePreviews, imagePreviews: previews && imagePreviews,
markers, markers,
@ -243,6 +245,12 @@ export const SceneGenerateDialog: React.FC<ISceneGenerateDialogProps> = (
label="Transcodes (MP4 conversions of unsupported video formats)" label="Transcodes (MP4 conversions of unsupported video formats)"
onChange={() => setTranscodes(!transcodes)} onChange={() => setTranscodes(!transcodes)}
/> />
<Form.Check
id="phash-task"
checked={phashes}
label="Perceptual hashes (for deduplication)"
onChange={() => setPhashes(!phashes)}
/>
<hr /> <hr />
<Form.Check <Form.Check

View file

@ -9,6 +9,7 @@ import { SettingsLogsPanel } from "./SettingsLogsPanel";
import { SettingsTasksPanel } from "./SettingsTasksPanel/SettingsTasksPanel"; import { SettingsTasksPanel } from "./SettingsTasksPanel/SettingsTasksPanel";
import { SettingsPluginsPanel } from "./SettingsPluginsPanel"; import { SettingsPluginsPanel } from "./SettingsPluginsPanel";
import { SettingsScrapersPanel } from "./SettingsScrapersPanel"; import { SettingsScrapersPanel } from "./SettingsScrapersPanel";
import { SettingsDuplicatePanel } from "./SettingsDuplicatePanel";
export const Settings: React.FC = () => { export const Settings: React.FC = () => {
const location = useLocation(); const location = useLocation();
@ -45,6 +46,9 @@ export const Settings: React.FC = () => {
<Nav.Item> <Nav.Item>
<Nav.Link eventKey="logs">Logs</Nav.Link> <Nav.Link eventKey="logs">Logs</Nav.Link>
</Nav.Item> </Nav.Item>
<Nav.Item>
<Nav.Link eventKey="duplicates">Dupe Checker</Nav.Link>
</Nav.Item>
<Nav.Item> <Nav.Item>
<Nav.Link eventKey="about">About</Nav.Link> <Nav.Link eventKey="about">About</Nav.Link>
</Nav.Item> </Nav.Item>
@ -71,6 +75,9 @@ export const Settings: React.FC = () => {
<Tab.Pane eventKey="logs"> <Tab.Pane eventKey="logs">
<SettingsLogsPanel /> <SettingsLogsPanel />
</Tab.Pane> </Tab.Pane>
<Tab.Pane eventKey="duplicates">
<SettingsDuplicatePanel />
</Tab.Pane>
<Tab.Pane eventKey="about"> <Tab.Pane eventKey="about">
<SettingsAboutPanel /> <SettingsAboutPanel />
</Tab.Pane> </Tab.Pane>

View file

@ -0,0 +1,270 @@
import React, { useState } from "react";
import { Button, Col, Form, Row, Table } from "react-bootstrap";
import { Link, useHistory } from "react-router-dom";
import { FormattedNumber } from "react-intl";
import querystring from "query-string";
import * as GQL from "src/core/generated-graphql";
import {
LoadingIndicator,
ErrorMessage,
HoverPopover,
} from "src/components/Shared";
import { Pagination } from "src/components/List/Pagination";
import { TextUtils } from "src/utils";
import { DeleteScenesDialog } from "src/components/Scenes/DeleteScenesDialog";
// CSS block name shared with the panel's stylesheet rules.
const CLASSNAME = "DuplicateChecker";

// Settings panel that lists groups of perceptually-similar scenes and
// lets the user delete duplicates individually or in bulk. Pagination
// (page/size) and search accuracy (distance) live in the URL query
// string so the view survives reloads and back/forward navigation.
export const SettingsDuplicatePanel: React.FC = () => {
  const history = useHistory();
  const { page, size, distance } = querystring.parse(history.location.search);
  // Query-string values may be absent or repeated; take the first value
  // and fall back to defaults (page 1, 20 per page, exact distance 0).
  const currentPage = Number.parseInt(
    Array.isArray(page) ? page[0] : page ?? "1",
    10
  );
  const pageSize = Number.parseInt(
    Array.isArray(size) ? size[0] : size ?? "20",
    10
  );
  const hashDistance = Number.parseInt(
    Array.isArray(distance) ? distance[0] : distance ?? "0",
    10
  );
  // Tracks whether the open delete dialog came from the bulk button, so
  // the checkbox state is only cleared after a multi-delete.
  const [isMultiDelete, setIsMultiDelete] = useState(false);
  // Map of scene ID -> checked state for the bulk-delete checkboxes.
  const [checkedScenes, setCheckedScenes] = useState<Record<string, boolean>>(
    {}
  );
  // no-cache: results change whenever scenes are deleted, so always hit
  // the server rather than the Apollo cache.
  const { data, loading, refetch } = GQL.useFindDuplicateScenesQuery({
    fetchPolicy: "no-cache",
    variables: { distance: hashDistance },
  });
  // Scenes pending deletion; non-null while the confirm dialog is open.
  const [deletingScene, setDeletingScene] = useState<
    GQL.SlimSceneDataFragment[] | null
  >(null);

  if (loading) return <LoadingIndicator />;
  if (!data) return <ErrorMessage error="Error searching for duplicates." />;

  const scenes = data?.findDuplicateScenes ?? [];
  // Client-side pagination: the query returns all groups at once.
  const filteredScenes = scenes.slice(
    (currentPage - 1) * pageSize,
    currentPage * pageSize
  );
  const checkCount = Object.keys(checkedScenes).filter(
    (id) => checkedScenes[id]
  ).length;

  // Merges the given keys into the current query string; undefined
  // values drop the key (restoring its default).
  const setQuery = (q: Record<string, string | number | undefined>) => {
    history.push({
      search: querystring.stringify({
        ...querystring.parse(history.location.search),
        ...q,
      }),
    });
  };

  function onDeleteDialogClosed(deleted: boolean) {
    setDeletingScene(null);
    if (deleted) {
      refetch();
      if (isMultiDelete) setCheckedScenes({});
    }
  }

  const handleCheck = (checked: boolean, sceneID: string) => {
    setCheckedScenes({ ...checkedScenes, [sceneID]: checked });
  };

  const handleDeleteChecked = () => {
    // Flatten the groups and keep only the checked scenes.
    setDeletingScene(scenes.flat().filter((s) => checkedScenes[s.id]));
    setIsMultiDelete(true);
  };

  const handleDeleteScene = (scene: GQL.SlimSceneDataFragment) => {
    setDeletingScene([scene]);
    setIsMultiDelete(false);
  };

  // Renders a byte count as a localized human-readable size.
  const renderFilesize = (filesize: string | null | undefined) => {
    const { size: parsedSize, unit } = TextUtils.fileSize(
      Number.parseInt(filesize ?? "0", 10)
    );
    return (
      <FormattedNumber
        value={parsedSize}
        style="unit"
        unit={unit}
        unitDisplay="narrow"
        maximumFractionDigits={2}
      />
    );
  };

  return (
    <div className={CLASSNAME}>
      {deletingScene && (
        <DeleteScenesDialog
          selected={deletingScene}
          onClose={onDeleteDialogClosed}
        />
      )}
      <h4>Duplicate Scenes</h4>
      <Form.Group>
        <Row noGutters>
          <Form.Label>Search Accuracy</Form.Label>
          <Col xs={2}>
            <Form.Control
              as="select"
              onChange={(e) =>
                setQuery({
                  distance:
                    e.currentTarget.value === "0"
                      ? undefined
                      : e.currentTarget.value,
                  page: undefined,
                })
              }
              defaultValue={distance ?? 0}
              className="ml-4"
            >
              <option value={0}>Exact</option>
              <option value={4}>High</option>
              <option value={8}>Medium</option>
              <option value={10}>Low</option>
            </Form.Control>
          </Col>
        </Row>
        <Form.Text>
          Levels below &ldquo;Exact&rdquo; can take longer to calculate. False
          positives might also be returned on lower accuracy levels.
        </Form.Text>
      </Form.Group>
      <div className="d-flex mb-2">
        <h6 className="mr-auto align-self-center">
          {scenes.length} sets of duplicates found.
        </h6>
        {checkCount > 0 && (
          <Button
            className="edit-button"
            variant="danger"
            onClick={handleDeleteChecked}
          >
            Delete {checkCount} scene{checkCount > 1 && "s"}
          </Button>
        )}
        <Pagination
          itemsPerPage={pageSize}
          currentPage={currentPage}
          totalItems={scenes.length}
          onChangePage={(newPage) =>
            setQuery({ page: newPage === 1 ? undefined : newPage })
          }
        />
        <Form.Control
          as="select"
          className="w-auto ml-2 btn-secondary"
          defaultValue={pageSize}
          onChange={(e) =>
            setQuery({
              size:
                e.currentTarget.value === "20"
                  ? undefined
                  : e.currentTarget.value,
            })
          }
        >
          <option value={10}>10</option>
          <option value={20}>20</option>
          <option value={40}>40</option>
          <option value={60}>60</option>
          <option value={80}>80</option>
        </Form.Control>
      </div>
      <Table striped className={`${CLASSNAME}-table`}>
        {/* Fixed column widths; see the matching SCSS classes. */}
        <colgroup>
          <col className={`${CLASSNAME}-checkbox`} />
          <col className={`${CLASSNAME}-sprite`} />
          <col className={`${CLASSNAME}-title`} />
          <col className={`${CLASSNAME}-duration`} />
          <col className={`${CLASSNAME}-filesize`} />
          <col className={`${CLASSNAME}-resolution`} />
          <col className={`${CLASSNAME}-bitrate`} />
          <col className={`${CLASSNAME}-codec`} />
          <col className={`${CLASSNAME}-operations`} />
        </colgroup>
        <thead>
          <tr>
            <th> </th>
            <th> </th>
            <th>Title</th>
            <th>Duration</th>
            <th>Filesize</th>
            <th>Resolution</th>
            <th>Bitrate</th>
            <th>Codec</th>
            <th>Delete</th>
          </tr>
        </thead>
        <tbody>
          {/* One row per scene; the first row of each group gets the
              duplicate-group class that draws the separator border. */}
          {filteredScenes.map((group) =>
            group.map((scene, i) => (
              <tr className={i === 0 ? "duplicate-group" : ""} key={scene.id}>
                <td>
                  <Form.Check
                    onChange={(e) =>
                      handleCheck(e.currentTarget.checked, scene.id)
                    }
                  />
                </td>
                <td>
                  <HoverPopover
                    content={
                      <img src={scene.paths.sprite ?? ""} alt="" width={600} />
                    }
                    placement="right"
                  >
                    <img src={scene.paths.sprite ?? ""} alt="" width={100} />
                  </HoverPopover>
                </td>
                <td className="text-left">
                  <Link to={`/scenes/${scene.id}`}>
                    {scene.title ?? TextUtils.fileNameFromPath(scene.path)}
                  </Link>
                </td>
                <td>
                  {scene.file.duration &&
                    TextUtils.secondsToTimestamp(scene.file.duration)}
                </td>
                <td>{renderFilesize(scene.file.size)}</td>
                <td>{`${scene.file.width}x${scene.file.height}`}</td>
                <td>
                  <FormattedNumber
                    value={(scene.file.bitrate ?? 0) / 1000000}
                    maximumFractionDigits={2}
                  />
                  &nbsp;mbps
                </td>
                <td>{scene.file.video_codec}</td>
                <td>
                  <Button
                    className="edit-button"
                    variant="danger"
                    onClick={() => handleDeleteScene(scene)}
                  >
                    Delete
                  </Button>
                </td>
              </tr>
            ))
          )}
        </tbody>
      </Table>
      {scenes.length === 0 && (
        <h4 className="text-center mt-4">
          No duplicates found. Make sure the phash task has been run.
        </h4>
      )}
    </div>
  );
};

View file

@ -6,6 +6,7 @@ import { useToast } from "src/hooks";
export const GenerateButton: React.FC = () => { export const GenerateButton: React.FC = () => {
const Toast = useToast(); const Toast = useToast();
const [sprites, setSprites] = useState(true); const [sprites, setSprites] = useState(true);
const [phashes, setPhashes] = useState(true);
const [previews, setPreviews] = useState(true); const [previews, setPreviews] = useState(true);
const [markers, setMarkers] = useState(true); const [markers, setMarkers] = useState(true);
const [transcodes, setTranscodes] = useState(false); const [transcodes, setTranscodes] = useState(false);
@ -15,6 +16,7 @@ export const GenerateButton: React.FC = () => {
try { try {
await mutateMetadataGenerate({ await mutateMetadataGenerate({
sprites, sprites,
phashes,
previews, previews,
imagePreviews: previews && imagePreviews, imagePreviews: previews && imagePreviews,
markers, markers,
@ -64,6 +66,12 @@ export const GenerateButton: React.FC = () => {
label="Transcodes (MP4 conversions of unsupported video formats)" label="Transcodes (MP4 conversions of unsupported video formats)"
onChange={() => setTranscodes(!transcodes)} onChange={() => setTranscodes(!transcodes)}
/> />
<Form.Check
id="phash-task"
checked={phashes}
label="Phashes (for deduplication and scene identification)"
onChange={() => setPhashes(!phashes)}
/>
</Form.Group> </Form.Group>
<Form.Group> <Form.Group>
<Button <Button

View file

@ -44,6 +44,9 @@ export const SettingsTasksPanel: React.FC = () => {
const [scanGenerateSprites, setScanGenerateSprites] = useState<boolean>( const [scanGenerateSprites, setScanGenerateSprites] = useState<boolean>(
false false
); );
const [scanGeneratePhashes, setScanGeneratePhashes] = useState<boolean>(
false
);
const [cleanDryRun, setCleanDryRun] = useState<boolean>(false); const [cleanDryRun, setCleanDryRun] = useState<boolean>(false);
const [ const [
scanGenerateImagePreviews, scanGenerateImagePreviews,
@ -206,6 +209,7 @@ export const SettingsTasksPanel: React.FC = () => {
scanGeneratePreviews, scanGeneratePreviews,
scanGenerateImagePreviews, scanGenerateImagePreviews,
scanGenerateSprites, scanGenerateSprites,
scanGeneratePhashes,
}); });
Toast.success({ content: "Started scan" }); Toast.success({ content: "Started scan" });
jobStatus.refetch(); jobStatus.refetch();
@ -414,6 +418,12 @@ export const SettingsTasksPanel: React.FC = () => {
label="Generate sprites during scan (for the scene scrubber)" label="Generate sprites during scan (for the scene scrubber)"
onChange={() => setScanGenerateSprites(!scanGenerateSprites)} onChange={() => setScanGenerateSprites(!scanGenerateSprites)}
/> />
<Form.Check
id="scan-generate-phashes"
checked={scanGeneratePhashes}
label="Generate phashes during scan (for deduplication and scene identification)"
onChange={() => setScanGeneratePhashes(!scanGeneratePhashes)}
/>
</Form.Group> </Form.Group>
<Form.Group> <Form.Group>
<Button <Button

View file

@ -70,3 +70,56 @@
list-style: none; list-style: none;
} }
} }
// Duplicate checker settings panel (SettingsDuplicatePanel.tsx).
.DuplicateChecker {
  min-width: 768px;

  .filter-container {
    margin: 0;
  }

  // Thick top border visually separates each group of duplicate rows.
  .duplicate-group {
    border-top: 50px solid #30404d;

    &:first-child {
      border-top: none;
    }
  }

  &-table {
    table-layout: fixed;
    width: 100%;
  }

  // Fixed column widths, applied via <col> elements in the table's
  // colgroup (table-layout: fixed makes these authoritative).
  &-checkbox {
    width: 10px;
  }

  &-sprite {
    width: 110px;
  }

  &-duration {
    width: 80px;
  }

  &-filesize {
    width: 90px;
  }

  &-resolution {
    width: 100px;
  }

  &-bitrate {
    width: 100px;
  }

  &-codec {
    width: 70px;
  }

  &-operations {
    width: 70px;
  }
}

View file

@ -43,13 +43,18 @@ const getDurationStatus = (
const getFingerprintStatus = ( const getFingerprintStatus = (
scene: IStashBoxScene, scene: IStashBoxScene,
stashChecksum?: string stashScene: GQL.SlimSceneDataFragment
) => { ) => {
if (scene.fingerprints.some((f) => f.hash === stashChecksum)) const checksum = stashScene.checksum ?? stashScene.oshash ?? undefined;
const checksumMatch = scene.fingerprints.some((f) => f.hash === checksum);
const phashMatch = scene.fingerprints.some(
(f) => f.hash === stashScene.phash
);
if (checksumMatch || phashMatch)
return ( return (
<div className="font-weight-bold"> <div className="font-weight-bold">
<SuccessIcon className="mr-2" /> <SuccessIcon className="mr-2" />
Checksum is a match {phashMatch ? "PHash" : "Checksum"} is a match
</div> </div>
); );
}; };
@ -374,10 +379,7 @@ const StashSearchResult: React.FC<IStashSearchResultProps> = ({
Performers: {scene?.performers?.map((p) => p.name).join(", ")} Performers: {scene?.performers?.map((p) => p.name).join(", ")}
</div> </div>
{getDurationStatus(scene, stashScene.file?.duration)} {getDurationStatus(scene, stashScene.file?.duration)}
{getFingerprintStatus( {getFingerprintStatus(scene, stashScene)}
scene,
stashScene.checksum ?? stashScene.oshash ?? undefined
)}
</div> </div>
</div> </div>
</div> </div>

View file

@ -0,0 +1,9 @@
# Dupe Checker
[The dupe checker](/settings?tab=duplicates) searches your collection for scenes that are perceptually similar. This means that the files don't need to be identical, and will be identified even with different bitrates, resolutions, and intros/outros.
To achieve this, stash needs to generate what's called a phash, or perceptual hash. Similar to sprite generation, stash will generate a set of 25 images from fixed points in the scene. These images will be stitched together, and then hashed using the phash algorithm. The phash can then be used to find scenes that are the same or similar to others in the database. Phash generation can be run during scan, or as a separate task. Note that generation can take a while due to the work involved with extracting screenshots.
The dupe checker can be run with four different levels of accuracy. `Exact` looks for scenes that have exactly the same phash. This is a fast and accurate operation that should not yield any false positives except in very rare cases. The other accuracy levels look for duplicate files within a set distance of each other. This means the scenes don't have exactly the same phash, but are very similar. `High` and `Medium` should still yield very good results with few or no false positives. `Low` is likely to produce some false positives, but might still be useful for finding dupes.
Note that to generate a phash stash requires an uncorrupted file. If any errors are encountered during sprite generation the phash will not be generated. This is to prevent false positives.

14
vendor/github.com/corona10/goimagehash/.gitignore generated vendored Normal file
View file

@ -0,0 +1,14 @@
# Binaries for programs and plugins
*.exe
*.dll
*.so
*.dylib
# Test binary, build with `go test -c`
*.test
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736
.glide/

5
vendor/github.com/corona10/goimagehash/AUTHORS.md generated vendored Normal file
View file

@ -0,0 +1,5 @@
## AUTHORS
- [Dominik Honnef](https://github.com/dominikh) dominik@honnef.co
- [Dong-hee Na](https://github.com/corona10/) donghee.na92@gmail.com
- [Gustavo Brunoro](https://github.com/brunoro/) git@hitnail.net
- [Alex Higashino](https://github.com/TokyoWolFrog/) TokyoWolFrog@mayxyou.com

1
vendor/github.com/corona10/goimagehash/CODEOWNERS generated vendored Normal file
View file

@ -0,0 +1 @@
*.go @corona10

17
vendor/github.com/corona10/goimagehash/Gopkg.lock generated vendored Normal file
View file

@ -0,0 +1,17 @@
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
[[projects]]
branch = "master"
digest = "1:34534b73e925d20cc72cf202f8b482fdcbe3a1b113e19375f31aadabd0f0f97d"
name = "github.com/nfnt/resize"
packages = ["."]
pruneopts = "UT"
revision = "83c6a9932646f83e3267f353373d47347b6036b2"
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
input-imports = ["github.com/nfnt/resize"]
solver-name = "gps-cdcl"
solver-version = 1

34
vendor/github.com/corona10/goimagehash/Gopkg.toml generated vendored Normal file
View file

@ -0,0 +1,34 @@
# Gopkg.toml example
#
# Refer to https://golang.github.io/dep/docs/Gopkg.toml.html
# for detailed Gopkg.toml documentation.
#
# required = ["github.com/user/thing/cmd/thing"]
# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
#
# [[constraint]]
# name = "github.com/user/project"
# version = "1.0.0"
#
# [[constraint]]
# name = "github.com/user/project2"
# branch = "dev"
# source = "github.com/myfork/project2"
#
# [[override]]
# name = "github.com/x/y"
# version = "2.4.0"
#
# [prune]
# non-go = false
# go-tests = true
# unused-packages = true
[[constraint]]
branch = "master"
name = "github.com/nfnt/resize"
[prune]
go-tests = true
unused-packages = true

25
vendor/github.com/corona10/goimagehash/LICENSE generated vendored Normal file
View file

@ -0,0 +1,25 @@
BSD 2-Clause License
Copyright (c) 2017, Dong-hee Na
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

93
vendor/github.com/corona10/goimagehash/README.md generated vendored Normal file
View file

@ -0,0 +1,93 @@
![GitHub Action](https://github.com/corona10/goimagehash/workflows/goimagehash%20workflow/badge.svg)
[![GoDoc](https://godoc.org/github.com/corona10/goimagehash?status.svg)](https://godoc.org/github.com/corona10/goimagehash)
[![Go Report Card](https://goreportcard.com/badge/github.com/corona10/goimagehash)](https://goreportcard.com/report/github.com/corona10/goimagehash)
# goimagehash
> Inspired by [imagehash](https://github.com/JohannesBuchner/imagehash)
An image hashing library written in Go. ImageHash supports:
* [Average hashing](http://www.hackerfactor.com/blog/index.php?/archives/432-Looks-Like-It.html)
* [Difference hashing](http://www.hackerfactor.com/blog/index.php?/archives/529-Kind-of-Like-That.html)
* [Perception hashing](http://www.hackerfactor.com/blog/index.php?/archives/432-Looks-Like-It.html)
* [Wavelet hashing](https://fullstackml.com/wavelet-image-hash-in-python-3504fdd282b5) [TODO]
## Installation
```
go get github.com/corona10/goimagehash
```
## Special thanks to
* [Haeun Kim](https://github.com/haeungun/)
## Usage
``` Go
func main() {
file1, _ := os.Open("sample1.jpg")
file2, _ := os.Open("sample2.jpg")
defer file1.Close()
defer file2.Close()
img1, _ := jpeg.Decode(file1)
img2, _ := jpeg.Decode(file2)
hash1, _ := goimagehash.AverageHash(img1)
hash2, _ := goimagehash.AverageHash(img2)
distance, _ := hash1.Distance(hash2)
fmt.Printf("Distance between images: %v\n", distance)
hash1, _ = goimagehash.DifferenceHash(img1)
hash2, _ = goimagehash.DifferenceHash(img2)
distance, _ = hash1.Distance(hash2)
fmt.Printf("Distance between images: %v\n", distance)
width, height := 8, 8
hash3, _ = goimagehash.ExtAverageHash(img1, width, height)
hash4, _ = goimagehash.ExtAverageHash(img2, width, height)
distance, _ = hash3.Distance(hash4)
fmt.Printf("Distance between images: %v\n", distance)
fmt.Printf("hash3 bit size: %v\n", hash3.Bits())
fmt.Printf("hash4 bit size: %v\n", hash4.Bits())
var b bytes.Buffer
foo := bufio.NewWriter(&b)
_ = hash4.Dump(foo)
foo.Flush()
bar := bufio.NewReader(&b)
hash5, _ := goimagehash.LoadExtImageHash(bar)
}
```
## Release Note
### v1.0.3
- Add workflow for GithubAction
- Fix typo on the GoDoc for LoadImageHash
### v1.0.2
- go.mod is now used for install goimagehash
### v1.0.1
- Perception/ExtPerception hash creation times are reduced
### v1.0.0
**IMPORTANT**
goimagehash v1.0.0 does not have compatible with the before version for future features
- More flexible extended hash APIs are provided ([ExtAverageHash](https://godoc.org/github.com/corona10/goimagehash#ExtAverageHash), [ExtPerceptionHash](https://godoc.org/github.com/corona10/goimagehash#ExtPerceptionHash), [ExtDifferenceHash](https://godoc.org/github.com/corona10/goimagehash#ExtDifferenceHash))
- New serialization APIs are provided([ImageHash.Dump](https://godoc.org/github.com/corona10/goimagehash#ImageHash.Dump), [ExtImageHash.Dump](https://godoc.org/github.com/corona10/goimagehash#ExtImageHash.Dump))
- [ExtImageHashFromString](https://godoc.org/github.com/corona10/goimagehash#ExtImageHashFromString), [ImageHashFromString](https://godoc.org/github.com/corona10/goimagehash#ImageHashFromString) is deprecated and will be removed
- New deserialization APIs are provided([LoadImageHash](https://godoc.org/github.com/corona10/goimagehash#LoadImageHash), [LoadExtImageHash](https://godoc.org/github.com/corona10/goimagehash#LoadExtImageHash))
- Bits APIs are provided to measure actual bit size of hash
### v0.3.0
- Support DifferenceHashExtend.
- Support AverageHashExtend.
- Support PerceptionHashExtend by @TokyoWolFrog.
### v0.2.0
- Perception Hash is updated.
- Fix a critical bug of finding median value.
### v0.1.0
- Support Average hashing
- Support Difference hashing
- Support Perception hashing
- Use bits.OnesCount64 for computing Hamming distance by @dominikh
- Support hex serialization methods to ImageHash by @brunoro

5
vendor/github.com/corona10/goimagehash/doc.go generated vendored Normal file
View file

@ -0,0 +1,5 @@
// Copyright 2017 The goimagehash Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package goimagehash

5
vendor/github.com/corona10/goimagehash/etcs/doc.go generated vendored Normal file
View file

@ -0,0 +1,5 @@
// Copyright 2017 The goimagehash Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package etcs

61
vendor/github.com/corona10/goimagehash/etcs/utils.go generated vendored Normal file
View file

@ -0,0 +1,61 @@
// Copyright 2017 The goimagehash Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package etcs
// MeanOfPixels function returns a mean of pixels.
// MeanOfPixels function returns a mean of pixels.
// An empty slice yields 0 rather than NaN.
func MeanOfPixels(pixels []float64) float64 {
	count := len(pixels)
	if count == 0 {
		return 0
	}
	sum := 0.0
	for _, v := range pixels {
		sum += v
	}
	return sum / float64(count)
}
// MedianOfPixels function returns a median value of pixels.
// It uses quick selection algorithm.
// The input slice is left untouched; selection runs on a private copy.
func MedianOfPixels(pixels []float64) float64 {
	work := append([]float64(nil), pixels...)
	mid := len(work) / 2
	return quickSelectMedian(work, 0, len(work)-1, mid)
}
// quickSelectMedian partially sorts sequence in place using quickselect
// (Lomuto-style partitioning) until index k holds the k-th smallest
// value, then returns the median. Callers pass k = len/2; for
// even-length inputs the result is the average of the two middle
// values. NOTE(review): the even case reads sequence[k-1], which is
// assumed to be settled by the partitioning passes — verify against the
// caller's usage before relying on this for arbitrary (low, hi, k).
func quickSelectMedian(sequence []float64, low int, hi int, k int) float64 {
	if low == hi {
		return sequence[k]
	}
	for low < hi {
		// Pivot at the midpoint; written as low/2 + hi/2 (presumably to
		// avoid overflow of low+hi — harmless either way for slice indexes).
		pivot := low/2 + hi/2
		pivotValue := sequence[pivot]
		storeIdx := low
		// Move the pivot out of the way, partition, then restore it at
		// its final position storeIdx.
		sequence[pivot], sequence[hi] = sequence[hi], sequence[pivot]
		for i := low; i < hi; i++ {
			if sequence[i] < pivotValue {
				sequence[storeIdx], sequence[i] = sequence[i], sequence[storeIdx]
				storeIdx++
			}
		}
		sequence[hi], sequence[storeIdx] = sequence[storeIdx], sequence[hi]
		// Recurse iteratively into the half that contains index k.
		if k <= storeIdx {
			hi = storeIdx
		} else {
			low = storeIdx + 1
		}
	}
	if len(sequence)%2 == 0 {
		// Even length: average the two middle elements; dividing each
		// term separately avoids overflow of the sum.
		return sequence[k-1]/2 + sequence[k]/2
	}
	return sequence[k]
}

3
vendor/github.com/corona10/goimagehash/go.mod generated vendored Normal file
View file

@ -0,0 +1,3 @@
module github.com/corona10/goimagehash
require github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646

2
vendor/github.com/corona10/goimagehash/go.sum generated vendored Normal file
View file

@ -0,0 +1,2 @@
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 h1:zYyBkD/k9seD2A7fsi6Oo2LfFZAehjjQMERAvZLEDnQ=
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8=

183
vendor/github.com/corona10/goimagehash/hashcompute.go generated vendored Normal file
View file

@ -0,0 +1,183 @@
// Copyright 2017 The goimagehash Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package goimagehash
import (
"errors"
"image"
"github.com/corona10/goimagehash/etcs"
"github.com/corona10/goimagehash/transforms"
"github.com/nfnt/resize"
)
// AverageHash function returns a hash computation of average hash.
// Implementation follows
// http://www.hackerfactor.com/blog/index.php?/archives/432-Looks-Like-It.html
//
// Returns an error only for a nil image.
func AverageHash(img image.Image) (*ImageHash, error) {
	if img == nil {
		return nil, errors.New("Image object can not be nil")
	}

	// Create 64bits hash.
	ahash := NewImageHash(0, AHash)
	// Downscale to 8x8 grayscale and set a bit for every pixel brighter
	// than the mean; bit order runs MSB-first across the flattened grid.
	resized := resize.Resize(8, 8, img, resize.Bilinear)
	pixels := transforms.Rgb2Gray(resized)
	flattens := transforms.FlattenPixels(pixels, 8, 8)
	avg := etcs.MeanOfPixels(flattens)

	for idx, p := range flattens {
		if p > avg {
			ahash.leftShiftSet(len(flattens) - idx - 1)
		}
	}

	return ahash, nil
}
// DifferenceHash function returns a hash computation of difference hash.
// Implementation follows
// http://www.hackerfactor.com/blog/?/archives/529-Kind-of-Like-That.html
//
// Returns an error only for a nil image.
func DifferenceHash(img image.Image) (*ImageHash, error) {
	if img == nil {
		return nil, errors.New("Image object can not be nil")
	}

	dhash := NewImageHash(0, DHash)
	// 9x8 grayscale gives 8 horizontal comparisons per row; a bit is set
	// whenever a pixel is darker than its right-hand neighbor.
	resized := resize.Resize(9, 8, img, resize.Bilinear)
	pixels := transforms.Rgb2Gray(resized)
	idx := 0
	for i := 0; i < len(pixels); i++ {
		for j := 0; j < len(pixels[i])-1; j++ {
			if pixels[i][j] < pixels[i][j+1] {
				dhash.leftShiftSet(64 - idx - 1)
			}
			idx++
		}
	}

	return dhash, nil
}
// PerceptionHash function returns a hash computation of phash.
// Implementation follows
// http://www.hackerfactor.com/blog/index.php?/archives/432-Looks-Like-It.html
//
// Returns an error only for a nil image.
func PerceptionHash(img image.Image) (*ImageHash, error) {
	if img == nil {
		return nil, errors.New("Image object can not be nil")
	}

	phash := NewImageHash(0, PHash)
	// 64x64 grayscale -> 2D DCT; keep only the top-left 8x8 block of
	// low-frequency coefficients, then set a bit for each coefficient
	// above the median.
	resized := resize.Resize(64, 64, img, resize.Bilinear)
	pixels := transforms.Rgb2Gray(resized)
	dct := transforms.DCT2D(pixels, 64, 64)
	flattens := transforms.FlattenPixels(dct, 8, 8)
	median := etcs.MedianOfPixels(flattens)

	for idx, p := range flattens {
		if p > median {
			phash.leftShiftSet(len(flattens) - idx - 1)
		}
	}
	return phash, nil
}
// ExtPerceptionHash function returns phash of which the size can be set larger than uint64
// Some variable name refer to https://github.com/JohannesBuchner/imagehash/blob/master/imagehash/__init__.py
// Support 64bits phash (width=8, height=8) and 256bits phash (width=16, height=16)
// Important: width * height should be the power of 2
//
// Returns an error for a nil image or when width*height is not a
// positive power of two.
func ExtPerceptionHash(img image.Image, width, height int) (*ExtImageHash, error) {
	imgSize := width * height

	if img == nil {
		return nil, errors.New("Image object can not be nil")
	}
	// Power-of-two check via the n & (n-1) trick.
	if imgSize <= 0 || imgSize&(imgSize-1) != 0 {
		return nil, errors.New("width * height should be power of 2")
	}

	var phash []uint64
	// Resize to imgSize x imgSize (not width x height), run the DCT, and
	// keep the width x height low-frequency block.
	resized := resize.Resize(uint(imgSize), uint(imgSize), img, resize.Bilinear)
	pixels := transforms.Rgb2Gray(resized)
	dct := transforms.DCT2D(pixels, imgSize, imgSize)
	flattens := transforms.FlattenPixels(dct, width, height)
	median := etcs.MedianOfPixels(flattens)

	// Pack one bit per coefficient into 64-bit words, MSB-first.
	lenOfUnit := 64
	if imgSize%lenOfUnit == 0 {
		phash = make([]uint64, imgSize/lenOfUnit)
	} else {
		phash = make([]uint64, imgSize/lenOfUnit+1)
	}
	for idx, p := range flattens {
		indexOfArray := idx / lenOfUnit
		indexOfBit := lenOfUnit - idx%lenOfUnit - 1
		if p > median {
			phash[indexOfArray] |= 1 << uint(indexOfBit)
		}
	}
	return NewExtImageHash(phash, PHash, imgSize), nil
}
// ExtAverageHash function returns ahash of which the size can be set larger than uint64
// Support 64bits ahash (width=8, height=8) and 256bits ahash (width=16, height=16)
//
// Returns an error only for a nil image.
func ExtAverageHash(img image.Image, width, height int) (*ExtImageHash, error) {
	if img == nil {
		return nil, errors.New("Image object can not be nil")
	}
	var ahash []uint64
	imgSize := width * height

	// Downscale to width x height grayscale; each bit marks a pixel
	// brighter than the mean.
	resized := resize.Resize(uint(width), uint(height), img, resize.Bilinear)
	pixels := transforms.Rgb2Gray(resized)
	flattens := transforms.FlattenPixels(pixels, width, height)
	avg := etcs.MeanOfPixels(flattens)

	// Pack one bit per pixel into 64-bit words, MSB-first.
	lenOfUnit := 64
	if imgSize%lenOfUnit == 0 {
		ahash = make([]uint64, imgSize/lenOfUnit)
	} else {
		ahash = make([]uint64, imgSize/lenOfUnit+1)
	}
	for idx, p := range flattens {
		indexOfArray := idx / lenOfUnit
		indexOfBit := lenOfUnit - idx%lenOfUnit - 1
		if p > avg {
			ahash[indexOfArray] |= 1 << uint(indexOfBit)
		}
	}
	return NewExtImageHash(ahash, AHash, imgSize), nil
}
// ExtDifferenceHash function returns dhash of which the size can be set larger than uint64
// Support 64bits dhash (width=8, height=8) and 256bits dhash (width=16, height=16)
//
// A bit is set for every pixel that is darker than its right-hand neighbour.
func ExtDifferenceHash(img image.Image, width, height int) (*ExtImageHash, error) {
	if img == nil {
		return nil, errors.New("Image object can not be nil")
	}
	imgSize := width * height
	// One extra column so every one of the width comparisons per row has a
	// right neighbour.
	resized := resize.Resize(uint(width)+1, uint(height), img, resize.Bilinear)
	pixels := transforms.Rgb2Gray(resized)
	// One bit per comparison, packed MSB-first into 64-bit words.
	lenOfUnit := 64
	numUnits := imgSize / lenOfUnit
	if imgSize%lenOfUnit != 0 {
		numUnits++
	}
	dhash := make([]uint64, numUnits)
	idx := 0
	for _, row := range pixels {
		for j := 0; j+1 < len(row); j++ {
			if row[j] < row[j+1] {
				dhash[idx/lenOfUnit] |= 1 << uint(lenOfUnit-idx%lenOfUnit-1)
			}
			idx++
		}
	}
	return NewExtImageHash(dhash, DHash, imgSize), nil
}

294
vendor/github.com/corona10/goimagehash/imagehash.go generated vendored Normal file
View file

@ -0,0 +1,294 @@
// Copyright 2017 The goimagehash Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package goimagehash
import (
"encoding/binary"
"encoding/gob"
"encoding/hex"
"errors"
"fmt"
"io"
)
// Kind describes the kinds of hash.
type Kind int

// ImageHash is a struct of hash computation.
type ImageHash struct {
	hash uint64 // 64-bit hash value, one bit per sample position
	kind Kind   // which algorithm produced the hash
}

// ExtImageHash is a struct of big hash computation.
// Earlier sample positions occupy the more significant bits of earlier words.
type ExtImageHash struct {
	hash []uint64 // hash payload, packed into 64-bit words
	kind Kind     // which algorithm produced the hash
	bits int      // number of meaningful bits in hash
}

const (
	// Unknown is a enum value of the unknown hash.
	Unknown Kind = iota
	// AHash is a enum value of the average hash.
	AHash
	//PHash is a enum value of the perceptual hash.
	PHash
	// DHash is a enum value of the difference hash.
	DHash
	// WHash is a enum value of the wavelet hash.
	WHash
)
// NewImageHash function creates a new image hash holding the given 64-bit
// value and kind.
func NewImageHash(hash uint64, kind Kind) *ImageHash {
	h := ImageHash{}
	h.hash = hash
	h.kind = kind
	return &h
}
// Bits method returns an actual hash bit size.
// An ImageHash is always exactly 64 bits wide.
func (h *ImageHash) Bits() int {
	const hashBits = 64
	return hashBits
}
// Distance method returns the Hamming distance between two hashes, or an
// error when the hashes were produced by different algorithms.
func (h *ImageHash) Distance(other *ImageHash) (int, error) {
	if h.GetKind() != other.GetKind() {
		return -1, errors.New("Image hashes's kind should be identical")
	}
	// XOR leaves exactly the differing bits set; count them.
	return popcnt(h.GetHash() ^ other.GetHash()), nil
}
// GetHash method returns the raw 64-bit hash value.
func (h *ImageHash) GetHash() uint64 {
	return h.hash
}
// GetKind method returns the kind (algorithm) of this image hash.
func (h *ImageHash) GetKind() Kind {
	return h.kind
}
// leftShiftSet sets the bit at position idx (counted from the least
// significant bit) in the hash value.
func (h *ImageHash) leftShiftSet(idx int) {
	h.hash |= 1 << uint(idx)
}
const strFmt = "%1s:%016x"
// Dump method writes a gob binary serialization of the hash into w io.Writer.
// The counterpart is LoadImageHash.
func (h *ImageHash) Dump(w io.Writer) error {
	// Exported wrapper fields so encoding/gob can serialize them.
	type D struct {
		Hash uint64
		Kind Kind
	}
	return gob.NewEncoder(w).Encode(D{Hash: h.hash, Kind: h.kind})
}
// LoadImageHash reads a gob-serialized ImageHash (as written by Dump) from b.
func LoadImageHash(b io.Reader) (*ImageHash, error) {
	// Exported wrapper fields matching the layout written by Dump.
	type E struct {
		Hash uint64
		Kind Kind
	}
	var decoded E
	if err := gob.NewDecoder(b).Decode(&decoded); err != nil {
		return nil, err
	}
	return &ImageHash{hash: decoded.Hash, kind: decoded.Kind}, nil
}
// ImageHashFromString returns an image hash from a hex representation
//
// Deprecated: Use goimagehash.LoadImageHash instead.
func ImageHashFromString(s string) (*ImageHash, error) {
	var (
		kindStr string
		hash    uint64
	)
	if _, err := fmt.Sscanf(s, strFmt, &kindStr, &hash); err != nil {
		return nil, errors.New("Couldn't parse string " + s)
	}
	// Unrecognized tags map to the zero value, Unknown.
	kinds := map[string]Kind{"a": AHash, "p": PHash, "d": DHash, "w": WHash}
	return NewImageHash(hash, kinds[kindStr]), nil
}
// ToString returns the hex representation of the hash, prefixed by a
// one-letter kind tag (empty for Unknown).
func (h *ImageHash) ToString() string {
	names := map[Kind]string{AHash: "a", PHash: "p", DHash: "d", WHash: "w"}
	return fmt.Sprintf(strFmt, names[h.kind], h.hash)
}
// NewExtImageHash function creates a new big hash from pre-packed 64-bit
// words, the producing algorithm, and the meaningful bit count.
func NewExtImageHash(hash []uint64, kind Kind, bits int) *ExtImageHash {
	return &ExtImageHash{
		hash: hash,
		kind: kind,
		bits: bits,
	}
}
// Bits method returns the actual number of meaningful bits in the hash
// (which may be smaller than len(hash)*64).
func (h *ExtImageHash) Bits() int {
	return h.bits
}
// Distance method returns the Hamming distance between two big hashes, or an
// error if their kinds, bit sizes, or word counts differ.
func (h *ExtImageHash) Distance(other *ExtImageHash) (int, error) {
	if h.GetKind() != other.GetKind() {
		return -1, errors.New("Extended Image hashes's kind should be identical")
	}
	if h.Bits() != other.Bits() {
		msg := fmt.Sprintf("Extended image hash should has an identical bit size but got %v vs %v", h.Bits(), other.Bits())
		return -1, errors.New(msg)
	}
	lHash, rHash := h.GetHash(), other.GetHash()
	if len(lHash) != len(rHash) {
		return -1, errors.New("Extended Image hashes's size should be identical")
	}
	// Sum the differing bits word by word.
	total := 0
	for i := range lHash {
		total += popcnt(lHash[i] ^ rHash[i])
	}
	return total, nil
}
// GetHash method returns the raw packed hash words.
func (h *ExtImageHash) GetHash() []uint64 {
	return h.hash
}
// GetKind method returns the kind (algorithm) of this big hash.
func (h *ExtImageHash) GetKind() Kind {
	return h.kind
}
// Dump method writes a gob binary serialization of the big hash into w.
// The counterpart is LoadExtImageHash.
func (h *ExtImageHash) Dump(w io.Writer) error {
	// Exported wrapper fields so encoding/gob can serialize them.
	type D struct {
		Hash []uint64
		Kind Kind
		Bits int
	}
	return gob.NewEncoder(w).Encode(D{Hash: h.hash, Kind: h.kind, Bits: h.bits})
}
// LoadExtImageHash reads a gob-serialized ExtImageHash (as written by Dump)
// from b.
func LoadExtImageHash(b io.Reader) (*ExtImageHash, error) {
	// Exported wrapper fields matching the layout written by Dump.
	type E struct {
		Hash []uint64
		Kind Kind
		Bits int
	}
	var decoded E
	if err := gob.NewDecoder(b).Decode(&decoded); err != nil {
		return nil, err
	}
	return &ExtImageHash{hash: decoded.Hash, kind: decoded.Kind, bits: decoded.Bits}, nil
}
const extStrFmt = "%1s:%s"
// ExtImageHashFromString returns a big hash from a hex representation
//
// Deprecated: Use goimagehash.LoadExtImageHash instead.
func ExtImageHashFromString(s string) (*ExtImageHash, error) {
	var kindStr, hashStr string
	if _, err := fmt.Sscanf(s, extStrFmt, &kindStr, &hashStr); err != nil {
		return nil, errors.New("Couldn't parse string " + s)
	}
	hexBytes, err := hex.DecodeString(hashStr)
	if err != nil {
		return nil, err
	}
	// Re-pack the bytes into big-endian 64-bit words; any trailing partial
	// word is dropped, matching the ToString encoding.
	const lenOfByte = 8
	var hash []uint64
	for i := 0; i+lenOfByte <= len(hexBytes); i += lenOfByte {
		hash = append(hash, binary.BigEndian.Uint64(hexBytes[i:i+lenOfByte]))
	}
	// Unrecognized tags map to the zero value, Unknown.
	kinds := map[string]Kind{"a": AHash, "p": PHash, "d": DHash, "w": WHash}
	return NewExtImageHash(hash, kinds[kindStr], len(hash)*64), nil
}
// ToString returns the hex representation of the big hash, prefixed by a
// one-letter kind tag (empty for Unknown).
func (h *ExtImageHash) ToString() string {
	raw := make([]byte, 0, len(h.hash)*8)
	for _, word := range h.hash {
		var chunk [8]byte
		binary.BigEndian.PutUint64(chunk[:], word)
		raw = append(raw, chunk[:]...)
	}
	names := map[Kind]string{AHash: "a", PHash: "p", DHash: "d", WHash: "w"}
	return fmt.Sprintf(extStrFmt, names[h.kind], hex.EncodeToString(raw))
}

13
vendor/github.com/corona10/goimagehash/imagehash18.go generated vendored Normal file
View file

@ -0,0 +1,13 @@
// +build !go1.9
package goimagehash
// popcnt returns the number of set bits in x.
// Software fallback for Go versions before 1.9 (no math/bits).
func popcnt(x uint64) int {
	// Kernighan's trick: each x &= x-1 clears the lowest set bit.
	count := 0
	for ; x != 0; x &= x - 1 {
		count++
	}
	return count
}

View file

@ -0,0 +1,9 @@
// +build go1.9
package goimagehash
import (
"math/bits"
)
func popcnt(x uint64) int { return bits.OnesCount64(x) }

View file

@ -0,0 +1,75 @@
// Copyright 2017 The goimagehash Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package transforms
import (
"math"
"sync"
)
// DCT1D function returns result of DCT-II.
// DCT type II, unscaled. Algorithm by Byeong Gi Lee, 1984.
// The transform is computed in place: input is overwritten and the same
// slice is returned. The recursion requires a power-of-two length
// (callers pass 64 or width*height).
func DCT1D(input []float64) []float64 {
	scratch := make([]float64, len(input))
	forwardTransform(input, scratch, len(input))
	return input
}
// forwardTransform is the recursive core of Lee's fast DCT-II. It transforms
// the first n elements of data in place, using scratch as working storage of
// at least the same length. n must be a power of two.
func forwardTransform(data, scratch []float64, n int) {
	if n == 1 {
		return
	}
	half := n / 2
	// Butterfly: sums in the lower half, scaled differences in the upper half.
	for i := 0; i < half; i++ {
		a, b := data[i], data[n-1-i]
		scratch[i] = a + b
		scratch[i+half] = (a - b) / (math.Cos((float64(i)+0.5)*math.Pi/float64(n)) * 2)
	}
	forwardTransform(scratch, data, half)
	forwardTransform(scratch[half:], data, half)
	// Interleave: even outputs from the sum half, odd outputs from adjacent
	// difference-half coefficients.
	for i := 0; i < half-1; i++ {
		data[2*i] = scratch[i]
		data[2*i+1] = scratch[i+half] + scratch[i+half+1]
	}
	data[n-2], data[n-1] = scratch[half-1], scratch[n-1]
}
// DCT2D function returns a result of DCT2D by using the separable property:
// the 1D transform is applied to every row, then to every column, with one
// goroutine per row/column.
//
// Note: DCT1D works in place and returns its argument, so the row pass makes
// output share storage with input (input is mutated). This matches the
// original behavior.
func DCT2D(input [][]float64, w int, h int) [][]float64 {
	output := make([][]float64, h)
	for i := range output {
		output[i] = make([]float64, w)
	}
	var wg sync.WaitGroup
	// Row pass.
	for i := 0; i < h; i++ {
		wg.Add(1)
		go func(row int) {
			defer wg.Done()
			output[row] = DCT1D(input[row])
		}(i)
	}
	wg.Wait()
	// Column pass: gather each column into a scratch slice, transform it,
	// and scatter the result back.
	for i := 0; i < w; i++ {
		wg.Add(1)
		col := make([]float64, h)
		go func(c int) {
			defer wg.Done()
			for r := 0; r < h; r++ {
				col[r] = output[r][c]
			}
			for r, v := range DCT1D(col) {
				output[r][c] = v
			}
		}(i)
	}
	wg.Wait()
	return output
}

View file

@ -0,0 +1,5 @@
// Copyright 2017 The goimagehash Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package transforms

View file

@ -0,0 +1,39 @@
// Copyright 2017 The goimagehash Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package transforms
import (
"image"
)
// Rgb2Gray function converts RGB to a gray scale array.
func Rgb2Gray(colorImg image.Image) [][]float64 {
bounds := colorImg.Bounds()
w, h := bounds.Max.X-bounds.Min.X, bounds.Max.Y-bounds.Min.Y
pixels := make([][]float64, h)
for i := range pixels {
pixels[i] = make([]float64, w)
for j := range pixels[i] {
color := colorImg.At(j, i)
r, g, b, _ := color.RGBA()
lum := 0.299*float64(r/257) + 0.587*float64(g/257) + 0.114*float64(b/256)
pixels[i][j] = lum
}
}
return pixels
}
// FlattenPixels function flattens a 2d array into a 1d array in row-major
// order. x is the number of columns and y the number of rows taken from
// pixels (the top-left x-by-y block).
func FlattenPixels(pixels [][]float64, x int, y int) []float64 {
	flattens := make([]float64, x*y)
	for i := 0; i < y; i++ {
		for j := 0; j < x; j++ {
			// Row stride is x (the row length), not y. The previous index
			// y*i+j only worked for the square x == y case and was
			// out-of-range or overlapping for x != y.
			flattens[x*i+j] = pixels[i][j]
		}
	}
	return flattens
}

7
vendor/github.com/nfnt/resize/.travis.yml generated vendored Normal file
View file

@ -0,0 +1,7 @@
language: go
go:
- "1.x"
- "1.1"
- "1.4"
- "1.10"

13
vendor/github.com/nfnt/resize/LICENSE generated vendored Normal file
View file

@ -0,0 +1,13 @@
Copyright (c) 2012, Jan Schlicht <jan.schlicht@gmail.com>
Permission to use, copy, modify, and/or distribute this software for any purpose
with or without fee is hereby granted, provided that the above copyright notice
and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
THIS SOFTWARE.

151
vendor/github.com/nfnt/resize/README.md generated vendored Normal file
View file

@ -0,0 +1,151 @@
# This package is no longer being updated! Please look for alternatives if that bothers you.
Resize
======
Image resizing for the [Go programming language](http://golang.org) with common interpolation methods.
[![Build Status](https://travis-ci.org/nfnt/resize.svg)](https://travis-ci.org/nfnt/resize)
Installation
------------
```bash
$ go get github.com/nfnt/resize
```
It's that easy!
Usage
-----
This package needs at least Go 1.1. Import package with
```go
import "github.com/nfnt/resize"
```
The resize package provides 2 functions:
* `resize.Resize` creates a scaled image with new dimensions (`width`, `height`) using the interpolation function `interp`.
If either `width` or `height` is set to 0, it will be set to an aspect ratio preserving value.
* `resize.Thumbnail` downscales an image preserving its aspect ratio to the maximum dimensions (`maxWidth`, `maxHeight`).
It will return the original image if original sizes are smaller than the provided dimensions.
```go
resize.Resize(width, height uint, img image.Image, interp resize.InterpolationFunction) image.Image
resize.Thumbnail(maxWidth, maxHeight uint, img image.Image, interp resize.InterpolationFunction) image.Image
```
The provided interpolation functions are (from fast to slow execution time)
- `NearestNeighbor`: [Nearest-neighbor interpolation](http://en.wikipedia.org/wiki/Nearest-neighbor_interpolation)
- `Bilinear`: [Bilinear interpolation](http://en.wikipedia.org/wiki/Bilinear_interpolation)
- `Bicubic`: [Bicubic interpolation](http://en.wikipedia.org/wiki/Bicubic_interpolation)
- `MitchellNetravali`: [Mitchell-Netravali interpolation](http://dl.acm.org/citation.cfm?id=378514)
- `Lanczos2`: [Lanczos resampling](http://en.wikipedia.org/wiki/Lanczos_resampling) with a=2
- `Lanczos3`: [Lanczos resampling](http://en.wikipedia.org/wiki/Lanczos_resampling) with a=3
Which of these methods gives the best results depends on your use case.
Sample usage:
```go
package main
import (
"github.com/nfnt/resize"
"image/jpeg"
"log"
"os"
)
func main() {
// open "test.jpg"
file, err := os.Open("test.jpg")
if err != nil {
log.Fatal(err)
}
// decode jpeg into image.Image
img, err := jpeg.Decode(file)
if err != nil {
log.Fatal(err)
}
file.Close()
// resize to width 1000 using Lanczos resampling
// and preserve aspect ratio
m := resize.Resize(1000, 0, img, resize.Lanczos3)
out, err := os.Create("test_resized.jpg")
if err != nil {
log.Fatal(err)
}
defer out.Close()
// write new image to file
jpeg.Encode(out, m, nil)
}
```
Caveats
-------
* Optimized access routines are used for `image.RGBA`, `image.NRGBA`, `image.RGBA64`, `image.NRGBA64`, `image.YCbCr`, `image.Gray`, and `image.Gray16` types. All other image types are accessed in a generic way that will result in slow processing speed.
* JPEG images are stored in `image.YCbCr`. This image format stores data in a way that will decrease processing speed. A resize may be up to 2 times slower than with `image.RGBA`.
Downsizing Samples
-------
Downsizing is not as simple as it might look like. Images have to be filtered before they are scaled down, otherwise aliasing might occur.
Filtering is highly subjective: Applying too much will blur the whole image, too little will make aliasing become apparent.
Resize tries to provide sane defaults that should suffice in most cases.
### Artificial sample
Original image
![Rings](http://nfnt.github.com/img/rings_lg_orig.png)
<table>
<tr>
<th><img src="http://nfnt.github.com/img/rings_300_NearestNeighbor.png" /><br>Nearest-Neighbor</th>
<th><img src="http://nfnt.github.com/img/rings_300_Bilinear.png" /><br>Bilinear</th>
</tr>
<tr>
<th><img src="http://nfnt.github.com/img/rings_300_Bicubic.png" /><br>Bicubic</th>
<th><img src="http://nfnt.github.com/img/rings_300_MitchellNetravali.png" /><br>Mitchell-Netravali</th>
</tr>
<tr>
<th><img src="http://nfnt.github.com/img/rings_300_Lanczos2.png" /><br>Lanczos2</th>
<th><img src="http://nfnt.github.com/img/rings_300_Lanczos3.png" /><br>Lanczos3</th>
</tr>
</table>
### Real-Life sample
Original image
![Original](http://nfnt.github.com/img/IMG_3694_720.jpg)
<table>
<tr>
<th><img src="http://nfnt.github.com/img/IMG_3694_300_NearestNeighbor.png" /><br>Nearest-Neighbor</th>
<th><img src="http://nfnt.github.com/img/IMG_3694_300_Bilinear.png" /><br>Bilinear</th>
</tr>
<tr>
<th><img src="http://nfnt.github.com/img/IMG_3694_300_Bicubic.png" /><br>Bicubic</th>
<th><img src="http://nfnt.github.com/img/IMG_3694_300_MitchellNetravali.png" /><br>Mitchell-Netravali</th>
</tr>
<tr>
<th><img src="http://nfnt.github.com/img/IMG_3694_300_Lanczos2.png" /><br>Lanczos2</th>
<th><img src="http://nfnt.github.com/img/IMG_3694_300_Lanczos3.png" /><br>Lanczos3</th>
</tr>
</table>
License
-------
Copyright (c) 2012 Jan Schlicht <janschlicht@gmail.com>
Resize is released under a MIT style license.

438
vendor/github.com/nfnt/resize/converter.go generated vendored Normal file
View file

@ -0,0 +1,438 @@
/*
Copyright (c) 2012, Jan Schlicht <jan.schlicht@gmail.com>
Permission to use, copy, modify, and/or distribute this software for any purpose
with or without fee is hereby granted, provided that the above copyright notice
and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
THIS SOFTWARE.
*/
package resize
import "image"
// clampUint8 keeps value in the [0,255] range and narrows it to uint8.
func clampUint8(in int32) uint8 {
	switch {
	case in < 0:
		return 0
	case in > 255:
		return 255
	default:
		return uint8(in)
	}
}
// clampUint16 keeps value in the [0,65535] range and narrows it to uint16.
func clampUint16(in int64) uint16 {
	switch {
	case in < 0:
		return 0
	case in > 65535:
		return 65535
	default:
		return uint16(in)
	}
}
// resizeGeneric resamples one axis of in into out through the generic (slow)
// image.Image interface, used when no optimized pixel layout applies.
// coeffs holds filterLength fixed-point weights per output position and
// offset the window start index per position.
// NOTE(review): x walks output rows while y walks output columns — the
// caller appears to operate on a transposed intermediate; confirm against
// the package's resize entry points.
func resizeGeneric(in image.Image, out *image.RGBA64, scale float64, coeffs []int32, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var rgba [4]int64
			var sum int64
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				coeff := coeffs[ci+i]
				if coeff != 0 {
					// Clamp the sample index into [0, maxX].
					xi := start + i
					switch {
					case xi < 0:
						xi = 0
					case xi >= maxX:
						xi = maxX
					}
					r, g, b, a := in.At(xi+in.Bounds().Min.X, x+in.Bounds().Min.Y).RGBA()
					rgba[0] += int64(coeff) * int64(r)
					rgba[1] += int64(coeff) * int64(g)
					rgba[2] += int64(coeff) * int64(b)
					rgba[3] += int64(coeff) * int64(a)
					sum += int64(coeff)
				}
			}
			// Normalize by the weight sum and store as big-endian 16-bit channels.
			offset := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*8
			value := clampUint16(rgba[0] / sum)
			out.Pix[offset+0] = uint8(value >> 8)
			out.Pix[offset+1] = uint8(value)
			value = clampUint16(rgba[1] / sum)
			out.Pix[offset+2] = uint8(value >> 8)
			out.Pix[offset+3] = uint8(value)
			value = clampUint16(rgba[2] / sum)
			out.Pix[offset+4] = uint8(value >> 8)
			out.Pix[offset+5] = uint8(value)
			value = clampUint16(rgba[3] / sum)
			out.Pix[offset+6] = uint8(value >> 8)
			out.Pix[offset+7] = uint8(value)
		}
	}
}
// resizeRGBA resamples one axis of an RGBA image with direct Pix access.
// coeffs holds filterLength fixed-point weights per output position and
// offset the window start index per position.
func resizeRGBA(in *image.RGBA, out *image.RGBA, scale float64, coeffs []int16, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		row := in.Pix[x*in.Stride:]
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var rgba [4]int32
			var sum int32
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				coeff := coeffs[ci+i]
				if coeff != 0 {
					// Clamp xi into [0, maxX], then scale to a 4-byte pixel
					// offset. A negative xi wraps to a huge uint in the first
					// case, falling through to the default (0) branch.
					xi := start + i
					switch {
					case uint(xi) < uint(maxX):
						xi *= 4
					case xi >= maxX:
						xi = 4 * maxX
					default:
						xi = 0
					}
					rgba[0] += int32(coeff) * int32(row[xi+0])
					rgba[1] += int32(coeff) * int32(row[xi+1])
					rgba[2] += int32(coeff) * int32(row[xi+2])
					rgba[3] += int32(coeff) * int32(row[xi+3])
					sum += int32(coeff)
				}
			}
			// Normalize by the weight sum and store 8-bit channels.
			xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*4
			out.Pix[xo+0] = clampUint8(rgba[0] / sum)
			out.Pix[xo+1] = clampUint8(rgba[1] / sum)
			out.Pix[xo+2] = clampUint8(rgba[2] / sum)
			out.Pix[xo+3] = clampUint8(rgba[3] / sum)
		}
	}
}
// resizeNRGBA resamples one axis of a non-premultiplied NRGBA image,
// premultiplying each sample by its alpha before filtering so the output
// (an *image.RGBA) holds premultiplied values.
func resizeNRGBA(in *image.NRGBA, out *image.RGBA, scale float64, coeffs []int16, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		row := in.Pix[x*in.Stride:]
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var rgba [4]int32
			var sum int32
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				coeff := coeffs[ci+i]
				if coeff != 0 {
					// Clamp xi into [0, maxX] and scale to a 4-byte offset;
					// negative xi wraps to a huge uint and hits the default.
					xi := start + i
					switch {
					case uint(xi) < uint(maxX):
						xi *= 4
					case xi >= maxX:
						xi = 4 * maxX
					default:
						xi = 0
					}
					// Forward alpha-premultiplication
					a := int32(row[xi+3])
					r := int32(row[xi+0]) * a
					r /= 0xff
					g := int32(row[xi+1]) * a
					g /= 0xff
					b := int32(row[xi+2]) * a
					b /= 0xff
					rgba[0] += int32(coeff) * r
					rgba[1] += int32(coeff) * g
					rgba[2] += int32(coeff) * b
					rgba[3] += int32(coeff) * a
					sum += int32(coeff)
				}
			}
			// Normalize by the weight sum and store 8-bit channels.
			xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*4
			out.Pix[xo+0] = clampUint8(rgba[0] / sum)
			out.Pix[xo+1] = clampUint8(rgba[1] / sum)
			out.Pix[xo+2] = clampUint8(rgba[2] / sum)
			out.Pix[xo+3] = clampUint8(rgba[3] / sum)
		}
	}
}
// resizeRGBA64 resamples one axis of a 16-bit-per-channel RGBA64 image.
// Channels are stored big-endian (high byte first) in Pix, so each sample is
// reassembled from two bytes and the result is split back the same way.
func resizeRGBA64(in *image.RGBA64, out *image.RGBA64, scale float64, coeffs []int32, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		row := in.Pix[x*in.Stride:]
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var rgba [4]int64
			var sum int64
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				coeff := coeffs[ci+i]
				if coeff != 0 {
					// Clamp xi into [0, maxX] and scale to an 8-byte offset;
					// negative xi wraps to a huge uint and hits the default.
					xi := start + i
					switch {
					case uint(xi) < uint(maxX):
						xi *= 8
					case xi >= maxX:
						xi = 8 * maxX
					default:
						xi = 0
					}
					rgba[0] += int64(coeff) * (int64(row[xi+0])<<8 | int64(row[xi+1]))
					rgba[1] += int64(coeff) * (int64(row[xi+2])<<8 | int64(row[xi+3]))
					rgba[2] += int64(coeff) * (int64(row[xi+4])<<8 | int64(row[xi+5]))
					rgba[3] += int64(coeff) * (int64(row[xi+6])<<8 | int64(row[xi+7]))
					sum += int64(coeff)
				}
			}
			// Normalize and store big-endian 16-bit channels.
			xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*8
			value := clampUint16(rgba[0] / sum)
			out.Pix[xo+0] = uint8(value >> 8)
			out.Pix[xo+1] = uint8(value)
			value = clampUint16(rgba[1] / sum)
			out.Pix[xo+2] = uint8(value >> 8)
			out.Pix[xo+3] = uint8(value)
			value = clampUint16(rgba[2] / sum)
			out.Pix[xo+4] = uint8(value >> 8)
			out.Pix[xo+5] = uint8(value)
			value = clampUint16(rgba[3] / sum)
			out.Pix[xo+6] = uint8(value >> 8)
			out.Pix[xo+7] = uint8(value)
		}
	}
}
// resizeNRGBA64 resamples one axis of a non-premultiplied 16-bit NRGBA64
// image, premultiplying each big-endian 16-bit sample by its alpha before
// filtering; the output (*image.RGBA64) therefore holds premultiplied values.
func resizeNRGBA64(in *image.NRGBA64, out *image.RGBA64, scale float64, coeffs []int32, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		row := in.Pix[x*in.Stride:]
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var rgba [4]int64
			var sum int64
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				coeff := coeffs[ci+i]
				if coeff != 0 {
					// Clamp xi into [0, maxX] and scale to an 8-byte offset;
					// negative xi wraps to a huge uint and hits the default.
					xi := start + i
					switch {
					case uint(xi) < uint(maxX):
						xi *= 8
					case xi >= maxX:
						xi = 8 * maxX
					default:
						xi = 0
					}
					// Forward alpha-premultiplication
					a := int64(uint16(row[xi+6])<<8 | uint16(row[xi+7]))
					r := int64(uint16(row[xi+0])<<8|uint16(row[xi+1])) * a
					r /= 0xffff
					g := int64(uint16(row[xi+2])<<8|uint16(row[xi+3])) * a
					g /= 0xffff
					b := int64(uint16(row[xi+4])<<8|uint16(row[xi+5])) * a
					b /= 0xffff
					rgba[0] += int64(coeff) * r
					rgba[1] += int64(coeff) * g
					rgba[2] += int64(coeff) * b
					rgba[3] += int64(coeff) * a
					sum += int64(coeff)
				}
			}
			// Normalize and store big-endian 16-bit channels.
			xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*8
			value := clampUint16(rgba[0] / sum)
			out.Pix[xo+0] = uint8(value >> 8)
			out.Pix[xo+1] = uint8(value)
			value = clampUint16(rgba[1] / sum)
			out.Pix[xo+2] = uint8(value >> 8)
			out.Pix[xo+3] = uint8(value)
			value = clampUint16(rgba[2] / sum)
			out.Pix[xo+4] = uint8(value >> 8)
			out.Pix[xo+5] = uint8(value)
			value = clampUint16(rgba[3] / sum)
			out.Pix[xo+6] = uint8(value >> 8)
			out.Pix[xo+7] = uint8(value)
		}
	}
}
// resizeGray resamples one axis of an 8-bit grayscale image.
func resizeGray(in *image.Gray, out *image.Gray, scale float64, coeffs []int16, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		// NOTE(review): unlike the sibling resize* functions, the row index
		// here is taken relative to newBounds.Min.X rather than absolute x —
		// presumably to handle non-zero-origin bounds; confirm against
		// upstream history before unifying.
		row := in.Pix[(x-newBounds.Min.X)*in.Stride:]
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var gray int32
			var sum int32
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				coeff := coeffs[ci+i]
				if coeff != 0 {
					// Clamp the sample index into [0, maxX].
					xi := start + i
					switch {
					case xi < 0:
						xi = 0
					case xi >= maxX:
						xi = maxX
					}
					gray += int32(coeff) * int32(row[xi])
					sum += int32(coeff)
				}
			}
			// Normalize by the weight sum and store the 8-bit value.
			offset := (y-newBounds.Min.Y)*out.Stride + (x - newBounds.Min.X)
			out.Pix[offset] = clampUint8(gray / sum)
		}
	}
}
// resizeGray16 resamples one axis of a 16-bit grayscale image. Samples are
// stored big-endian (high byte first) in Pix.
func resizeGray16(in *image.Gray16, out *image.Gray16, scale float64, coeffs []int32, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		row := in.Pix[x*in.Stride:]
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var gray int64
			var sum int64
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				coeff := coeffs[ci+i]
				if coeff != 0 {
					// Clamp xi into [0, maxX] and scale to a 2-byte offset;
					// negative xi wraps to a huge uint and hits the default.
					xi := start + i
					switch {
					case uint(xi) < uint(maxX):
						xi *= 2
					case xi >= maxX:
						xi = 2 * maxX
					default:
						xi = 0
					}
					gray += int64(coeff) * int64(uint16(row[xi+0])<<8|uint16(row[xi+1]))
					sum += int64(coeff)
				}
			}
			// Normalize and store the big-endian 16-bit value.
			offset := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*2
			value := clampUint16(gray / sum)
			out.Pix[offset+0] = uint8(value >> 8)
			out.Pix[offset+1] = uint8(value)
		}
	}
}
// resizeYCbCr resamples one axis of a packed YCbCr image (the package's
// internal ycc type, three interleaved bytes per pixel).
func resizeYCbCr(in *ycc, out *ycc, scale float64, coeffs []int16, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		row := in.Pix[x*in.Stride:]
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var p [3]int32
			var sum int32
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				coeff := coeffs[ci+i]
				if coeff != 0 {
					// Clamp xi into [0, maxX] and scale to a 3-byte offset;
					// negative xi wraps to a huge uint and hits the default.
					xi := start + i
					switch {
					case uint(xi) < uint(maxX):
						xi *= 3
					case xi >= maxX:
						xi = 3 * maxX
					default:
						xi = 0
					}
					p[0] += int32(coeff) * int32(row[xi+0])
					p[1] += int32(coeff) * int32(row[xi+1])
					p[2] += int32(coeff) * int32(row[xi+2])
					sum += int32(coeff)
				}
			}
			// Normalize by the weight sum and store the three channels.
			xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*3
			out.Pix[xo+0] = clampUint8(p[0] / sum)
			out.Pix[xo+1] = clampUint8(p[1] / sum)
			out.Pix[xo+2] = clampUint8(p[2] / sum)
		}
	}
}
// nearestYCbCr resamples one axis of a packed YCbCr image with
// nearest-neighbor weights: coeffs marks which taps contribute, and the
// selected samples are averaged equally.
func nearestYCbCr(in *ycc, out *ycc, scale float64, coeffs []bool, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		row := in.Pix[x*in.Stride:]
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var p [3]float32
			var sum float32
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				if coeffs[ci+i] {
					// Clamp xi into [0, maxX] and scale to a 3-byte offset;
					// negative xi wraps to a huge uint and hits the default.
					xi := start + i
					switch {
					case uint(xi) < uint(maxX):
						xi *= 3
					case xi >= maxX:
						xi = 3 * maxX
					default:
						xi = 0
					}
					p[0] += float32(row[xi+0])
					p[1] += float32(row[xi+1])
					p[2] += float32(row[xi+2])
					sum++
				}
			}
			// Average the contributing samples and store the three channels.
			xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*3
			out.Pix[xo+0] = floatToUint8(p[0] / sum)
			out.Pix[xo+1] = floatToUint8(p[1] / sum)
			out.Pix[xo+2] = floatToUint8(p[2] / sum)
		}
	}
}

143
vendor/github.com/nfnt/resize/filters.go generated vendored Normal file
View file

@ -0,0 +1,143 @@
/*
Copyright (c) 2012, Jan Schlicht <jan.schlicht@gmail.com>
Permission to use, copy, modify, and/or distribute this software for any purpose
with or without fee is hereby granted, provided that the above copyright notice
and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
THIS SOFTWARE.
*/
package resize
import (
"math"
)
// nearest is the box kernel: 1 inside [-0.5, 0.5), 0 elsewhere.
func nearest(in float64) float64 {
	if in < -0.5 || in >= 0.5 {
		return 0
	}
	return 1
}
// linear is the triangle (tent) kernel with support [-1, 1].
func linear(in float64) float64 {
	in = math.Abs(in)
	if in > 1 {
		return 0
	}
	return 1 - in
}
// cubic is a bicubic convolution kernel with support [-2, 2].
func cubic(in float64) float64 {
	in = math.Abs(in)
	switch {
	case in <= 1:
		return in*in*(1.5*in-2.5) + 1.0
	case in <= 2:
		return in*(in*(2.5-0.5*in)-4.0) + 2.0
	default:
		return 0
	}
}
// mitchellnetravali is the Mitchell-Netravali kernel (B=C=1/3) with
// support [-2, 2].
func mitchellnetravali(in float64) float64 {
	in = math.Abs(in)
	switch {
	case in <= 1:
		return (7.0*in*in*in - 12.0*in*in + 5.33333333333) * 0.16666666666
	case in <= 2:
		return (-2.33333333333*in*in*in + 12.0*in*in - 20.0*in + 10.6666666667) * 0.16666666666
	default:
		return 0
	}
}
// sinc computes the normalized sinc function sin(pi*x)/(pi*x), returning 1
// near zero to avoid division by a vanishing denominator.
func sinc(x float64) float64 {
	x = math.Abs(x) * math.Pi
	if x < 1.220703e-4 {
		return 1
	}
	return math.Sin(x) / x
}
// lanczos2 is the Lanczos kernel with a=2: sinc windowed by a wider sinc,
// zero outside (-2, 2).
func lanczos2(in float64) float64 {
	if in <= -2 || in >= 2 {
		return 0
	}
	return sinc(in) * sinc(in*0.5)
}
// lanczos3 is the Lanczos kernel with a=3: sinc windowed by a wider sinc,
// zero outside (-3, 3).
func lanczos3(in float64) float64 {
	if in <= -3 || in >= 3 {
		return 0
	}
	return sinc(in) * sinc(in*0.3333333333333333)
}
// createWeights8 precomputes filter weights as 8.8 fixed point in the
// range [-256,256]. For each of the dy output positions it evaluates kernel
// at filterLength taps and records the input index the window starts at.
// Returns the coefficients, the per-position start indices, and the
// (possibly blur/scale-widened) filter length.
func createWeights8(dy, filterLength int, blur, scale float64, kernel func(float64) float64) ([]int16, []int, int) {
	// Widen the window when downscaling/blurring, and shrink the kernel's
	// effective frequency response accordingly.
	filterLength = filterLength * int(math.Max(math.Ceil(blur*scale), 1))
	filterFactor := math.Min(1./(blur*scale), 1)
	coeffs := make([]int16, dy*filterLength)
	start := make([]int, dy)
	for out := 0; out < dy; out++ {
		// Center of this output sample in input coordinates.
		center := scale*(float64(out)+0.5) - 0.5
		start[out] = int(center) - filterLength/2 + 1
		center -= float64(start[out])
		base := out * filterLength
		for tap := 0; tap < filterLength; tap++ {
			coeffs[base+tap] = int16(kernel((center-float64(tap))*filterFactor) * 256)
		}
	}
	return coeffs, start, filterLength
}
// createWeights16 precomputes filter weights as 16.16 fixed point in the
// range [-65536,65536]. Identical layout to createWeights8 but with the
// wider coefficient type used by the 16-bit-per-channel resizers.
func createWeights16(dy, filterLength int, blur, scale float64, kernel func(float64) float64) ([]int32, []int, int) {
	// Widen the window when downscaling/blurring, and shrink the kernel's
	// effective frequency response accordingly.
	filterLength = filterLength * int(math.Max(math.Ceil(blur*scale), 1))
	filterFactor := math.Min(1./(blur*scale), 1)
	coeffs := make([]int32, dy*filterLength)
	start := make([]int, dy)
	for out := 0; out < dy; out++ {
		// Center of this output sample in input coordinates.
		center := scale*(float64(out)+0.5) - 0.5
		start[out] = int(center) - filterLength/2 + 1
		center -= float64(start[out])
		base := out * filterLength
		for tap := 0; tap < filterLength; tap++ {
			coeffs[base+tap] = int32(kernel((center-float64(tap))*filterFactor) * 65536)
		}
	}
	return coeffs, start, filterLength
}
// createWeightsNearest precomputes boolean nearest-neighbor weights: a tap is
// true when it falls inside the half-open box [-0.5, 0.5) around the output
// sample's center. Layout matches createWeights8/16.
func createWeightsNearest(dy, filterLength int, blur, scale float64) ([]bool, []int, int) {
	// Widen the window when downscaling/blurring, and shrink the box's
	// effective width accordingly.
	filterLength = filterLength * int(math.Max(math.Ceil(blur*scale), 1))
	filterFactor := math.Min(1./(blur*scale), 1)
	coeffs := make([]bool, dy*filterLength)
	start := make([]int, dy)
	for out := 0; out < dy; out++ {
		// Center of this output sample in input coordinates.
		center := scale*(float64(out)+0.5) - 0.5
		start[out] = int(center) - filterLength/2 + 1
		center -= float64(start[out])
		base := out * filterLength
		for tap := 0; tap < filterLength; tap++ {
			in := (center - float64(tap)) * filterFactor
			coeffs[base+tap] = in >= -0.5 && in < 0.5
		}
	}
	return coeffs, start, filterLength
}

318
vendor/github.com/nfnt/resize/nearest.go generated vendored Normal file
View file

@ -0,0 +1,318 @@
/*
Copyright (c) 2014, Charlie Vieth <charlie.vieth@gmail.com>
Permission to use, copy, modify, and/or distribute this software for any purpose
with or without fee is hereby granted, provided that the above copyright notice
and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
THIS SOFTWARE.
*/
package resize
import "image"
// floatToUint8 truncates a float sample to a uint8, saturating at 0xff.
// Nearest-neighbor averages are never negative, so no lower clamp is needed.
func floatToUint8(x float32) uint8 {
	if x <= 0xfe {
		return uint8(x)
	}
	return 0xff
}
// floatToUint16 truncates a float sample to a uint16, saturating at 0xffff.
// Inputs are non-negative averages, so only the upper bound is checked.
func floatToUint16(x float32) uint16 {
	if x <= 0xfffe {
		return uint16(x)
	}
	return 0xffff
}
// nearestGeneric is the nearest-neighbor filter pass for an arbitrary
// image.Image, writing 16-bit RGBA output. The output is the transpose of
// the input: the outer loop over output columns x walks input rows, while
// coeffs/offset (indexed by output row y) select which horizontal input
// samples are averaged. scale is unused; it is kept for signature parity
// with the other filter passes.
func nearestGeneric(in image.Image, out *image.RGBA64, scale float64, coeffs []bool, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var rgba [4]float32
			var sum float32
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				if coeffs[ci+i] {
					// Clamp the sample index to the valid range.
					xi := start + i
					switch {
					case xi < 0:
						xi = 0
					case xi >= maxX:
						xi = maxX
					}
					r, g, b, a := in.At(xi+in.Bounds().Min.X, x+in.Bounds().Min.Y).RGBA()
					rgba[0] += float32(r)
					rgba[1] += float32(g)
					rgba[2] += float32(b)
					rgba[3] += float32(a)
					sum++
				}
			}
			// NOTE: this shadows the offset parameter (already consumed
			// above); here it is the byte offset into out.Pix (8 bytes per
			// 16-bit RGBA pixel, big-endian channel pairs).
			offset := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*8
			value := floatToUint16(rgba[0] / sum)
			out.Pix[offset+0] = uint8(value >> 8)
			out.Pix[offset+1] = uint8(value)
			value = floatToUint16(rgba[1] / sum)
			out.Pix[offset+2] = uint8(value >> 8)
			out.Pix[offset+3] = uint8(value)
			value = floatToUint16(rgba[2] / sum)
			out.Pix[offset+4] = uint8(value >> 8)
			out.Pix[offset+5] = uint8(value)
			value = floatToUint16(rgba[3] / sum)
			out.Pix[offset+6] = uint8(value >> 8)
			out.Pix[offset+7] = uint8(value)
		}
	}
}
// nearestRGBA is the nearest-neighbor filter pass specialized for
// *image.RGBA. The output is the transpose of the input, so the outer loop
// over output columns x reads input row x directly from the Pix slice.
// scale is unused; it is kept for signature parity with the other passes.
func nearestRGBA(in *image.RGBA, out *image.RGBA, scale float64, coeffs []bool, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		row := in.Pix[x*in.Stride:]
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var rgba [4]float32
			var sum float32
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				if coeffs[ci+i] {
					xi := start + i
					// Clamp xi to [0, maxX] and turn it into a byte offset
					// (4 bytes per pixel). A negative xi becomes a huge
					// value under uint conversion, so it falls through to
					// the default 0 case.
					switch {
					case uint(xi) < uint(maxX):
						xi *= 4
					case xi >= maxX:
						xi = 4 * maxX
					default:
						xi = 0
					}
					rgba[0] += float32(row[xi+0])
					rgba[1] += float32(row[xi+1])
					rgba[2] += float32(row[xi+2])
					rgba[3] += float32(row[xi+3])
					sum++
				}
			}
			xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*4
			out.Pix[xo+0] = floatToUint8(rgba[0] / sum)
			out.Pix[xo+1] = floatToUint8(rgba[1] / sum)
			out.Pix[xo+2] = floatToUint8(rgba[2] / sum)
			out.Pix[xo+3] = floatToUint8(rgba[3] / sum)
		}
	}
}
// nearestNRGBA is the nearest-neighbor filter pass specialized for
// *image.NRGBA (non-premultiplied alpha); identical indexing to
// nearestRGBA. scale is unused; kept for signature parity.
func nearestNRGBA(in *image.NRGBA, out *image.NRGBA, scale float64, coeffs []bool, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		row := in.Pix[x*in.Stride:]
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var rgba [4]float32
			var sum float32
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				if coeffs[ci+i] {
					xi := start + i
					// Clamp to [0, maxX] and scale to a 4-byte pixel offset;
					// negative xi wraps under uint and hits the default case.
					switch {
					case uint(xi) < uint(maxX):
						xi *= 4
					case xi >= maxX:
						xi = 4 * maxX
					default:
						xi = 0
					}
					rgba[0] += float32(row[xi+0])
					rgba[1] += float32(row[xi+1])
					rgba[2] += float32(row[xi+2])
					rgba[3] += float32(row[xi+3])
					sum++
				}
			}
			xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*4
			out.Pix[xo+0] = floatToUint8(rgba[0] / sum)
			out.Pix[xo+1] = floatToUint8(rgba[1] / sum)
			out.Pix[xo+2] = floatToUint8(rgba[2] / sum)
			out.Pix[xo+3] = floatToUint8(rgba[3] / sum)
		}
	}
}
// nearestRGBA64 is the nearest-neighbor filter pass specialized for
// *image.RGBA64. Each pixel is 8 bytes: four big-endian 16-bit channels.
// scale is unused; kept for signature parity.
func nearestRGBA64(in *image.RGBA64, out *image.RGBA64, scale float64, coeffs []bool, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		row := in.Pix[x*in.Stride:]
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var rgba [4]float32
			var sum float32
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				if coeffs[ci+i] {
					xi := start + i
					// Clamp to [0, maxX] and scale to an 8-byte pixel offset;
					// negative xi wraps under uint and hits the default case.
					switch {
					case uint(xi) < uint(maxX):
						xi *= 8
					case xi >= maxX:
						xi = 8 * maxX
					default:
						xi = 0
					}
					// Reassemble each big-endian 16-bit channel before summing.
					rgba[0] += float32(uint16(row[xi+0])<<8 | uint16(row[xi+1]))
					rgba[1] += float32(uint16(row[xi+2])<<8 | uint16(row[xi+3]))
					rgba[2] += float32(uint16(row[xi+4])<<8 | uint16(row[xi+5]))
					rgba[3] += float32(uint16(row[xi+6])<<8 | uint16(row[xi+7]))
					sum++
				}
			}
			xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*8
			value := floatToUint16(rgba[0] / sum)
			out.Pix[xo+0] = uint8(value >> 8)
			out.Pix[xo+1] = uint8(value)
			value = floatToUint16(rgba[1] / sum)
			out.Pix[xo+2] = uint8(value >> 8)
			out.Pix[xo+3] = uint8(value)
			value = floatToUint16(rgba[2] / sum)
			out.Pix[xo+4] = uint8(value >> 8)
			out.Pix[xo+5] = uint8(value)
			value = floatToUint16(rgba[3] / sum)
			out.Pix[xo+6] = uint8(value >> 8)
			out.Pix[xo+7] = uint8(value)
		}
	}
}
// nearestNRGBA64 is the nearest-neighbor filter pass specialized for
// *image.NRGBA64 (non-premultiplied); identical indexing to nearestRGBA64.
// scale is unused; kept for signature parity.
func nearestNRGBA64(in *image.NRGBA64, out *image.NRGBA64, scale float64, coeffs []bool, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		row := in.Pix[x*in.Stride:]
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var rgba [4]float32
			var sum float32
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				if coeffs[ci+i] {
					xi := start + i
					// Clamp to [0, maxX] and scale to an 8-byte pixel offset;
					// negative xi wraps under uint and hits the default case.
					switch {
					case uint(xi) < uint(maxX):
						xi *= 8
					case xi >= maxX:
						xi = 8 * maxX
					default:
						xi = 0
					}
					// Reassemble each big-endian 16-bit channel before summing.
					rgba[0] += float32(uint16(row[xi+0])<<8 | uint16(row[xi+1]))
					rgba[1] += float32(uint16(row[xi+2])<<8 | uint16(row[xi+3]))
					rgba[2] += float32(uint16(row[xi+4])<<8 | uint16(row[xi+5]))
					rgba[3] += float32(uint16(row[xi+6])<<8 | uint16(row[xi+7]))
					sum++
				}
			}
			xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*8
			value := floatToUint16(rgba[0] / sum)
			out.Pix[xo+0] = uint8(value >> 8)
			out.Pix[xo+1] = uint8(value)
			value = floatToUint16(rgba[1] / sum)
			out.Pix[xo+2] = uint8(value >> 8)
			out.Pix[xo+3] = uint8(value)
			value = floatToUint16(rgba[2] / sum)
			out.Pix[xo+4] = uint8(value >> 8)
			out.Pix[xo+5] = uint8(value)
			value = floatToUint16(rgba[3] / sum)
			out.Pix[xo+6] = uint8(value >> 8)
			out.Pix[xo+7] = uint8(value)
		}
	}
}
// nearestGray is the nearest-neighbor filter pass specialized for
// *image.Gray (1 byte per pixel). scale is unused; kept for signature
// parity with the other passes.
func nearestGray(in *image.Gray, out *image.Gray, scale float64, coeffs []bool, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		row := in.Pix[x*in.Stride:]
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var gray float32
			var sum float32
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				if coeffs[ci+i] {
					// Clamp the sample index to the valid range.
					xi := start + i
					switch {
					case xi < 0:
						xi = 0
					case xi >= maxX:
						xi = maxX
					}
					gray += float32(row[xi])
					sum++
				}
			}
			// NOTE: shadows the offset parameter (already consumed above);
			// here it is the byte offset of the output pixel.
			offset := (y-newBounds.Min.Y)*out.Stride + (x - newBounds.Min.X)
			out.Pix[offset] = floatToUint8(gray / sum)
		}
	}
}
// nearestGray16 is the nearest-neighbor filter pass specialized for
// *image.Gray16 (one big-endian 16-bit sample per pixel). scale is unused;
// kept for signature parity with the other passes.
func nearestGray16(in *image.Gray16, out *image.Gray16, scale float64, coeffs []bool, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		row := in.Pix[x*in.Stride:]
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var gray float32
			var sum float32
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				if coeffs[ci+i] {
					xi := start + i
					// Clamp to [0, maxX] and scale to a 2-byte pixel offset;
					// negative xi wraps under uint and hits the default case.
					switch {
					case uint(xi) < uint(maxX):
						xi *= 2
					case xi >= maxX:
						xi = 2 * maxX
					default:
						xi = 0
					}
					gray += float32(uint16(row[xi+0])<<8 | uint16(row[xi+1]))
					sum++
				}
			}
			// NOTE: shadows the offset parameter (already consumed above).
			offset := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*2
			value := floatToUint16(gray / sum)
			out.Pix[offset+0] = uint8(value >> 8)
			out.Pix[offset+1] = uint8(value)
		}
	}
}

620
vendor/github.com/nfnt/resize/resize.go generated vendored Normal file
View file

@ -0,0 +1,620 @@
/*
Copyright (c) 2012, Jan Schlicht <jan.schlicht@gmail.com>
Permission to use, copy, modify, and/or distribute this software for any purpose
with or without fee is hereby granted, provided that the above copyright notice
and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
THIS SOFTWARE.
*/
// Package resize implements various image resizing methods.
//
// The package works with the Image interface described in the image package.
// Various interpolation methods are provided and multiple processors may be
// utilized in the computations.
//
// Example:
// imgResized := resize.Resize(1000, 0, imgOld, resize.MitchellNetravali)
package resize
import (
"image"
"runtime"
"sync"
)
// An InterpolationFunction provides the parameters that describe an
// interpolation kernel. It returns the number of samples to take
// and the kernel function to use for sampling.
type InterpolationFunction int

// InterpolationFunction constants
const (
	// Nearest-neighbor interpolation
	NearestNeighbor InterpolationFunction = iota
	// Bilinear interpolation
	Bilinear
	// Bicubic interpolation (with cubic hermite spline)
	Bicubic
	// Mitchell-Netravali interpolation
	MitchellNetravali
	// Lanczos interpolation (a=2)
	Lanczos2
	// Lanczos interpolation (a=3)
	Lanczos3
)
// kernel returns an InterpolationFunction's number of taps and its kernel
// function. Unrecognized values fall back to the nearest-neighbor kernel.
func (i InterpolationFunction) kernel() (int, func(float64) float64) {
	switch i {
	case Bilinear:
		return 2, linear
	case Bicubic:
		return 4, cubic
	case MitchellNetravali:
		return 4, mitchellnetravali
	case Lanczos2:
		return 4, lanczos2
	case Lanczos3:
		return 6, lanczos3
	default:
		// Default to NearestNeighbor.
		return 2, nearest
	}
}
// blur is a package-wide factor applied to every kernel's width;
// values <1 will sharpen the image, values >1 will soften it.
var blur = 1.0
// Resize scales an image to new width and height using the interpolation function interp.
// A new image with the given dimensions will be returned.
// If one of the parameters width or height is set to 0, its size will be calculated so that
// the aspect ratio is that of the originating image.
// The resizing algorithm uses channels for parallel computation.
// If the input image has width or height of 0, it is returned unchanged.
func Resize(width, height uint, img image.Image, interp InterpolationFunction) image.Image {
	scaleX, scaleY := calcFactors(width, height, float64(img.Bounds().Dx()), float64(img.Bounds().Dy()))
	if width == 0 {
		// +0.7 rounds the computed dimension (NOTE(review): presumably
		// chosen to avoid off-by-one results from float truncation).
		width = uint(0.7 + float64(img.Bounds().Dx())/scaleX)
	}
	if height == 0 {
		height = uint(0.7 + float64(img.Bounds().Dy())/scaleY)
	}
	// Trivial case: return input image
	if int(width) == img.Bounds().Dx() && int(height) == img.Bounds().Dy() {
		return img
	}
	// Input image has no pixels
	if img.Bounds().Dx() <= 0 || img.Bounds().Dy() <= 0 {
		return img
	}
	if interp == NearestNeighbor {
		return resizeNearest(width, height, scaleX, scaleY, img, interp)
	}
	taps, kernel := interp.kernel()
	cpus := runtime.GOMAXPROCS(0)
	wg := sync.WaitGroup{}
	// Generic access to image.Image is slow in tight loops.
	// The optimal access has to be determined from the concrete image type.
	//
	// Every case below runs two horizontal filter passes, each producing
	// the transpose of its input (so two passes restore orientation), and
	// splits each pass into one horizontal slice per CPU via makeSlice.
	switch input := img.(type) {
	case *image.RGBA:
		// 8-bit precision
		temp := image.NewRGBA(image.Rect(0, 0, input.Bounds().Dy(), int(width)))
		result := image.NewRGBA(image.Rect(0, 0, int(width), int(height)))
		// horizontal filter, results in transposed temporary image
		coeffs, offset, filterLength := createWeights8(temp.Bounds().Dy(), taps, blur, scaleX, kernel)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(temp, i, cpus).(*image.RGBA)
			go func() {
				defer wg.Done()
				resizeRGBA(input, slice, scaleX, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		// horizontal filter on transposed image, result is not transposed
		coeffs, offset, filterLength = createWeights8(result.Bounds().Dy(), taps, blur, scaleY, kernel)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(result, i, cpus).(*image.RGBA)
			go func() {
				defer wg.Done()
				resizeRGBA(temp, slice, scaleY, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		return result
	case *image.NRGBA:
		// 8-bit precision
		// The intermediate and result are RGBA here; the second pass
		// therefore reuses resizeRGBA on the temp image.
		temp := image.NewRGBA(image.Rect(0, 0, input.Bounds().Dy(), int(width)))
		result := image.NewRGBA(image.Rect(0, 0, int(width), int(height)))
		// horizontal filter, results in transposed temporary image
		coeffs, offset, filterLength := createWeights8(temp.Bounds().Dy(), taps, blur, scaleX, kernel)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(temp, i, cpus).(*image.RGBA)
			go func() {
				defer wg.Done()
				resizeNRGBA(input, slice, scaleX, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		// horizontal filter on transposed image, result is not transposed
		coeffs, offset, filterLength = createWeights8(result.Bounds().Dy(), taps, blur, scaleY, kernel)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(result, i, cpus).(*image.RGBA)
			go func() {
				defer wg.Done()
				resizeRGBA(temp, slice, scaleY, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		return result
	case *image.YCbCr:
		// 8-bit precision
		// accessing the YCbCr arrays in a tight loop is slow.
		// converting the image to ycc increases performance by 2x.
		temp := newYCC(image.Rect(0, 0, input.Bounds().Dy(), int(width)), input.SubsampleRatio)
		result := newYCC(image.Rect(0, 0, int(width), int(height)), image.YCbCrSubsampleRatio444)
		coeffs, offset, filterLength := createWeights8(temp.Bounds().Dy(), taps, blur, scaleX, kernel)
		in := imageYCbCrToYCC(input)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(temp, i, cpus).(*ycc)
			go func() {
				defer wg.Done()
				resizeYCbCr(in, slice, scaleX, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		coeffs, offset, filterLength = createWeights8(result.Bounds().Dy(), taps, blur, scaleY, kernel)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(result, i, cpus).(*ycc)
			go func() {
				defer wg.Done()
				resizeYCbCr(temp, slice, scaleY, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		return result.YCbCr()
	case *image.RGBA64:
		// 16-bit precision
		temp := image.NewRGBA64(image.Rect(0, 0, input.Bounds().Dy(), int(width)))
		result := image.NewRGBA64(image.Rect(0, 0, int(width), int(height)))
		// horizontal filter, results in transposed temporary image
		coeffs, offset, filterLength := createWeights16(temp.Bounds().Dy(), taps, blur, scaleX, kernel)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(temp, i, cpus).(*image.RGBA64)
			go func() {
				defer wg.Done()
				resizeRGBA64(input, slice, scaleX, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		// horizontal filter on transposed image, result is not transposed
		coeffs, offset, filterLength = createWeights16(result.Bounds().Dy(), taps, blur, scaleY, kernel)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(result, i, cpus).(*image.RGBA64)
			go func() {
				defer wg.Done()
				resizeRGBA64(temp, slice, scaleY, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		return result
	case *image.NRGBA64:
		// 16-bit precision
		// As with NRGBA, the intermediate is RGBA64 and the second pass
		// reuses resizeRGBA64.
		temp := image.NewRGBA64(image.Rect(0, 0, input.Bounds().Dy(), int(width)))
		result := image.NewRGBA64(image.Rect(0, 0, int(width), int(height)))
		// horizontal filter, results in transposed temporary image
		coeffs, offset, filterLength := createWeights16(temp.Bounds().Dy(), taps, blur, scaleX, kernel)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(temp, i, cpus).(*image.RGBA64)
			go func() {
				defer wg.Done()
				resizeNRGBA64(input, slice, scaleX, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		// horizontal filter on transposed image, result is not transposed
		coeffs, offset, filterLength = createWeights16(result.Bounds().Dy(), taps, blur, scaleY, kernel)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(result, i, cpus).(*image.RGBA64)
			go func() {
				defer wg.Done()
				resizeRGBA64(temp, slice, scaleY, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		return result
	case *image.Gray:
		// 8-bit precision
		temp := image.NewGray(image.Rect(0, 0, input.Bounds().Dy(), int(width)))
		result := image.NewGray(image.Rect(0, 0, int(width), int(height)))
		// horizontal filter, results in transposed temporary image
		coeffs, offset, filterLength := createWeights8(temp.Bounds().Dy(), taps, blur, scaleX, kernel)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(temp, i, cpus).(*image.Gray)
			go func() {
				defer wg.Done()
				resizeGray(input, slice, scaleX, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		// horizontal filter on transposed image, result is not transposed
		coeffs, offset, filterLength = createWeights8(result.Bounds().Dy(), taps, blur, scaleY, kernel)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(result, i, cpus).(*image.Gray)
			go func() {
				defer wg.Done()
				resizeGray(temp, slice, scaleY, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		return result
	case *image.Gray16:
		// 16-bit precision
		temp := image.NewGray16(image.Rect(0, 0, input.Bounds().Dy(), int(width)))
		result := image.NewGray16(image.Rect(0, 0, int(width), int(height)))
		// horizontal filter, results in transposed temporary image
		coeffs, offset, filterLength := createWeights16(temp.Bounds().Dy(), taps, blur, scaleX, kernel)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(temp, i, cpus).(*image.Gray16)
			go func() {
				defer wg.Done()
				resizeGray16(input, slice, scaleX, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		// horizontal filter on transposed image, result is not transposed
		coeffs, offset, filterLength = createWeights16(result.Bounds().Dy(), taps, blur, scaleY, kernel)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(result, i, cpus).(*image.Gray16)
			go func() {
				defer wg.Done()
				resizeGray16(temp, slice, scaleY, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		return result
	default:
		// 16-bit precision
		// Fallback for any other image.Image: the first pass goes through
		// the slow generic At() accessor, the second is a fast RGBA64 pass.
		temp := image.NewRGBA64(image.Rect(0, 0, img.Bounds().Dy(), int(width)))
		result := image.NewRGBA64(image.Rect(0, 0, int(width), int(height)))
		// horizontal filter, results in transposed temporary image
		coeffs, offset, filterLength := createWeights16(temp.Bounds().Dy(), taps, blur, scaleX, kernel)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(temp, i, cpus).(*image.RGBA64)
			go func() {
				defer wg.Done()
				resizeGeneric(img, slice, scaleX, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		// horizontal filter on transposed image, result is not transposed
		coeffs, offset, filterLength = createWeights16(result.Bounds().Dy(), taps, blur, scaleY, kernel)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(result, i, cpus).(*image.RGBA64)
			go func() {
				defer wg.Done()
				resizeRGBA64(temp, slice, scaleY, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		return result
	}
}
// resizeNearest is the NearestNeighbor counterpart of Resize: two
// transposing horizontal passes per image type, each split into one
// horizontal slice per CPU, using the boolean masks from
// createWeightsNearest instead of kernel weights.
func resizeNearest(width, height uint, scaleX, scaleY float64, img image.Image, interp InterpolationFunction) image.Image {
	taps, _ := interp.kernel()
	cpus := runtime.GOMAXPROCS(0)
	wg := sync.WaitGroup{}
	switch input := img.(type) {
	case *image.RGBA:
		// 8-bit precision
		temp := image.NewRGBA(image.Rect(0, 0, input.Bounds().Dy(), int(width)))
		result := image.NewRGBA(image.Rect(0, 0, int(width), int(height)))
		// horizontal filter, results in transposed temporary image
		coeffs, offset, filterLength := createWeightsNearest(temp.Bounds().Dy(), taps, blur, scaleX)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(temp, i, cpus).(*image.RGBA)
			go func() {
				defer wg.Done()
				nearestRGBA(input, slice, scaleX, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		// horizontal filter on transposed image, result is not transposed
		coeffs, offset, filterLength = createWeightsNearest(result.Bounds().Dy(), taps, blur, scaleY)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(result, i, cpus).(*image.RGBA)
			go func() {
				defer wg.Done()
				nearestRGBA(temp, slice, scaleY, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		return result
	case *image.NRGBA:
		// 8-bit precision
		// Unlike Resize, the NRGBA intermediate stays NRGBA here.
		temp := image.NewNRGBA(image.Rect(0, 0, input.Bounds().Dy(), int(width)))
		result := image.NewNRGBA(image.Rect(0, 0, int(width), int(height)))
		// horizontal filter, results in transposed temporary image
		coeffs, offset, filterLength := createWeightsNearest(temp.Bounds().Dy(), taps, blur, scaleX)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(temp, i, cpus).(*image.NRGBA)
			go func() {
				defer wg.Done()
				nearestNRGBA(input, slice, scaleX, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		// horizontal filter on transposed image, result is not transposed
		coeffs, offset, filterLength = createWeightsNearest(result.Bounds().Dy(), taps, blur, scaleY)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(result, i, cpus).(*image.NRGBA)
			go func() {
				defer wg.Done()
				nearestNRGBA(temp, slice, scaleY, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		return result
	case *image.YCbCr:
		// 8-bit precision
		// accessing the YCbCr arrays in a tight loop is slow.
		// converting the image to ycc increases performance by 2x.
		temp := newYCC(image.Rect(0, 0, input.Bounds().Dy(), int(width)), input.SubsampleRatio)
		result := newYCC(image.Rect(0, 0, int(width), int(height)), image.YCbCrSubsampleRatio444)
		coeffs, offset, filterLength := createWeightsNearest(temp.Bounds().Dy(), taps, blur, scaleX)
		in := imageYCbCrToYCC(input)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(temp, i, cpus).(*ycc)
			go func() {
				defer wg.Done()
				nearestYCbCr(in, slice, scaleX, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		coeffs, offset, filterLength = createWeightsNearest(result.Bounds().Dy(), taps, blur, scaleY)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(result, i, cpus).(*ycc)
			go func() {
				defer wg.Done()
				nearestYCbCr(temp, slice, scaleY, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		return result.YCbCr()
	case *image.RGBA64:
		// 16-bit precision
		temp := image.NewRGBA64(image.Rect(0, 0, input.Bounds().Dy(), int(width)))
		result := image.NewRGBA64(image.Rect(0, 0, int(width), int(height)))
		// horizontal filter, results in transposed temporary image
		coeffs, offset, filterLength := createWeightsNearest(temp.Bounds().Dy(), taps, blur, scaleX)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(temp, i, cpus).(*image.RGBA64)
			go func() {
				defer wg.Done()
				nearestRGBA64(input, slice, scaleX, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		// horizontal filter on transposed image, result is not transposed
		coeffs, offset, filterLength = createWeightsNearest(result.Bounds().Dy(), taps, blur, scaleY)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(result, i, cpus).(*image.RGBA64)
			go func() {
				defer wg.Done()
				nearestRGBA64(temp, slice, scaleY, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		return result
	case *image.NRGBA64:
		// 16-bit precision
		temp := image.NewNRGBA64(image.Rect(0, 0, input.Bounds().Dy(), int(width)))
		result := image.NewNRGBA64(image.Rect(0, 0, int(width), int(height)))
		// horizontal filter, results in transposed temporary image
		coeffs, offset, filterLength := createWeightsNearest(temp.Bounds().Dy(), taps, blur, scaleX)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(temp, i, cpus).(*image.NRGBA64)
			go func() {
				defer wg.Done()
				nearestNRGBA64(input, slice, scaleX, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		// horizontal filter on transposed image, result is not transposed
		coeffs, offset, filterLength = createWeightsNearest(result.Bounds().Dy(), taps, blur, scaleY)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(result, i, cpus).(*image.NRGBA64)
			go func() {
				defer wg.Done()
				nearestNRGBA64(temp, slice, scaleY, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		return result
	case *image.Gray:
		// 8-bit precision
		temp := image.NewGray(image.Rect(0, 0, input.Bounds().Dy(), int(width)))
		result := image.NewGray(image.Rect(0, 0, int(width), int(height)))
		// horizontal filter, results in transposed temporary image
		coeffs, offset, filterLength := createWeightsNearest(temp.Bounds().Dy(), taps, blur, scaleX)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(temp, i, cpus).(*image.Gray)
			go func() {
				defer wg.Done()
				nearestGray(input, slice, scaleX, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		// horizontal filter on transposed image, result is not transposed
		coeffs, offset, filterLength = createWeightsNearest(result.Bounds().Dy(), taps, blur, scaleY)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(result, i, cpus).(*image.Gray)
			go func() {
				defer wg.Done()
				nearestGray(temp, slice, scaleY, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		return result
	case *image.Gray16:
		// 16-bit precision
		temp := image.NewGray16(image.Rect(0, 0, input.Bounds().Dy(), int(width)))
		result := image.NewGray16(image.Rect(0, 0, int(width), int(height)))
		// horizontal filter, results in transposed temporary image
		coeffs, offset, filterLength := createWeightsNearest(temp.Bounds().Dy(), taps, blur, scaleX)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(temp, i, cpus).(*image.Gray16)
			go func() {
				defer wg.Done()
				nearestGray16(input, slice, scaleX, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		// horizontal filter on transposed image, result is not transposed
		coeffs, offset, filterLength = createWeightsNearest(result.Bounds().Dy(), taps, blur, scaleY)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(result, i, cpus).(*image.Gray16)
			go func() {
				defer wg.Done()
				nearestGray16(temp, slice, scaleY, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		return result
	default:
		// 16-bit precision
		// Fallback for any other image.Image: slow generic first pass,
		// fast RGBA64 second pass.
		temp := image.NewRGBA64(image.Rect(0, 0, img.Bounds().Dy(), int(width)))
		result := image.NewRGBA64(image.Rect(0, 0, int(width), int(height)))
		// horizontal filter, results in transposed temporary image
		coeffs, offset, filterLength := createWeightsNearest(temp.Bounds().Dy(), taps, blur, scaleX)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(temp, i, cpus).(*image.RGBA64)
			go func() {
				defer wg.Done()
				nearestGeneric(img, slice, scaleX, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		// horizontal filter on transposed image, result is not transposed
		coeffs, offset, filterLength = createWeightsNearest(result.Bounds().Dy(), taps, blur, scaleY)
		wg.Add(cpus)
		for i := 0; i < cpus; i++ {
			slice := makeSlice(result, i, cpus).(*image.RGBA64)
			go func() {
				defer wg.Done()
				nearestRGBA64(temp, slice, scaleY, coeffs, offset, filterLength)
			}()
		}
		wg.Wait()
		return result
	}
}
// calcFactors derives the horizontal and vertical scaling factors from the
// requested output size and the original dimensions. A zero width or height
// inherits the other axis' factor so the aspect ratio is preserved; when
// both are zero the scale is 1 on both axes.
func calcFactors(width, height uint, oldWidth, oldHeight float64) (scaleX, scaleY float64) {
	switch {
	case width == 0 && height == 0:
		return 1.0, 1.0
	case width == 0:
		scaleY = oldHeight / float64(height)
		return scaleY, scaleY
	case height == 0:
		scaleX = oldWidth / float64(width)
		return scaleX, scaleX
	default:
		return oldWidth / float64(width), oldHeight / float64(height)
	}
}
// imageWithSubImage is satisfied by every concrete image type used above;
// SubImage lets makeSlice hand each worker a view of one horizontal band.
type imageWithSubImage interface {
	image.Image
	SubImage(image.Rectangle) image.Image
}
// makeSlice returns the i-th of n horizontal bands of img as a shared-pixel
// sub-image, so n workers can write disjoint rows concurrently.
func makeSlice(img imageWithSubImage, i, n int) image.Image {
	return img.SubImage(image.Rect(img.Bounds().Min.X, img.Bounds().Min.Y+i*img.Bounds().Dy()/n, img.Bounds().Max.X, img.Bounds().Min.Y+(i+1)*img.Bounds().Dy()/n))
}

55
vendor/github.com/nfnt/resize/thumbnail.go generated vendored Normal file
View file

@ -0,0 +1,55 @@
/*
Copyright (c) 2012, Jan Schlicht <jan.schlicht@gmail.com>
Permission to use, copy, modify, and/or distribute this software for any purpose
with or without fee is hereby granted, provided that the above copyright notice
and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
THIS SOFTWARE.
*/
package resize
import (
"image"
)
// Thumbnail will downscale provided image to max width and height preserving
// original aspect ratio and using the interpolation function interp.
// It will return original image, without processing it, if original sizes
// are already smaller than provided constraints.
func Thumbnail(maxWidth, maxHeight uint, img image.Image, interp InterpolationFunction) image.Image {
	bounds := img.Bounds()
	origW := uint(bounds.Dx())
	origH := uint(bounds.Dy())

	// Nothing to do when the image already fits the constraints.
	if origW <= maxWidth && origH <= maxHeight {
		return img
	}

	newW, newH := origW, origH
	// Shrink horizontally first, scaling the height to keep the ratio.
	if newW > maxWidth {
		newH = origH * maxWidth / origW
		if newH < 1 {
			newH = 1
		}
		newW = maxWidth
	}
	// The result may still be too tall; shrink vertically the same way.
	if newH > maxHeight {
		newW = newW * maxHeight / newH
		if newW < 1 {
			newW = 1
		}
		newH = maxHeight
	}
	return Resize(newW, newH, img, interp)
}

387
vendor/github.com/nfnt/resize/ycc.go generated vendored Normal file
View file

@ -0,0 +1,387 @@
/*
Copyright (c) 2014, Charlie Vieth <charlie.vieth@gmail.com>
Permission to use, copy, modify, and/or distribute this software for any purpose
with or without fee is hereby granted, provided that the above copyright notice
and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
THIS SOFTWARE.
*/
package resize
import (
"image"
"image/color"
)
// ycc is an in memory YCbCr image. The Y, Cb and Cr samples are held in a
// single interleaved slice (one triple per pixel, i.e. effectively 4:4:4)
// to increase resizing performance.
type ycc struct {
	// Pix holds the image's pixels, in Y, Cb, Cr order. The pixel at
	// (x, y) starts at Pix[(y-Rect.Min.Y)*Stride + (x-Rect.Min.X)*3].
	Pix []uint8
	// Stride is the Pix stride (in bytes) between vertically adjacent pixels.
	Stride int
	// Rect is the image's bounds.
	Rect image.Rectangle
	// SubsampleRatio is the subsample ratio of the original YCbCr image.
	SubsampleRatio image.YCbCrSubsampleRatio
}
// PixOffset returns the index of the first element of Pix that corresponds to
// the pixel at (x, y). Each pixel occupies 3 bytes (Y, Cb, Cr).
func (p *ycc) PixOffset(x, y int) int {
	return (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*3
}
// Bounds returns the image's bounds, implementing image.Image.
func (p *ycc) Bounds() image.Rectangle {
	return p.Rect
}
// ColorModel returns the YCbCr color model, implementing image.Image.
func (p *ycc) ColorModel() color.Model {
	return color.YCbCrModel
}
// At returns the color of the pixel at (x, y); points outside the bounds
// yield the zero YCbCr value.
func (p *ycc) At(x, y int) color.Color {
	if !(image.Point{x, y}.In(p.Rect)) {
		return color.YCbCr{}
	}
	i := p.PixOffset(x, y)
	return color.YCbCr{
		p.Pix[i+0],
		p.Pix[i+1],
		p.Pix[i+2],
	}
}
// Opaque reports whether the image is fully opaque; YCbCr carries no alpha
// channel, so this is always true.
func (p *ycc) Opaque() bool {
	return true
}
// SubImage returns an image representing the portion of the image p visible
// through r. The returned value shares pixels with the original image.
func (p *ycc) SubImage(r image.Rectangle) image.Image {
	r = r.Intersect(p.Rect)
	if r.Empty() {
		return &ycc{SubsampleRatio: p.SubsampleRatio}
	}
	// Slice from the first pixel of r; Rect/Stride keep indexing correct.
	i := p.PixOffset(r.Min.X, r.Min.Y)
	return &ycc{
		Pix:            p.Pix[i:],
		Stride:         p.Stride,
		Rect:           r,
		SubsampleRatio: p.SubsampleRatio,
	}
}
// newYCC returns a new ycc with the given bounds and subsample ratio,
// allocating 3 bytes per pixel.
func newYCC(r image.Rectangle, s image.YCbCrSubsampleRatio) *ycc {
	w, h := r.Dx(), r.Dy()
	buf := make([]uint8, 3*w*h)
	return &ycc{Pix: buf, Stride: 3 * w, Rect: r, SubsampleRatio: s}
}
// Copy of image.YCbCrSubsampleRatio constants - this allows us to support
// older versions of Go where these constants are not defined (i.e. Go 1.4).
// The values mirror the iota order of the standard library's constants.
const (
	ycbcrSubsampleRatio444 image.YCbCrSubsampleRatio = iota
	ycbcrSubsampleRatio422
	ycbcrSubsampleRatio420
	ycbcrSubsampleRatio440
	ycbcrSubsampleRatio411
	ycbcrSubsampleRatio410
)
// YCbCr converts ycc to a YCbCr image with the same subsample ratio
// as the YCbCr image that ycc was generated from, dispatching to the
// per-ratio converter. Unknown ratios return the (zeroed) allocated image.
func (p *ycc) YCbCr() *image.YCbCr {
	ycbcr := image.NewYCbCr(p.Rect, p.SubsampleRatio)
	switch ycbcr.SubsampleRatio {
	case ycbcrSubsampleRatio422:
		return p.ycbcr422(ycbcr)
	case ycbcrSubsampleRatio420:
		return p.ycbcr420(ycbcr)
	case ycbcrSubsampleRatio440:
		return p.ycbcr440(ycbcr)
	case ycbcrSubsampleRatio444:
		return p.ycbcr444(ycbcr)
	case ycbcrSubsampleRatio411:
		return p.ycbcr411(ycbcr)
	case ycbcrSubsampleRatio410:
		return p.ycbcr410(ycbcr)
	}
	return ycbcr
}
// imageYCbCrToYCC converts a YCbCr image to a ycc image for resizing,
// dispatching on the source's subsample ratio; unknown ratios return the
// (zeroed) allocated ycc.
func imageYCbCrToYCC(in *image.YCbCr) *ycc {
	w, h := in.Rect.Dx(), in.Rect.Dy()
	p := ycc{
		Pix:            make([]uint8, 3*w*h),
		Stride:         3 * w,
		Rect:           image.Rect(0, 0, w, h),
		SubsampleRatio: in.SubsampleRatio,
	}
	switch in.SubsampleRatio {
	case ycbcrSubsampleRatio422:
		return convertToYCC422(in, &p)
	case ycbcrSubsampleRatio420:
		return convertToYCC420(in, &p)
	case ycbcrSubsampleRatio440:
		return convertToYCC440(in, &p)
	case ycbcrSubsampleRatio444:
		return convertToYCC444(in, &p)
	case ycbcrSubsampleRatio411:
		return convertToYCC411(in, &p)
	case ycbcrSubsampleRatio410:
		return convertToYCC410(in, &p)
	}
	return &p
}
// ycbcr422 unpacks p's interleaved pixels into ycbcr's planes using
// 4:2:2 chroma indexing (one chroma sample per two horizontal pixels).
func (p *ycc) ycbcr422(ycbcr *image.YCbCr) *image.YCbCr {
	height := ycbcr.Rect.Dy()
	width := ycbcr.Rect.Dx()
	off := 0
	for y := 0; y < height; y++ {
		yBase := y * ycbcr.YStride
		cBase := y * ycbcr.CStride
		for x := 0; x < width; x++ {
			c := cBase + x/2
			ycbcr.Y[yBase+x] = p.Pix[off]
			ycbcr.Cb[c] = p.Pix[off+1]
			ycbcr.Cr[c] = p.Pix[off+2]
			off += 3
		}
	}
	return ycbcr
}
// ycbcr420 unpacks p's interleaved pixels into ycbcr's planes using
// 4:2:0 chroma indexing (one chroma sample per 2x2 pixel block).
func (p *ycc) ycbcr420(ycbcr *image.YCbCr) *image.YCbCr {
	height := ycbcr.Rect.Dy()
	width := ycbcr.Rect.Dx()
	off := 0
	for y := 0; y < height; y++ {
		yBase := y * ycbcr.YStride
		cBase := (y / 2) * ycbcr.CStride
		for x := 0; x < width; x++ {
			c := cBase + x/2
			ycbcr.Y[yBase+x] = p.Pix[off]
			ycbcr.Cb[c] = p.Pix[off+1]
			ycbcr.Cr[c] = p.Pix[off+2]
			off += 3
		}
	}
	return ycbcr
}
// ycbcr440 unpacks p's interleaved pixels into ycbcr's planes using
// 4:4:0 chroma indexing (one chroma sample per two vertical pixels).
func (p *ycc) ycbcr440(ycbcr *image.YCbCr) *image.YCbCr {
	height := ycbcr.Rect.Dy()
	width := ycbcr.Rect.Dx()
	off := 0
	for y := 0; y < height; y++ {
		yBase := y * ycbcr.YStride
		cBase := (y / 2) * ycbcr.CStride
		for x := 0; x < width; x++ {
			c := cBase + x
			ycbcr.Y[yBase+x] = p.Pix[off]
			ycbcr.Cb[c] = p.Pix[off+1]
			ycbcr.Cr[c] = p.Pix[off+2]
			off += 3
		}
	}
	return ycbcr
}
// ycbcr444 unpacks p's interleaved pixels into ycbcr's planes using
// 4:4:4 chroma indexing (one chroma sample per pixel, no subsampling).
func (p *ycc) ycbcr444(ycbcr *image.YCbCr) *image.YCbCr {
	height := ycbcr.Rect.Dy()
	width := ycbcr.Rect.Dx()
	off := 0
	for y := 0; y < height; y++ {
		yBase := y * ycbcr.YStride
		cBase := y * ycbcr.CStride
		for x := 0; x < width; x++ {
			c := cBase + x
			ycbcr.Y[yBase+x] = p.Pix[off]
			ycbcr.Cb[c] = p.Pix[off+1]
			ycbcr.Cr[c] = p.Pix[off+2]
			off += 3
		}
	}
	return ycbcr
}
// ycbcr411 unpacks p's interleaved pixels into ycbcr's planes using
// 4:1:1 chroma indexing (one chroma sample per four horizontal pixels).
func (p *ycc) ycbcr411(ycbcr *image.YCbCr) *image.YCbCr {
	height := ycbcr.Rect.Dy()
	width := ycbcr.Rect.Dx()
	off := 0
	for y := 0; y < height; y++ {
		yBase := y * ycbcr.YStride
		cBase := y * ycbcr.CStride
		for x := 0; x < width; x++ {
			c := cBase + x/4
			ycbcr.Y[yBase+x] = p.Pix[off]
			ycbcr.Cb[c] = p.Pix[off+1]
			ycbcr.Cr[c] = p.Pix[off+2]
			off += 3
		}
	}
	return ycbcr
}
// ycbcr410 unpacks p's interleaved pixels into ycbcr's planes using
// 4:1:0 chroma indexing (one chroma sample per 4x2 pixel block).
func (p *ycc) ycbcr410(ycbcr *image.YCbCr) *image.YCbCr {
	height := ycbcr.Rect.Dy()
	width := ycbcr.Rect.Dx()
	off := 0
	for y := 0; y < height; y++ {
		yBase := y * ycbcr.YStride
		cBase := (y / 2) * ycbcr.CStride
		for x := 0; x < width; x++ {
			c := cBase + x/4
			ycbcr.Y[yBase+x] = p.Pix[off]
			ycbcr.Cb[c] = p.Pix[off+1]
			ycbcr.Cr[c] = p.Pix[off+2]
			off += 3
		}
	}
	return ycbcr
}
// convertToYCC422 packs a 4:2:2 subsampled YCbCr image into p's
// interleaved Y/Cb/Cr buffer, duplicating each chroma sample across
// the two horizontal pixels it covers.
func convertToYCC422(in *image.YCbCr, p *ycc) *ycc {
	height := in.Rect.Dy()
	width := in.Rect.Dx()
	off := 0
	for y := 0; y < height; y++ {
		yBase := y * in.YStride
		cBase := y * in.CStride
		for x := 0; x < width; x++ {
			c := cBase + x/2
			p.Pix[off] = in.Y[yBase+x]
			p.Pix[off+1] = in.Cb[c]
			p.Pix[off+2] = in.Cr[c]
			off += 3
		}
	}
	return p
}
// convertToYCC420 packs a 4:2:0 subsampled YCbCr image into p's
// interleaved Y/Cb/Cr buffer, duplicating each chroma sample across
// the 2x2 pixel block it covers.
func convertToYCC420(in *image.YCbCr, p *ycc) *ycc {
	height := in.Rect.Dy()
	width := in.Rect.Dx()
	off := 0
	for y := 0; y < height; y++ {
		yBase := y * in.YStride
		cBase := (y / 2) * in.CStride
		for x := 0; x < width; x++ {
			c := cBase + x/2
			p.Pix[off] = in.Y[yBase+x]
			p.Pix[off+1] = in.Cb[c]
			p.Pix[off+2] = in.Cr[c]
			off += 3
		}
	}
	return p
}
// convertToYCC440 packs a 4:4:0 subsampled YCbCr image into p's
// interleaved Y/Cb/Cr buffer, duplicating each chroma sample across
// the two vertical pixels it covers.
func convertToYCC440(in *image.YCbCr, p *ycc) *ycc {
	height := in.Rect.Dy()
	width := in.Rect.Dx()
	off := 0
	for y := 0; y < height; y++ {
		yBase := y * in.YStride
		cBase := (y / 2) * in.CStride
		for x := 0; x < width; x++ {
			c := cBase + x
			p.Pix[off] = in.Y[yBase+x]
			p.Pix[off+1] = in.Cb[c]
			p.Pix[off+2] = in.Cr[c]
			off += 3
		}
	}
	return p
}
// convertToYCC444 packs an unsubsampled (4:4:4) YCbCr image into p's
// interleaved Y/Cb/Cr buffer; every pixel has its own chroma sample.
func convertToYCC444(in *image.YCbCr, p *ycc) *ycc {
	height := in.Rect.Dy()
	width := in.Rect.Dx()
	off := 0
	for y := 0; y < height; y++ {
		yBase := y * in.YStride
		cBase := y * in.CStride
		for x := 0; x < width; x++ {
			c := cBase + x
			p.Pix[off] = in.Y[yBase+x]
			p.Pix[off+1] = in.Cb[c]
			p.Pix[off+2] = in.Cr[c]
			off += 3
		}
	}
	return p
}
// convertToYCC411 packs a 4:1:1 subsampled YCbCr image into p's
// interleaved Y/Cb/Cr buffer, duplicating each chroma sample across
// the four horizontal pixels it covers.
func convertToYCC411(in *image.YCbCr, p *ycc) *ycc {
	height := in.Rect.Dy()
	width := in.Rect.Dx()
	off := 0
	for y := 0; y < height; y++ {
		yBase := y * in.YStride
		cBase := y * in.CStride
		for x := 0; x < width; x++ {
			c := cBase + x/4
			p.Pix[off] = in.Y[yBase+x]
			p.Pix[off+1] = in.Cb[c]
			p.Pix[off+2] = in.Cr[c]
			off += 3
		}
	}
	return p
}
// convertToYCC410 packs a 4:1:0 subsampled YCbCr image into p's
// interleaved Y/Cb/Cr buffer, duplicating each chroma sample across
// the 4x2 pixel block it covers.
func convertToYCC410(in *image.YCbCr, p *ycc) *ycc {
	height := in.Rect.Dy()
	width := in.Rect.Dx()
	off := 0
	for y := 0; y < height; y++ {
		yBase := y * in.YStride
		cBase := (y / 2) * in.CStride
		for x := 0; x < width; x++ {
			c := cBase + x/4
			p.Pix[off] = in.Y[yBase+x]
			p.Pix[off+1] = in.Cb[c]
			p.Pix[off+2] = in.Cr[c]
			off += 3
		}
	}
	return p
}

6
vendor/modules.txt vendored
View file

@ -90,6 +90,10 @@ github.com/chromedp/cdproto/webauthn
github.com/chromedp/chromedp github.com/chromedp/chromedp
github.com/chromedp/chromedp/device github.com/chromedp/chromedp/device
github.com/chromedp/chromedp/kb github.com/chromedp/chromedp/kb
# github.com/corona10/goimagehash v1.0.3
github.com/corona10/goimagehash
github.com/corona10/goimagehash/etcs
github.com/corona10/goimagehash/transforms
# github.com/cpuguy83/go-md2man/v2 v2.0.0 # github.com/cpuguy83/go-md2man/v2 v2.0.0
github.com/cpuguy83/go-md2man/v2/md2man github.com/cpuguy83/go-md2man/v2/md2man
# github.com/davecgh/go-spew v1.1.1 # github.com/davecgh/go-spew v1.1.1
@ -228,6 +232,8 @@ github.com/modern-go/concurrent
github.com/modern-go/reflect2 github.com/modern-go/reflect2
# github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007 # github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007
github.com/natefinch/pie github.com/natefinch/pie
# github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646
github.com/nfnt/resize
# github.com/pelletier/go-toml v1.2.0 # github.com/pelletier/go-toml v1.2.0
github.com/pelletier/go-toml github.com/pelletier/go-toml
# github.com/pkg/errors v0.9.1 # github.com/pkg/errors v0.9.1