Caption support (#2462)

Co-authored-by: WithoutPants <53250216+WithoutPants@users.noreply.github.com>
This commit is contained in:
cj 2022-05-05 20:59:28 -05:00 committed by GitHub
parent ab1b30ffb7
commit c1a096a1a6
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
114 changed files with 16899 additions and 17 deletions

3
go.mod
View file

@ -46,6 +46,7 @@ require (
) )
require ( require (
github.com/asticode/go-astisub v0.20.0
github.com/go-chi/httplog v0.2.1 github.com/go-chi/httplog v0.2.1
github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4 github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4
github.com/hashicorp/golang-lru v0.5.4 github.com/hashicorp/golang-lru v0.5.4
@ -59,6 +60,8 @@ require (
require ( require (
github.com/agnivade/levenshtein v1.1.1 // indirect github.com/agnivade/levenshtein v1.1.1 // indirect
github.com/antchfx/xpath v1.2.0 // indirect github.com/antchfx/xpath v1.2.0 // indirect
github.com/asticode/go-astikit v0.20.0 // indirect
github.com/asticode/go-astits v1.8.0 // indirect
github.com/chromedp/sysutil v1.0.0 // indirect github.com/chromedp/sysutil v1.0.0 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect

7
go.sum
View file

@ -104,6 +104,12 @@ github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmV
github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc= github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc=
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
github.com/asticode/go-astikit v0.20.0 h1:+7N+J4E4lWx2QOkRdOf6DafWJMv6O4RRfgClwQokrH8=
github.com/asticode/go-astikit v0.20.0/go.mod h1:h4ly7idim1tNhaVkdVBeXQZEE3L0xblP7fCWbgwipF0=
github.com/asticode/go-astisub v0.20.0 h1:mKuLwgGkQj35RRHFiTcq+2hgR7g1mHiYiIkr9UNTmXw=
github.com/asticode/go-astisub v0.20.0/go.mod h1:WTkuSzFB+Bp7wezuSf2Oxulj5A8zu2zLRVFf6bIFQK8=
github.com/asticode/go-astits v1.8.0 h1:rf6aiiGn/QhlFjNON1n5plqF3Fs025XLUwiQ0NB6oZg=
github.com/asticode/go-astits v1.8.0/go.mod h1:DkOWmBNQpnr9mv24KfZjq4JawCFX1FCqjLVGvO0DygQ=
github.com/aws/aws-sdk-go v1.17.7/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= github.com/aws/aws-sdk-go v1.17.7/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go-v2 v1.3.2/go.mod h1:7OaACgj2SX3XGWnrIjGlJM22h6yD6MEWKvm7levnnM8= github.com/aws/aws-sdk-go-v2 v1.3.2/go.mod h1:7OaACgj2SX3XGWnrIjGlJM22h6yD6MEWKvm7levnnM8=
github.com/aws/aws-sdk-go-v2 v1.6.0/go.mod h1:tI4KhsR5VkzlUa2DZAdwx7wCAYGwkZZ1H31PYrBFx1w= github.com/aws/aws-sdk-go-v2 v1.6.0/go.mod h1:tI4KhsR5VkzlUa2DZAdwx7wCAYGwkZZ1H31PYrBFx1w=
@ -625,6 +631,7 @@ github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/profile v1.4.0/go.mod h1:NWz/XGvpEW1FyYQ7fCx4dqYBLlfTcE+A9FLAkNKqjFE=
github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI=
github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=

View file

@ -44,3 +44,6 @@ models:
model: github.com/stashapp/stash/pkg/models.SavedFilter model: github.com/stashapp/stash/pkg/models.SavedFilter
StashID: StashID:
model: github.com/stashapp/stash/pkg/models.StashID model: github.com/stashapp/stash/pkg/models.StashID
SceneCaption:
model: github.com/stashapp/stash/pkg/models.SceneCaption

View file

@ -13,6 +13,10 @@ fragment SlimSceneData on Scene {
phash phash
interactive interactive
interactive_speed interactive_speed
captions {
language_code
caption_type
}
file { file {
size size
@ -35,6 +39,7 @@ fragment SlimSceneData on Scene {
sprite sprite
funscript funscript
interactive_heatmap interactive_heatmap
caption
} }
scene_markers { scene_markers {

View file

@ -13,6 +13,10 @@ fragment SceneData on Scene {
phash phash
interactive interactive
interactive_speed interactive_speed
captions {
language_code
caption_type
}
created_at created_at
updated_at updated_at
@ -37,6 +41,7 @@ fragment SceneData on Scene {
sprite sprite
funscript funscript
interactive_heatmap interactive_heatmap
caption
} }
scene_markers { scene_markers {

View file

@ -174,6 +174,8 @@ input SceneFilterType {
interactive: Boolean interactive: Boolean
"""Filter by InteractiveSpeed""" """Filter by InteractiveSpeed"""
interactive_speed: IntCriterionInput interactive_speed: IntCriterionInput
"""Filter by captions"""
captions: StringCriterionInput
} }
input MovieFilterType { input MovieFilterType {

View file

@ -19,6 +19,7 @@ type ScenePathsType {
sprite: String # Resolver sprite: String # Resolver
funscript: String # Resolver funscript: String # Resolver
interactive_heatmap: String # Resolver interactive_heatmap: String # Resolver
caption: String # Resolver
} }
type SceneMovie { type SceneMovie {
@ -26,6 +27,11 @@ type SceneMovie {
scene_index: Int scene_index: Int
} }
"""A caption (subtitle) file associated with a scene"""
type SceneCaption {
  """ISO 639 language code of the caption, or "00" when unknown"""
  language_code: String!
  """Caption file type, i.e. its extension (e.g. vtt, srt)"""
  caption_type: String!
}
type Scene { type Scene {
id: ID! id: ID!
checksum: String checksum: String
@ -41,6 +47,7 @@ type Scene {
phash: String phash: String
interactive: Boolean! interactive: Boolean!
interactive_speed: Int interactive_speed: Int
captions: [SceneCaption!]
created_at: Time! created_at: Time!
updated_at: Time! updated_at: Time!
file_mod_time: Time file_mod_time: Time

View file

@ -98,6 +98,7 @@ func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.S
spritePath := builder.GetSpriteURL() spritePath := builder.GetSpriteURL()
chaptersVttPath := builder.GetChaptersVTTURL() chaptersVttPath := builder.GetChaptersVTTURL()
funscriptPath := builder.GetFunscriptURL() funscriptPath := builder.GetFunscriptURL()
captionBasePath := builder.GetCaptionURL()
interactiveHeatmap := builder.GetInteractiveHeatmapURL() interactiveHeatmap := builder.GetInteractiveHeatmapURL()
return &models.ScenePathsType{ return &models.ScenePathsType{
@ -110,6 +111,7 @@ func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.S
Sprite: &spritePath, Sprite: &spritePath,
Funscript: &funscriptPath, Funscript: &funscriptPath,
InteractiveHeatmap: &interactiveHeatmap, InteractiveHeatmap: &interactiveHeatmap,
Caption: &captionBasePath,
}, nil }, nil
} }
@ -124,6 +126,17 @@ func (r *sceneResolver) SceneMarkers(ctx context.Context, obj *models.Scene) (re
return ret, nil return ret, nil
} }
// Captions returns the captions stored for the given scene.
func (r *sceneResolver) Captions(ctx context.Context, obj *models.Scene) (ret []*models.SceneCaption, err error) {
	readErr := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
		var txnErr error
		ret, txnErr = repo.Scene().GetCaptions(obj.ID)
		return txnErr
	})
	if readErr != nil {
		return nil, readErr
	}
	return ret, nil
}
func (r *sceneResolver) Galleries(ctx context.Context, obj *models.Scene) (ret []*models.Gallery, err error) { func (r *sceneResolver) Galleries(ctx context.Context, obj *models.Scene) (ret []*models.Gallery, err error) {
if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error {
ret, err = repo.Gallery().FindBySceneID(obj.ID) ret, err = repo.Gallery().FindBySceneID(obj.ID)

View file

@ -1,6 +1,7 @@
package api package api
import ( import (
"bytes"
"context" "context"
"net/http" "net/http"
"strconv" "strconv"
@ -41,6 +42,7 @@ func (rs sceneRoutes) Routes() chi.Router {
r.Get("/vtt/chapter", rs.ChapterVtt) r.Get("/vtt/chapter", rs.ChapterVtt)
r.Get("/funscript", rs.Funscript) r.Get("/funscript", rs.Funscript)
r.Get("/interactive_heatmap", rs.InteractiveHeatmap) r.Get("/interactive_heatmap", rs.InteractiveHeatmap)
r.Get("/caption", rs.CaptionLang)
r.Get("/scene_marker/{sceneMarkerId}/stream", rs.SceneMarkerStream) r.Get("/scene_marker/{sceneMarkerId}/stream", rs.SceneMarkerStream)
r.Get("/scene_marker/{sceneMarkerId}/preview", rs.SceneMarkerPreview) r.Get("/scene_marker/{sceneMarkerId}/preview", rs.SceneMarkerPreview)
@ -284,6 +286,46 @@ func (rs sceneRoutes) InteractiveHeatmap(w http.ResponseWriter, r *http.Request)
http.ServeFile(w, r, filepath) http.ServeFile(w, r, filepath)
} }
// Caption serves the scene's caption matching the given language code and
// caption type (file extension), converted to WebVTT.
// If no matching caption can be read, the response body is left empty.
func (rs sceneRoutes) Caption(w http.ResponseWriter, r *http.Request, lang string, ext string) {
	s := r.Context().Value(sceneKey).(*models.Scene)

	if err := rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error {
		captions, err := repo.Scene().GetCaptions(s.ID)
		if err != nil {
			// previously this error was left unchecked until after the loop;
			// fail fast instead
			return err
		}

		for _, caption := range captions {
			if lang != caption.LanguageCode || ext != caption.CaptionType {
				continue
			}

			sub, err := scene.ReadSubs(caption.Path(s.Path))
			if err != nil {
				// best-effort: try any remaining matching captions
				logger.Debugf("Error while reading subs: %v", err)
				continue
			}

			var b bytes.Buffer
			if err := sub.WriteToWebVTT(&b); err != nil {
				return err
			}

			w.Header().Set("Content-Type", "text/vtt")
			w.Header().Add("Cache-Control", "no-cache")
			_, _ = b.WriteTo(w)
			return nil
		}

		return nil
	}); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
}
// CaptionLang serves a caption selected by the "lang" and "type" query
// parameters.
func (rs sceneRoutes) CaptionLang(w http.ResponseWriter, r *http.Request) {
	// parse the query string; a parse failure is logged but not fatal, the
	// lookup simply proceeds with whatever values were parsed
	if err := r.ParseForm(); err != nil {
		logger.Warnf("[caption] error parsing query form: %v", err)
	}

	rs.Caption(w, r, r.Form.Get("lang"), r.Form.Get("type"))
}
func (rs sceneRoutes) VttThumbs(w http.ResponseWriter, r *http.Request) { func (rs sceneRoutes) VttThumbs(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene) scene := r.Context().Value(sceneKey).(*models.Scene)
w.Header().Set("Content-Type", "text/vtt") w.Header().Set("Content-Type", "text/vtt")

View file

@ -67,6 +67,10 @@ func (b SceneURLBuilder) GetFunscriptURL() string {
return b.BaseURL + "/scene/" + b.SceneID + "/funscript" return b.BaseURL + "/scene/" + b.SceneID + "/funscript"
} }
// GetCaptionURL returns the base URL from which the scene's captions are
// served.
func (b SceneURLBuilder) GetCaptionURL() string {
	sceneURL := b.BaseURL + "/scene/" + b.SceneID
	return sceneURL + "/caption"
}
func (b SceneURLBuilder) GetInteractiveHeatmapURL() string { func (b SceneURLBuilder) GetInteractiveHeatmapURL() string {
return b.BaseURL + "/scene/" + b.SceneID + "/interactive_heatmap" return b.BaseURL + "/scene/" + b.SceneID + "/interactive_heatmap"
} }

View file

@ -12,6 +12,7 @@ import (
"github.com/stashapp/stash/pkg/job" "github.com/stashapp/stash/pkg/job"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene"
) )
func isGallery(pathname string) bool { func isGallery(pathname string) bool {
@ -19,6 +20,10 @@ func isGallery(pathname string) bool {
return fsutil.MatchExtension(pathname, gExt) return fsutil.MatchExtension(pathname, gExt)
} }
func isCaptions(pathname string) bool {
return fsutil.MatchExtension(pathname, scene.CaptionExts)
}
func isVideo(pathname string) bool { func isVideo(pathname string) bool {
vidExt := config.GetInstance().GetVideoExtensions() vidExt := config.GetInstance().GetVideoExtensions()
return fsutil.MatchExtension(pathname, vidExt) return fsutil.MatchExtension(pathname, vidExt)

View file

@ -16,6 +16,7 @@ import (
"github.com/stashapp/stash/pkg/job" "github.com/stashapp/stash/pkg/job"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene"
"github.com/stashapp/stash/pkg/scene/generate" "github.com/stashapp/stash/pkg/scene/generate"
"github.com/stashapp/stash/pkg/utils" "github.com/stashapp/stash/pkg/utils"
) )
@ -277,6 +278,8 @@ func (t *ScanTask) Start(ctx context.Context) {
s = t.scanScene(ctx) s = t.scanScene(ctx)
case isImage(path): case isImage(path):
t.scanImage(ctx) t.scanImage(ctx)
case isCaptions(path):
t.associateCaptions(ctx)
} }
}) })
@ -351,6 +354,7 @@ func walkFilesToScan(s *models.StashConfig, f filepath.WalkFunc) error {
vidExt := config.GetVideoExtensions() vidExt := config.GetVideoExtensions()
imgExt := config.GetImageExtensions() imgExt := config.GetImageExtensions()
gExt := config.GetGalleryExtensions() gExt := config.GetGalleryExtensions()
capExt := scene.CaptionExts
excludeVidRegex := generateRegexps(config.GetExcludes()) excludeVidRegex := generateRegexps(config.GetExcludes())
excludeImgRegex := generateRegexps(config.GetImageExcludes()) excludeImgRegex := generateRegexps(config.GetImageExcludes())
@ -394,6 +398,10 @@ func walkFilesToScan(s *models.StashConfig, f filepath.WalkFunc) error {
} }
} }
if fsutil.MatchExtension(path, capExt) {
return f(path, info, err)
}
return nil return nil
}) })
} }

View file

@ -2,7 +2,9 @@ package manager
import ( import (
"context" "context"
"path/filepath"
"github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/ffmpeg" "github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/logger"
@ -78,3 +80,48 @@ func (t *ScanTask) scanScene(ctx context.Context) *models.Scene {
return retScene return retScene
} }
// associateCaptions associates a caption file with any scene(s) sharing its
// basename. A caption is only added when its (language, type) pair is not
// already recorded for the scene.
func (t *ScanTask) associateCaptions(ctx context.Context) {
	vExt := config.GetInstance().GetVideoExtensions()
	captionPath := t.file.Path()
	captionLang := scene.GetCaptionsLangFromPath(captionPath)

	relatedFiles := scene.GenerateCaptionCandidates(captionPath, vExt)
	if err := t.TxnManager.WithTxn(ctx, func(r models.Repository) error {
		sqb := r.Scene()
		for _, scenePath := range relatedFiles {
			s, er := sqb.FindByPath(scenePath)
			if er != nil {
				logger.Errorf("Error searching for scene %s: %v", scenePath, er)
				continue
			}
			if s == nil {
				continue
			}

			// found related scene
			logger.Debugf("Matched captions to scene %s", s.Path)

			captions, er := sqb.GetCaptions(s.ID)
			if er != nil {
				// previously swallowed silently; log and move on
				logger.Errorf("Error getting captions for scene %s: %v", s.Path, er)
				continue
			}

			fileExt := filepath.Ext(captionPath) // includes the leading "."
			ext := fileExt[1:]
			if scene.IsLangInCaptions(captionLang, ext, captions) {
				// only update captions if language code is not present
				continue
			}

			captions = append(captions, &models.SceneCaption{
				LanguageCode: captionLang,
				Filename:     filepath.Base(captionPath),
				CaptionType:  ext,
			})
			if er := sqb.UpdateCaptions(s.ID, captions); er != nil {
				// previously swallowed silently; log and move on
				logger.Errorf("Error updating captions for scene %s: %v", s.Path, er)
				continue
			}
			logger.Debugf("Updated captions for scene %s. Added %s", s.Path, captionLang)
		}
		// per-scene failures are logged and skipped so one bad match does not
		// abort the remaining candidates (the original `return err` here
		// always returned nil because err was never assigned)
		return nil
	}); err != nil {
		logger.Error(err.Error())
	}
}

View file

@ -23,7 +23,7 @@ import (
var DB *sqlx.DB var DB *sqlx.DB
var WriteMu sync.Mutex var WriteMu sync.Mutex
var dbPath string var dbPath string
var appSchemaVersion uint = 30 var appSchemaVersion uint = 31
var databaseSchemaVersion uint var databaseSchemaVersion uint
//go:embed migrations/*.sql //go:embed migrations/*.sql

View file

@ -0,0 +1,8 @@
-- scene_captions associates caption (subtitle) files with scenes.
-- A scene may have several captions, unique per (language_code, caption_type);
-- rows are removed automatically when the owning scene is deleted.
CREATE TABLE `scene_captions` (
  `scene_id` integer,
  `language_code` varchar(255) NOT NULL,
  `filename` varchar(255) NOT NULL,
  `caption_type` varchar(255) NOT NULL,
  primary key (`scene_id`, `language_code`, `caption_type`),
  foreign key(`scene_id`) references `scenes`(`id`) on delete CASCADE
);

View file

@ -1,4 +1,4 @@
// Code generated by mockery v0.0.0-dev. DO NOT EDIT. // Code generated by mockery v2.10.0. DO NOT EDIT.
package mocks package mocks

View file

@ -1,4 +1,4 @@
// Code generated by mockery v0.0.0-dev. DO NOT EDIT. // Code generated by mockery v2.10.0. DO NOT EDIT.
package mocks package mocks

View file

@ -1,4 +1,4 @@
// Code generated by mockery v0.0.0-dev. DO NOT EDIT. // Code generated by mockery v2.10.0. DO NOT EDIT.
package mocks package mocks

View file

@ -1,4 +1,4 @@
// Code generated by mockery v0.0.0-dev. DO NOT EDIT. // Code generated by mockery v2.10.0. DO NOT EDIT.
package mocks package mocks

View file

@ -1,4 +1,4 @@
// Code generated by mockery v0.0.0-dev. DO NOT EDIT. // Code generated by mockery v2.10.0. DO NOT EDIT.
package mocks package mocks

View file

@ -1,4 +1,4 @@
// Code generated by mockery v0.0.0-dev. DO NOT EDIT. // Code generated by mockery v2.10.0. DO NOT EDIT.
package mocks package mocks

View file

@ -1,4 +1,4 @@
// Code generated by mockery v0.0.0-dev. DO NOT EDIT. // Code generated by mockery v2.10.0. DO NOT EDIT.
package mocks package mocks
@ -482,6 +482,28 @@ func (_m *SceneReaderWriter) FindMany(ids []int) ([]*models.Scene, error) {
return r0, r1 return r0, r1
} }
// GetCaptions provides a mock function with given fields: sceneID
func (_m *SceneReaderWriter) GetCaptions(sceneID int) ([]*models.SceneCaption, error) {
	ret := _m.Called(sceneID)

	var r0 []*models.SceneCaption
	if rf, ok := ret.Get(0).(func(int) []*models.SceneCaption); ok {
		r0 = rf(sceneID)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([]*models.SceneCaption)
		}
	}

	var r1 error
	if rf, ok := ret.Get(1).(func(int) error); ok {
		r1 = rf(sceneID)
	} else {
		r1 = ret.Error(1)
	}

	return r0, r1
}
// GetCover provides a mock function with given fields: sceneID // GetCover provides a mock function with given fields: sceneID
func (_m *SceneReaderWriter) GetCover(sceneID int) ([]byte, error) { func (_m *SceneReaderWriter) GetCover(sceneID int) ([]byte, error) {
ret := _m.Called(sceneID) ret := _m.Called(sceneID)
@ -729,6 +751,20 @@ func (_m *SceneReaderWriter) Update(updatedScene models.ScenePartial) (*models.S
return r0, r1 return r0, r1
} }
// UpdateCaptions provides a mock function with given fields: sceneID, captions
func (_m *SceneReaderWriter) UpdateCaptions(sceneID int, captions []*models.SceneCaption) error {
	ret := _m.Called(sceneID, captions)

	var r0 error
	if rf, ok := ret.Get(0).(func(int, []*models.SceneCaption) error); ok {
		r0 = rf(sceneID, captions)
	} else {
		r0 = ret.Error(0)
	}

	return r0
}
// UpdateCover provides a mock function with given fields: sceneID, cover // UpdateCover provides a mock function with given fields: sceneID, cover
func (_m *SceneReaderWriter) UpdateCover(sceneID int, cover []byte) error { func (_m *SceneReaderWriter) UpdateCover(sceneID int, cover []byte) error {
ret := _m.Called(sceneID, cover) ret := _m.Called(sceneID, cover)

View file

@ -1,4 +1,4 @@
// Code generated by mockery v0.0.0-dev. DO NOT EDIT. // Code generated by mockery v2.10.0. DO NOT EDIT.
package mocks package mocks

View file

@ -1,4 +1,4 @@
// Code generated by mockery v0.0.0-dev. DO NOT EDIT. // Code generated by mockery v2.10.0. DO NOT EDIT.
package mocks package mocks

View file

@ -1,4 +1,4 @@
// Code generated by mockery v0.0.0-dev. DO NOT EDIT. // Code generated by mockery v2.10.0. DO NOT EDIT.
package mocks package mocks

View file

@ -221,3 +221,13 @@ func (s *Scenes) Append(o interface{}) {
func (s *Scenes) New() interface{} { func (s *Scenes) New() interface{} {
return &Scene{} return &Scene{}
} }
type SceneCaption struct {
LanguageCode string `json:"language_code"`
Filename string `json:"filename"`
CaptionType string `json:"caption_type"`
}
func (c SceneCaption) Path(scenePath string) string {
return filepath.Join(filepath.Dir(scenePath), c.Filename)
}

View file

@ -62,6 +62,7 @@ type SceneReader interface {
Wall(q *string) ([]*Scene, error) Wall(q *string) ([]*Scene, error)
All() ([]*Scene, error) All() ([]*Scene, error)
Query(options SceneQueryOptions) (*SceneQueryResult, error) Query(options SceneQueryOptions) (*SceneQueryResult, error)
GetCaptions(sceneID int) ([]*SceneCaption, error)
GetCover(sceneID int) ([]byte, error) GetCover(sceneID int) ([]byte, error)
GetMovies(sceneID int) ([]MoviesScenes, error) GetMovies(sceneID int) ([]MoviesScenes, error)
GetTagIDs(sceneID int) ([]int, error) GetTagIDs(sceneID int) ([]int, error)
@ -79,6 +80,7 @@ type SceneWriter interface {
ResetOCounter(id int) (int, error) ResetOCounter(id int) (int, error)
UpdateFileModTime(id int, modTime NullSQLiteTimestamp) error UpdateFileModTime(id int, modTime NullSQLiteTimestamp) error
Destroy(id int) error Destroy(id int) error
UpdateCaptions(id int, captions []*SceneCaption) error
UpdateCover(sceneID int, cover []byte) error UpdateCover(sceneID int, cover []byte) error
DestroyCover(sceneID int) error DestroyCover(sceneID int) error
UpdatePerformers(sceneID int, performerIDs []int) error UpdatePerformers(sceneID int, performerIDs []int) error

106
pkg/scene/caption.go Normal file
View file

@ -0,0 +1,106 @@
package scene
import (
"os"
"path/filepath"
"strings"
"golang.org/x/text/language"
"github.com/asticode/go-astisub"
"github.com/stashapp/stash/pkg/models"
)
// CaptionExts lists the supported caption file extensions. When both a vtt
// and an srt file are provided, vtt comes first so it is prioritized (native
// browser support).
var CaptionExts = []string{"vtt", "srt"}

// LangUnknown is used for captions without a language code in the filename.
// ISO 639-1 uses 2 or 3 a-z chars for codes, so "00" is a safe non-valid
// choice.
// https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes
const LangUnknown = "00"

// GetCaptionPath generates the path of a caption from a given file path,
// wanted language and caption suffix.
func GetCaptionPath(path, lang, suffix string) string {
	base := strings.TrimSuffix(path, filepath.Ext(path))
	if lang == "" || lang == LangUnknown {
		return base + "." + suffix
	}
	return base + "." + lang + "." + suffix
}
// ReadSubs reads and parses a captions file at path via go-astisub, which
// detects the subtitle format from the file extension.
func ReadSubs(path string) (*astisub.Subtitles, error) {
	return astisub.OpenFile(path)
}
// IsValidLanguage reports whether lang parses as a valid ISO 639 base
// language code.
func IsValidLanguage(lang string) bool {
	if _, err := language.ParseBase(lang); err != nil {
		return false
	}
	return true
}
// IsLangInCaptions reports whether a caption with the given language code and
// caption type is already present in captions.
func IsLangInCaptions(lang string, ext string, captions []*models.SceneCaption) bool {
	for _, c := range captions {
		if c.LanguageCode == lang && c.CaptionType == ext {
			return true
		}
	}
	return false
}
// GenerateCaptionCandidates generates a list of filenames, one per extension
// in exts, that could be associated with the caption at captionPath.
func GenerateCaptionCandidates(captionPath string, exts []string) []string {
	// caption filename without its extension
	base := strings.TrimSuffix(captionPath, filepath.Ext(captionPath))

	// a caption can be named scene_filename.srt or scene_filename.en.srt;
	// strip a trailing language segment when it is a valid code
	if langExt := filepath.Ext(base); len(langExt) > 2 && IsValidLanguage(langExt[1:]) {
		base = strings.TrimSuffix(base, langExt)
	}

	var candidates []string
	for _, ext := range exts {
		candidates = append(candidates, base+"."+ext)
	}
	return candidates
}
// GetCaptionsLangFromPath returns the language code from a given captions path
// If no valid language is present LangUknown is returned
func GetCaptionsLangFromPath(captionPath string) string {
langCode := LangUnknown
basename := strings.TrimSuffix(captionPath, filepath.Ext(captionPath)) // caption filename without the extension
languageExt := filepath.Ext(basename)
if len(languageExt) > 2 && IsValidLanguage(languageExt[1:]) {
langCode = languageExt[1:]
}
return langCode
}
// CleanCaptions filters out captions whose files no longer exist or are not
// accessible, reporting whether anything was removed.
func CleanCaptions(scenePath string, captions []*models.SceneCaption) (cleanedCaptions []*models.SceneCaption, changed bool) {
	for _, caption := range captions {
		if _, statErr := os.Stat(caption.Path(scenePath)); statErr == nil {
			cleanedCaptions = append(cleanedCaptions, caption)
		} else {
			changed = true
		}
	}
	return
}

55
pkg/scene/caption_test.go Normal file
View file

@ -0,0 +1,55 @@
package scene
import (
"testing"
"github.com/stretchr/testify/assert"
)
// testExts are the video extensions used to build candidate scene paths.
var testExts = []string{"mkv", "mp4"}

// testCase couples a caption path with the language code and the candidate
// scene paths expected to be derived from it.
type testCase struct {
	captionPath        string
	expectedLang       string
	expectedCandidates []string
}

var testCases = []testCase{
	{
		captionPath:        "/stash/video.vtt",
		expectedLang:       LangUnknown,
		expectedCandidates: []string{"/stash/video.mkv", "/stash/video.mp4"},
	},
	{
		captionPath:        "/stash/video.en.vtt",
		expectedLang:       "en",
		expectedCandidates: []string{"/stash/video.mkv", "/stash/video.mp4"}, // lang code valid, remove en part
	},
	{
		captionPath:        "/stash/video.test.srt",
		expectedLang:       LangUnknown,
		expectedCandidates: []string{"/stash/video.test.mkv", "/stash/video.test.mp4"}, // no lang code/lang code invalid, test should remain
	},
	{
		captionPath:        "C:\\videos\\video.fr.srt",
		expectedLang:       "fr",
		expectedCandidates: []string{"C:\\videos\\video.mkv", "C:\\videos\\video.mp4"},
	},
	{
		captionPath:        "C:\\videos\\video.xx.srt",
		expectedLang:       LangUnknown,
		expectedCandidates: []string{"C:\\videos\\video.xx.mkv", "C:\\videos\\video.xx.mp4"}, // no lang code/lang code invalid, xx should remain
	},
}
// TestGenerateCaptionCandidates verifies candidate scene paths derived from
// each fixture caption path.
func TestGenerateCaptionCandidates(t *testing.T) {
	for _, tc := range testCases {
		got := GenerateCaptionCandidates(tc.captionPath, testExts)
		assert.ElementsMatch(t, tc.expectedCandidates, got)
	}
}
// TestGetCaptionsLangFromPath verifies the language code extracted from each
// fixture caption path.
func TestGetCaptionsLangFromPath(t *testing.T) {
	for _, tc := range testCases {
		got := GetCaptionsLangFromPath(tc.captionPath)
		assert.Equal(t, tc.expectedLang, got)
	}
}

View file

@ -105,6 +105,27 @@ func (scanner *Scanner) ScanExisting(ctx context.Context, existing file.FileBase
changed = true changed = true
} }
if err := scanner.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error {
var err error
sqb := r.Scene()
captions, er := sqb.GetCaptions(s.ID)
if er == nil {
if len(captions) > 0 {
clean, altered := CleanCaptions(s.Path, captions)
if altered {
er = sqb.UpdateCaptions(s.ID, clean)
if er == nil {
logger.Debugf("Captions for %s cleaned: %s -> %s", path, captions, clean)
}
}
}
}
return err
}); err != nil {
logger.Error(err.Error())
}
if changed { if changed {
// we are operating on a checksum now, so grab a mutex on the checksum // we are operating on a checksum now, so grab a mutex on the checksum
done := make(chan struct{}) done := make(chan struct{})

View file

@ -365,6 +365,52 @@ func (r *imageRepository) replace(id int, image []byte) error {
return err return err
} }
// captionRepository provides access to the scene_captions join table.
type captionRepository struct {
	repository
}

// get returns all caption rows stored for the given scene id.
func (r *captionRepository) get(id int) ([]*models.SceneCaption, error) {
	query := fmt.Sprintf("SELECT %s, %s, %s from %s WHERE %s = ?", sceneCaptionCodeColumn, sceneCaptionFilenameColumn, sceneCaptionTypeColumn, r.tableName, r.idColumn)
	var ret []*models.SceneCaption
	err := r.queryFunc(query, []interface{}{id}, false, func(rows *sqlx.Rows) error {
		c := models.SceneCaption{}
		if err := rows.Scan(&c.LanguageCode, &c.Filename, &c.CaptionType); err != nil {
			return err
		}
		ret = append(ret, &c)
		return nil
	})
	return ret, err
}
// insert adds a single caption row for the given scene id.
func (r *captionRepository) insert(id int, caption *models.SceneCaption) (sql.Result, error) {
	stmt := fmt.Sprintf("INSERT INTO %s (%s, %s, %s, %s) VALUES (?, ?, ?, ?)", r.tableName, r.idColumn, sceneCaptionCodeColumn, sceneCaptionFilenameColumn, sceneCaptionTypeColumn)
	return r.tx.Exec(stmt, id, caption.LanguageCode, caption.Filename, caption.CaptionType)
}
// replace overwrites the captions stored for the given scene id by deleting
// the existing rows and inserting the provided set.
// NOTE(review): delete-then-insert is not atomic by itself — presumably this
// always runs inside a transaction via r.tx; confirm against callers.
func (r *captionRepository) replace(id int, captions []*models.SceneCaption) error {
	if err := r.destroy([]int{id}); err != nil {
		return err
	}
	for _, caption := range captions {
		if _, err := r.insert(id, caption); err != nil {
			return err
		}
	}

	return nil
}
type stringRepository struct { type stringRepository struct {
repository repository
stringColumn string stringColumn string

View file

@ -19,6 +19,11 @@ const scenesTagsTable = "scenes_tags"
const scenesGalleriesTable = "scenes_galleries" const scenesGalleriesTable = "scenes_galleries"
const moviesScenesTable = "movies_scenes" const moviesScenesTable = "movies_scenes"
// scene caption table and column names
const sceneCaptionsTable = "scene_captions"
const sceneCaptionCodeColumn = "language_code"
const sceneCaptionFilenameColumn = "filename"
const sceneCaptionTypeColumn = "caption_type"
var scenesForPerformerQuery = selectAll(sceneTable) + ` var scenesForPerformerQuery = selectAll(sceneTable) + `
LEFT JOIN performers_scenes as performers_join on performers_join.scene_id = scenes.id LEFT JOIN performers_scenes as performers_join on performers_join.scene_id = scenes.id
WHERE performers_join.performer_id = ? WHERE performers_join.performer_id = ?
@ -127,6 +132,25 @@ func (qb *sceneQueryBuilder) UpdateFileModTime(id int, modTime models.NullSQLite
}) })
} }
// captionRepository returns a repository scoped to the scene_captions table,
// bound to this query builder's transaction.
func (qb *sceneQueryBuilder) captionRepository() *captionRepository {
	return &captionRepository{
		repository: repository{
			tx:        qb.tx,
			tableName: sceneCaptionsTable,
			idColumn:  sceneIDColumn,
		},
	}
}
// GetCaptions returns the captions stored for the given scene id.
func (qb *sceneQueryBuilder) GetCaptions(sceneID int) ([]*models.SceneCaption, error) {
	return qb.captionRepository().get(sceneID)
}
// UpdateCaptions replaces the captions stored for the given scene id with the
// provided set.
func (qb *sceneQueryBuilder) UpdateCaptions(sceneID int, captions []*models.SceneCaption) error {
	return qb.captionRepository().replace(sceneID, captions)
}
func (qb *sceneQueryBuilder) IncrementOCounter(id int) (int, error) { func (qb *sceneQueryBuilder) IncrementOCounter(id int) (int, error) {
_, err := qb.tx.Exec( _, err := qb.tx.Exec(
`UPDATE scenes SET o_counter = o_counter + 1 WHERE scenes.id = ?`, `UPDATE scenes SET o_counter = o_counter + 1 WHERE scenes.id = ?`,
@ -385,6 +409,8 @@ func (qb *sceneQueryBuilder) makeFilter(sceneFilter *models.SceneFilterType) *fi
query.handleCriterion(boolCriterionHandler(sceneFilter.Interactive, "scenes.interactive")) query.handleCriterion(boolCriterionHandler(sceneFilter.Interactive, "scenes.interactive"))
query.handleCriterion(intCriterionHandler(sceneFilter.InteractiveSpeed, "scenes.interactive_speed")) query.handleCriterion(intCriterionHandler(sceneFilter.InteractiveSpeed, "scenes.interactive_speed"))
query.handleCriterion(sceneCaptionCriterionHandler(qb, sceneFilter.Captions))
query.handleCriterion(sceneTagsCriterionHandler(qb, sceneFilter.Tags)) query.handleCriterion(sceneTagsCriterionHandler(qb, sceneFilter.Tags))
query.handleCriterion(sceneTagCountCriterionHandler(qb, sceneFilter.TagCount)) query.handleCriterion(sceneTagCountCriterionHandler(qb, sceneFilter.TagCount))
query.handleCriterion(scenePerformersCriterionHandler(qb, sceneFilter.Performers)) query.handleCriterion(scenePerformersCriterionHandler(qb, sceneFilter.Performers))
@ -607,6 +633,18 @@ func (qb *sceneQueryBuilder) getMultiCriterionHandlerBuilder(foreignTable, joinT
} }
} }
// sceneCaptionCriterionHandler builds a criterion handler that filters scenes
// by caption language code via the scene_captions join table.
func sceneCaptionCriterionHandler(qb *sceneQueryBuilder, captions *models.StringCriterionInput) criterionHandlerFunc {
	builder := stringListCriterionHandlerBuilder{
		joinTable:    sceneCaptionsTable,
		stringColumn: sceneCaptionCodeColumn,
		addJoinTable: func(f *filterBuilder) {
			qb.captionRepository().join(f, "", "scenes.id")
		},
	}
	return builder.handler(captions)
}
func sceneTagsCriterionHandler(qb *sceneQueryBuilder, tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { func sceneTagsCriterionHandler(qb *sceneQueryBuilder, tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc {
h := joinedHierarchicalMultiCriterionHandlerBuilder{ h := joinedHierarchicalMultiCriterionHandlerBuilder{
tx: qb.tx, tx: qb.tx,

View file

@ -1,3 +1,6 @@
### ✨ New Features
* Add support for VTT and SRT captions for scenes. ([#2462](https://github.com/stashapp/stash/pull/2462))
### 🎨 Improvements ### 🎨 Improvements
* Changed playback rate options to be the same as those provided by YouTube. ([#2550](https://github.com/stashapp/stash/pull/2550)) * Changed playback rate options to be the same as those provided by YouTube. ([#2550](https://github.com/stashapp/stash/pull/2550))
* Display error message on fatal error when running stash with double-click in Windows. ([#2543](https://github.com/stashapp/stash/pull/2543)) * Display error message on fatal error when running stash with double-click in Windows. ([#2543](https://github.com/stashapp/stash/pull/2543))

View file

@ -19,6 +19,7 @@ import KeyboardShortcuts from "src/docs/en/KeyboardShortcuts.md";
import Help from "src/docs/en/Help.md"; import Help from "src/docs/en/Help.md";
import Deduplication from "src/docs/en/Deduplication.md"; import Deduplication from "src/docs/en/Deduplication.md";
import Interactive from "src/docs/en/Interactive.md"; import Interactive from "src/docs/en/Interactive.md";
import Captions from "src/docs/en/Captions.md";
import Identify from "src/docs/en/Identify.md"; import Identify from "src/docs/en/Identify.md";
import Browsing from "src/docs/en/Browsing.md"; import Browsing from "src/docs/en/Browsing.md";
import { MarkdownPage } from "../Shared/MarkdownPage"; import { MarkdownPage } from "../Shared/MarkdownPage";
@ -134,6 +135,11 @@ export const Manual: React.FC<IManualProps> = ({
title: "Interactivity", title: "Interactivity",
content: Interactive, content: Interactive,
}, },
{
key: "Captions.md",
title: "Captions",
content: Captions,
},
{ {
key: "KeyboardShortcuts.md", key: "KeyboardShortcuts.md",
title: "Keyboard Shortcuts", title: "Keyboard Shortcuts",

View file

@ -16,6 +16,7 @@ import * as GQL from "src/core/generated-graphql";
import { ScenePlayerScrubber } from "./ScenePlayerScrubber"; import { ScenePlayerScrubber } from "./ScenePlayerScrubber";
import { ConfigurationContext } from "src/hooks/Config"; import { ConfigurationContext } from "src/hooks/Config";
import { Interactive } from "src/utils/interactive"; import { Interactive } from "src/utils/interactive";
import { languageMap } from "src/utils/caption";
export const VIDEO_PLAYER_ID = "VideoJsPlayer"; export const VIDEO_PLAYER_ID = "VideoJsPlayer";
@ -160,6 +161,13 @@ export const ScenePlayer: React.FC<IScenePlayerProps> = ({
const player = VideoJS(videoElement, options); const player = VideoJS(videoElement, options);
const settings = (player as any).textTrackSettings;
settings.setValues({
backgroundColor: "#000",
backgroundOpacity: "0.5",
});
settings.updateDisplay();
(player as any).landscapeFullscreen({ (player as any).landscapeFullscreen({
fullscreen: { fullscreen: {
enterOnRotate: true, enterOnRotate: true,
@ -215,6 +223,38 @@ export const ScenePlayer: React.FC<IScenePlayerProps> = ({
}, []); }, []);
useEffect(() => { useEffect(() => {
let prevCaptionOffset = 0;
// Shifts every cue of every remote text track forward by `offset` seconds.
// NOTE(review): mutates the cues in place; presumably used when switching
// back to a direct stream so captions realign with the true timeline —
// confirm against handleOffset's call sites.
function addCaptionOffset(player: VideoJsPlayer, offset: number) {
  const tracks = player.remoteTextTracks();
  for (let i = 0; i < tracks.length; i++) {
    const track = tracks[i];
    const { cues } = track;
    if (cues) {
      for (let j = 0; j < cues.length; j++) {
        const cue = cues[j];
        cue.startTime = cue.startTime + offset;
        cue.endTime = cue.endTime + offset;
      }
    }
  }
}
// Rebases every cue of every remote text track by (prevCaptionOffset - offset)
// seconds, where prevCaptionOffset is the closure variable holding the last
// applied offset. Net effect: cues end up shifted by -offset relative to
// their previously offset position.
// NOTE(review): despite the name this is not the exact inverse of
// addCaptionOffset unless prevCaptionOffset is 0 — verify intended pairing.
function removeCaptionOffset(player: VideoJsPlayer, offset: number) {
  const tracks = player.remoteTextTracks();
  for (let i = 0; i < tracks.length; i++) {
    const track = tracks[i];
    const { cues } = track;
    if (cues) {
      for (let j = 0; j < cues.length; j++) {
        const cue = cues[j];
        cue.startTime = cue.startTime + prevCaptionOffset - offset;
        cue.endTime = cue.endTime + prevCaptionOffset - offset;
      }
    }
  }
}
function handleOffset(player: VideoJsPlayer) { function handleOffset(player: VideoJsPlayer) {
if (!scene) return; if (!scene) return;
@ -222,11 +262,25 @@ export const ScenePlayer: React.FC<IScenePlayerProps> = ({
const isDirect = const isDirect =
currentSrc.endsWith("/stream") || currentSrc.endsWith("/stream.m3u8"); currentSrc.endsWith("/stream") || currentSrc.endsWith("/stream.m3u8");
const curTime = player.currentTime();
if (!isDirect) { if (!isDirect) {
(player as any).setOffsetDuration(scene.file.duration); (player as any).setOffsetDuration(scene.file.duration);
} else { } else {
(player as any).clearOffsetDuration(); (player as any).clearOffsetDuration();
} }
if (curTime != prevCaptionOffset) {
if (!isDirect) {
removeCaptionOffset(player, curTime);
prevCaptionOffset = curTime;
} else {
if (prevCaptionOffset != 0) {
addCaptionOffset(player, prevCaptionOffset);
prevCaptionOffset = 0;
}
}
}
} }
function handleError(play: boolean) { function handleError(play: boolean) {
@ -268,6 +322,58 @@ export const ScenePlayer: React.FC<IScenePlayerProps> = ({
return false; return false;
} }
// Returns the user's preferred bare language code, derived from the browser
// locale by stripping any region suffix (e.g. "en-US" or "en_US" -> "en").
function getDefaultLanguageCode() {
  let code = window.navigator.language;
  for (const sep of ["-", "_"]) {
    if (code.includes(sep)) {
      code = code.split(sep)[0];
    }
  }
  return code;
}
// Registers one remote text track per caption file attached to the scene.
// The track label is the human-readable language name (falling back to the
// raw code) plus the caption type, e.g. "English (srt)". The first track
// whose language matches the browser locale is marked as the default.
function loadCaptions(player: VideoJsPlayer) {
  if (!scene) return;
  if (scene.captions) {
    var languageCode = getDefaultLanguageCode();
    var hasDefault = false;
    for (let caption of scene.captions) {
      var lang = caption.language_code;
      // Prefer the native language name when we know it; otherwise show
      // the raw ISO code.
      var label = lang;
      if (languageMap.has(lang)) {
        label = languageMap.get(lang)!;
      }
      label = label + " (" + caption.caption_type + ")";
      // Only the first locale match becomes the default track.
      var setAsDefault = !hasDefault && languageCode == lang;
      if (!hasDefault && setAsDefault) {
        hasDefault = true;
      }
      player.addRemoteTextTrack(
        {
          // The server selects the caption file via lang/type query params.
          src:
            scene.paths.caption +
            "?lang=" +
            lang +
            "&type=" +
            caption.caption_type,
          kind: "captions",
          srclang: lang,
          label: label,
          default: setAsDefault,
        },
        true
      );
    }
  }
}
if (!scene || scene.id === sceneId.current) return; if (!scene || scene.id === sceneId.current) return;
sceneId.current = scene.id; sceneId.current = scene.id;
@ -285,8 +391,8 @@ export const ScenePlayer: React.FC<IScenePlayerProps> = ({
(player as any).clearOffsetDuration(); (player as any).clearOffsetDuration();
const tracks = player.remoteTextTracks(); const tracks = player.remoteTextTracks();
if (tracks.length > 0) { for (let i = 0; i < tracks.length; i++) {
player.removeRemoteTextTrack(tracks[0] as any); player.removeRemoteTextTrack(tracks[i] as any);
} }
player.src( player.src(
@ -308,6 +414,10 @@ export const ScenePlayer: React.FC<IScenePlayerProps> = ({
); );
} }
if (scene.captions?.length! > 0) {
loadCaptions(player);
}
player.currentTime(0); player.currentTime(0);
player.loop( player.loop(
@ -338,12 +448,10 @@ export const ScenePlayer: React.FC<IScenePlayerProps> = ({
if (scene.interactive) { if (scene.interactive) {
interactiveClient.ensurePlaying(this.currentTime()); interactiveClient.ensurePlaying(this.currentTime());
} }
setTime(this.currentTime()); setTime(this.currentTime());
}); });
player.on("seeking", function (this: VideoJsPlayer) { player.on("seeking", function (this: VideoJsPlayer) {
// backwards compatibility - may want to remove this in future
this.play(); this.play();
}); });

View file

@ -492,6 +492,10 @@ $sceneTabWidth: 450px;
z-index: 1; z-index: 1;
} }
.vjs-text-track-settings select {
background: #fff;
}
.VideoPlayer .VideoPlayer
.video-js .video-js
.vjs-seek-button.skip-back .vjs-seek-button.skip-back

View file

@ -0,0 +1,14 @@
# Captions
Stash supports captioning with SRT and VTT files.
These files need to be named as follows:
## Scene
- {scene_name}.{language_code}.ext
- {scene_name}.ext
Where `{language_code}` is defined by the [ISO 639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (two-letter) standard and `ext` is the file extension. Caption files without a language code will be labeled as Unknown in the video player but will work fine.
Scenes with captions can be filtered with the `captions` criterion.

View file

@ -743,6 +743,7 @@
"instagram": "Instagram", "instagram": "Instagram",
"interactive": "Interactive", "interactive": "Interactive",
"interactive_speed": "Interactive speed", "interactive_speed": "Interactive speed",
"captions": "Captions",
"isMissing": "Is Missing", "isMissing": "Is Missing",
"library": "Library", "library": "Library",
"loading": { "loading": {

View file

@ -0,0 +1,42 @@
import { CriterionModifier } from "src/core/generated-graphql";
import { languageMap, valueToCode } from "src/utils/caption";
import { CriterionType } from "../types";
import { CriterionOption, StringCriterion } from "./criterion";
// Selectable option values for the captions criterion: the human-readable
// names from the caption language map.
const languageStrings = Array.from(languageMap.values());

// Criterion option for filtering scenes by caption language. Supports
// include/exclude of a specific language as well as presence/absence of
// any captions at all.
class CaptionsCriterionOptionType extends CriterionOption {
  constructor(value: CriterionType) {
    super({
      messageID: value,
      type: value,
      parameterName: value,
      modifierOptions: [
        CriterionModifier.Includes,
        CriterionModifier.Excludes,
        CriterionModifier.IsNull,
        CriterionModifier.NotNull,
      ],
      options: languageStrings,
    });
  }
}

// Singleton instance registered with the scene list filter options.
export const CaptionsCriterionOption = new CaptionsCriterionOptionType(
  "captions"
);
// String criterion for the captions filter. The UI stores the displayed
// language name; toCriterionInput translates it back to the ISO code the
// GraphQL API expects (undefined when the name is unknown).
export class CaptionCriterion extends StringCriterion {
  protected toCriterionInput() {
    const value = valueToCode(this.value);

    return {
      value,
      modifier: this.modifier,
    };
  }

  constructor() {
    super(CaptionsCriterionOption);
  }
}

View file

@ -43,6 +43,7 @@ import { CriterionType } from "../types";
import { InteractiveCriterion } from "./interactive"; import { InteractiveCriterion } from "./interactive";
import { RatingCriterionOption } from "./rating"; import { RatingCriterionOption } from "./rating";
import { DuplicatedCriterion, PhashCriterionOption } from "./phash"; import { DuplicatedCriterion, PhashCriterionOption } from "./phash";
import { CaptionCriterion } from "./captions";
export function makeCriteria(type: CriterionType = "none") { export function makeCriteria(type: CriterionType = "none") {
switch (type) { switch (type) {
@ -159,6 +160,8 @@ export function makeCriteria(type: CriterionType = "none") {
return new StringCriterion(new StringCriterionOption(type, type)); return new StringCriterion(new StringCriterionOption(type, type));
case "interactive": case "interactive":
return new InteractiveCriterion(); return new InteractiveCriterion();
case "captions":
return new CaptionCriterion();
case "parent_tag_count": case "parent_tag_count":
return new NumberCriterion( return new NumberCriterion(
new MandatoryNumberCriterionOption( new MandatoryNumberCriterionOption(

View file

@ -23,6 +23,7 @@ import {
PhashCriterionOption, PhashCriterionOption,
} from "./criteria/phash"; } from "./criteria/phash";
import { PerformerFavoriteCriterionOption } from "./criteria/favorite"; import { PerformerFavoriteCriterionOption } from "./criteria/favorite";
import { CaptionsCriterionOption } from "./criteria/captions";
const defaultSortBy = "date"; const defaultSortBy = "date";
const sortByOptions = [ const sortByOptions = [
@ -78,6 +79,7 @@ const criterionOptions = [
createStringCriterionOption("url"), createStringCriterionOption("url"),
createStringCriterionOption("stash_id"), createStringCriterionOption("stash_id"),
InteractiveCriterionOption, InteractiveCriterionOption,
CaptionsCriterionOption,
createMandatoryNumberCriterionOption("interactive_speed"), createMandatoryNumberCriterionOption("interactive_speed"),
]; ];

View file

@ -113,6 +113,7 @@ export type CriterionType =
| "stash_id" | "stash_id"
| "interactive" | "interactive"
| "interactive_speed" | "interactive_speed"
| "captions"
| "name" | "name"
| "details" | "details"
| "title" | "title"

View file

@ -0,0 +1,22 @@
// Maps ISO 639-1 language codes to the language's own (native) name, used
// as the caption track label and as the captions-criterion option text.
// Fixes: "Deutsche" -> "Deutsch" (native name of German), "Holandés" (the
// Spanish word for Dutch) -> "Nederlands", "日本" (Japan) -> "日本語"
// (Japanese language), "한국인" (Korean person) -> "한국어" (Korean language).
// NOTE(review): saved filters that stored the old display strings will no
// longer reverse-map via valueToCode — confirm whether a migration is needed.
export const languageMap = new Map<string, string>([
  ["de", "Deutsch"],
  ["en", "English"],
  ["es", "Español"],
  ["fr", "Français"],
  ["it", "Italiano"],
  ["ja", "日本語"],
  ["ko", "한국어"],
  ["nl", "Nederlands"],
  ["pt", "Português"],
  ["00", "Unknown"], // stash reserved language code
]);
// Reverse lookup: translate a displayed language name back to its ISO 639-1
// code. Returns undefined for empty input or an unknown language name.
export const valueToCode = (value?: string | null) => {
  if (!value) {
    return undefined;
  }
  for (const [code, name] of languageMap.entries()) {
    if (name === value) {
      return code;
    }
  }
  return undefined;
};

8
vendor/github.com/asticode/go-astikit/.travis.sh generated vendored Normal file
View file

@ -0,0 +1,8 @@
#!/bin/sh
# CI guard: go-astikit must stay free of external dependencies.
# `go list -m all` prints only the module's own path when the module has no
# requirements, so any other output means a dependency was added.
if [ "$(go list -m all)" != "github.com/asticode/go-astikit" ]; then
	echo "This repo doesn't allow any external dependencies"
	exit 1
else
	echo "cheers!"
fi

15
vendor/github.com/asticode/go-astikit/.travis.yml generated vendored Normal file
View file

@ -0,0 +1,15 @@
language: go
go:
- 1.x
- tip
install:
- bash .travis.sh
- go get -t ./...
- go get golang.org/x/tools/cmd/cover
- go get github.com/mattn/goveralls
matrix:
allow_failures:
- go: tip
script:
- go test -race -v -coverprofile=coverage.out
- $HOME/gopath/bin/goveralls -coverprofile=coverage.out -service=travis-ci

21
vendor/github.com/asticode/go-astikit/LICENSE generated vendored Normal file
View file

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2020 Quentin Renard
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

6
vendor/github.com/asticode/go-astikit/README.md generated vendored Normal file
View file

@ -0,0 +1,6 @@
[![GoReportCard](http://goreportcard.com/badge/github.com/asticode/go-astikit)](http://goreportcard.com/report/github.com/asticode/go-astikit)
[![GoDoc](https://godoc.org/github.com/asticode/go-astikit?status.svg)](https://godoc.org/github.com/asticode/go-astikit)
[![Travis](https://travis-ci.org/asticode/go-astikit.svg?branch=master)](https://travis-ci.org/asticode/go-astikit#)
[![Coveralls](https://coveralls.io/repos/github/asticode/go-astikit/badge.svg?branch=master)](https://coveralls.io/github/asticode/go-astikit)
`astikit` is a set of golang helpers that don't require any external dependencies.

214
vendor/github.com/asticode/go-astikit/archive.go generated vendored Normal file
View file

@ -0,0 +1,214 @@
package astikit
import (
"archive/zip"
"context"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
)
// zipInternalPath splits a path of the form /path/to/archive.zip/inner/path
// into the archive path (external) and the path inside the archive
// (internal). The internal part never keeps a leading path separator. When
// p contains no ".zip", the whole path is returned as external and internal
// is empty.
func zipInternalPath(p string) (string, string) {
	idx := strings.Index(p, ".zip")
	if idx == -1 {
		return p, ""
	}
	external := p[:idx+len(".zip")]
	internal := strings.TrimPrefix(p[idx+len(".zip"):], string(os.PathSeparator))
	return external, internal
}
// Zip zips a src directory (or file) into a dst archive.
// Possible dst formats are:
//   - /path/to/zip.zip
//   - /path/to/zip.zip/root/path (entries are placed under root/path inside
//     the archive)
//
// Copying is cancellable through ctx (via the package Copy helper).
func Zip(ctx context.Context, dst, src string) (err error) {
	// Get external/internal path
	externalPath, internalPath := zipInternalPath(dst)

	// Make sure the directory containing the archive exists
	if err = os.MkdirAll(filepath.Dir(externalPath), DefaultDirMode); err != nil {
		return fmt.Errorf("astikit: mkdirall %s failed: %w", filepath.Dir(externalPath), err)
	}

	// Create destination file
	var dstFile *os.File
	if dstFile, err = os.Create(externalPath); err != nil {
		return fmt.Errorf("astikit: creating %s failed: %w", externalPath, err)
	}
	defer dstFile.Close()

	// Create zip writer
	var zw = zip.NewWriter(dstFile)
	defer zw.Close()

	// Walk the source tree, adding an entry per file/directory
	if err = filepath.Walk(src, func(path string, info os.FileInfo, e error) (err error) {
		// Process error from the walk itself
		if e != nil {
			err = e
			return
		}

		// Init header from the file's metadata
		var h *zip.FileHeader
		if h, err = zip.FileInfoHeader(info); err != nil {
			return fmt.Errorf("astikit: initializing zip header failed: %w", err)
		}

		// Set header name relative to src, under the optional internal root.
		// Directories get a trailing separator; files are deflated.
		h.Name = filepath.Join(internalPath, strings.TrimPrefix(path, src))
		if info.IsDir() {
			h.Name += string(os.PathSeparator)
		} else {
			h.Method = zip.Deflate
		}

		// Create writer for this entry
		var w io.Writer
		if w, err = zw.CreateHeader(h); err != nil {
			return fmt.Errorf("astikit: creating zip header failed: %w", err)
		}

		// If path is a dir, the header alone is enough
		if info.IsDir() {
			return
		}

		// Open the file being archived
		var walkFile *os.File
		if walkFile, err = os.Open(path); err != nil {
			return fmt.Errorf("astikit: opening %s failed: %w", path, err)
		}
		defer walkFile.Close()

		// Copy content (ctx-aware)
		if _, err = Copy(ctx, w, walkFile); err != nil {
			return fmt.Errorf("astikit: copying failed: %w", err)
		}
		return
	}); err != nil {
		return fmt.Errorf("astikit: walking failed: %w", err)
	}
	return
}
// Unzip unzips a src archive into a dst directory.
// Possible src formats are:
//   - /path/to/zip.zip
//   - /path/to/zip.zip/root/path (extract only entries under root/path)
//
// Directories are created first, then regular files, then symlinks, so that
// file creation never races its parent directory's mode.
func Unzip(ctx context.Context, dst, src string) (err error) {
	// Get external/internal path
	externalPath, internalPath := zipInternalPath(src)

	// Make sure the destination exists
	if err = os.MkdirAll(dst, DefaultDirMode); err != nil {
		return fmt.Errorf("astikit: mkdirall %s failed: %w", dst, err)
	}

	// Open overall reader
	var r *zip.ReadCloser
	if r, err = zip.OpenReader(externalPath); err != nil {
		return fmt.Errorf("astikit: opening overall zip reader on %s failed: %w", externalPath, err)
	}
	defer r.Close()

	// Loop through entries, bucketing them by type and filtering on the
	// optional internal path prefix
	var dirs, files, symlinks = make(map[string]*zip.File), make(map[string]*zip.File), make(map[string]*zip.File)
	for _, f := range r.File {
		// Validate internal path
		if internalPath != "" && !strings.HasPrefix(f.Name, internalPath) {
			continue
		}
		var p = filepath.Join(dst, strings.TrimPrefix(f.Name, internalPath))

		// Check file type
		if f.FileInfo().Mode()&os.ModeSymlink != 0 {
			symlinks[p] = f
		} else if f.FileInfo().IsDir() {
			dirs[p] = f
		} else {
			files[p] = f
		}
	}

	// An internal path that matched nothing is an error
	if internalPath != "" && len(dirs) == 0 && len(files) == 0 && len(symlinks) == 0 {
		return fmt.Errorf("astikit: content in archive does not match specified internal path %s", internalPath)
	}

	// Create dirs, preserving the archived permissions
	for p, f := range dirs {
		if err = os.MkdirAll(p, f.FileInfo().Mode().Perm()); err != nil {
			return fmt.Errorf("astikit: mkdirall %s failed: %w", p, err)
		}
	}

	// Create files
	for p, f := range files {
		if err = createZipFile(ctx, f, p); err != nil {
			return fmt.Errorf("astikit: creating zip file into %s failed: %w", p, err)
		}
	}

	// Create symlinks
	for p, f := range symlinks {
		if err = createZipSymlink(f, p); err != nil {
			return fmt.Errorf("astikit: creating zip symlink into %s failed: %w", p, err)
		}
	}
	return
}
// createZipFile extracts a single regular-file entry f to path p, preserving
// the archived permission bits. Copying is cancellable through ctx.
func createZipFile(ctx context.Context, f *zip.File, p string) (err error) {
	// Open file reader
	var fr io.ReadCloser
	if fr, err = f.Open(); err != nil {
		return fmt.Errorf("astikit: opening zip reader on file %s failed: %w", f.Name, err)
	}
	defer fr.Close()

	// Since dir entries don't always exist in archives we make sure the
	// directory of the file exists with default file mode
	if err = os.MkdirAll(filepath.Dir(p), DefaultDirMode); err != nil {
		return fmt.Errorf("astikit: mkdirall %s failed: %w", filepath.Dir(p), err)
	}

	// Open the destination file, truncating anything already there
	var fl *os.File
	if fl, err = os.OpenFile(p, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.FileInfo().Mode().Perm()); err != nil {
		return fmt.Errorf("astikit: opening file %s failed: %w", p, err)
	}
	defer fl.Close()

	// Copy (ctx-aware)
	if _, err = Copy(ctx, fl, fr); err != nil {
		return fmt.Errorf("astikit: copying %s into %s failed: %w", f.Name, p, err)
	}
	return
}
// createZipSymlink recreates a symlink entry f at path p. In zip archives a
// symlink is stored as a file whose content is the link target.
func createZipSymlink(f *zip.File, p string) (err error) {
	// Open file reader
	var fr io.ReadCloser
	if fr, err = f.Open(); err != nil {
		return fmt.Errorf("astikit: opening zip reader on file %s failed: %w", f.Name, err)
	}
	defer fr.Close()

	// The target path is the content of the entry
	var b []byte
	if b, err = ioutil.ReadAll(fr); err != nil {
		return fmt.Errorf("astikit: ioutil.Readall on %s failed: %w", f.Name, err)
	}

	// Create the symlink
	if err = os.Symlink(string(b), p); err != nil {
		return fmt.Errorf("astikit: creating symlink from %s to %s failed: %w", string(b), p, err)
	}
	return
}

8
vendor/github.com/asticode/go-astikit/astikit.go generated vendored Normal file
View file

@ -0,0 +1,8 @@
package astikit
import "os"
// Default modes used by this package when it has to create filesystem
// entries itself.
var (
	// DefaultDirMode is the permission mode applied by helpers such as
	// Zip/Unzip when they create directories.
	DefaultDirMode os.FileMode = 0755
)

297
vendor/github.com/asticode/go-astikit/binary.go generated vendored Normal file
View file

@ -0,0 +1,297 @@
package astikit
import (
"encoding/binary"
"errors"
"io"
)
// BitsWriter represents an object that can write individual bits into a writer
// in a developer-friendly way. Check out the Write method for more information.
// This is particularly helpful when you want to build a slice of bytes based
// on individual bits for testing purposes.
type BitsWriter struct {
bo binary.ByteOrder
cache byte
cacheLen byte
bsCache []byte
w io.Writer
writeCb BitsWriterWriteCallback
}
type BitsWriterWriteCallback func([]byte)
// BitsWriterOptions represents BitsWriter options
type BitsWriterOptions struct {
ByteOrder binary.ByteOrder
// WriteCallback is called every time when full byte is written
WriteCallback BitsWriterWriteCallback
Writer io.Writer
}
// NewBitsWriter creates a new BitsWriter
func NewBitsWriter(o BitsWriterOptions) (w *BitsWriter) {
w = &BitsWriter{
bo: o.ByteOrder,
bsCache: make([]byte, 1),
w: o.Writer,
writeCb: o.WriteCallback,
}
if w.bo == nil {
w.bo = binary.BigEndian
}
return
}
func (w *BitsWriter) SetWriteCallback(cb BitsWriterWriteCallback) {
w.writeCb = cb
}
// Write writes bits into the writer. Bits are only written when there are
// enough to create a byte. When using a string or a bool, bits are added
// from left to right as if
// Available types are:
// - string("10010"): processed as n bits, n being the length of the input
// - []byte: processed as n bytes, n being the length of the input
// - bool: processed as one bit
// - uint8/uint16/uint32/uint64: processed as n bits, if type is uintn
func (w *BitsWriter) Write(i interface{}) error {
// Transform input into "10010" format
switch a := i.(type) {
case string:
for _, r := range a {
var err error
if r == '1' {
err = w.writeBit(1)
} else {
err = w.writeBit(0)
}
if err != nil {
return err
}
}
case []byte:
for _, b := range a {
if err := w.writeFullByte(b); err != nil {
return err
}
}
case bool:
if a {
return w.writeBit(1)
} else {
return w.writeBit(0)
}
case uint8:
return w.writeFullByte(a)
case uint16:
return w.writeFullInt(uint64(a), 2)
case uint32:
return w.writeFullInt(uint64(a), 4)
case uint64:
return w.writeFullInt(a, 8)
default:
return errors.New("astikit: invalid type")
}
return nil
}
// Writes exactly n bytes from bs
// Writes first n bytes of bs if len(bs) > n
// Pads with padByte at the end if len(bs) < n
func (w *BitsWriter) WriteBytesN(bs []byte, n int, padByte uint8) error {
if len(bs) >= n {
return w.Write(bs[:n])
}
if err := w.Write(bs); err != nil {
return err
}
// no bytes.Repeat here to avoid allocation
for i := 0; i < n-len(bs); i++ {
if err := w.Write(padByte); err != nil {
return err
}
}
return nil
}
func (w *BitsWriter) writeFullInt(in uint64, len int) error {
if w.bo == binary.BigEndian {
for i := len - 1; i >= 0; i-- {
err := w.writeFullByte(byte((in >> (i * 8)) & 0xff))
if err != nil {
return err
}
}
} else {
for i := 0; i < len; i++ {
err := w.writeFullByte(byte((in >> (i * 8)) & 0xff))
if err != nil {
return err
}
}
}
return nil
}
func (w *BitsWriter) flushBsCache() error {
if _, err := w.w.Write(w.bsCache); err != nil {
return err
}
if w.writeCb != nil {
w.writeCb(w.bsCache)
}
return nil
}
func (w *BitsWriter) writeFullByte(b byte) error {
if w.cacheLen == 0 {
w.bsCache[0] = b
} else {
w.bsCache[0] = w.cache | (b >> w.cacheLen)
w.cache = b << (8 - w.cacheLen)
}
return w.flushBsCache()
}
func (w *BitsWriter) writeBit(bit byte) error {
w.cache = w.cache | (bit)<<(7-w.cacheLen)
w.cacheLen++
if w.cacheLen == 8 {
w.bsCache[0] = w.cache
if err := w.flushBsCache(); err != nil {
return err
}
w.cacheLen = 0
w.cache = 0
}
return nil
}
// WriteN writes the input into n bits
func (w *BitsWriter) WriteN(i interface{}, n int) error {
var toWrite uint64
switch a := i.(type) {
case uint8:
toWrite = uint64(a)
case uint16:
toWrite = uint64(a)
case uint32:
toWrite = uint64(a)
case uint64:
toWrite = a
default:
return errors.New("astikit: invalid type")
}
for i := n - 1; i >= 0; i-- {
err := w.writeBit(byte(toWrite>>i) & 0x1)
if err != nil {
return err
}
}
return nil
}
// BitsWriterBatch allows chaining multiple Write* calls and checking for an
// error only once at the end. After the first failure all subsequent calls
// become no-ops.
// For more info see https://github.com/asticode/go-astikit/pull/6
type BitsWriterBatch struct {
	err error
	w   *BitsWriter
}

// NewBitsWriterBatch creates a new BitsWriterBatch wrapping w.
func NewBitsWriterBatch(w *BitsWriter) BitsWriterBatch {
	return BitsWriterBatch{
		w: w,
	}
}

// Write calls BitsWriter.Write if there was no write error before.
func (b *BitsWriterBatch) Write(i interface{}) {
	if b.err == nil {
		b.err = b.w.Write(i)
	}
}

// WriteN calls BitsWriter.WriteN if there was no write error before.
func (b *BitsWriterBatch) WriteN(i interface{}, n int) {
	if b.err == nil {
		b.err = b.w.WriteN(i, n)
	}
}

// WriteBytesN calls BitsWriter.WriteBytesN if there was no write error before.
func (b *BitsWriterBatch) WriteBytesN(bs []byte, n int, padByte uint8) {
	if b.err == nil {
		b.err = b.w.WriteBytesN(bs, n, padByte)
	}
}

// Err returns the first write error encountered, or nil.
func (b *BitsWriterBatch) Err() error {
	return b.err
}
// byteHamming84Tab maps a Hamming 8/4 encoded byte to its decoded 4-bit
// value; 0xff marks an uncorrectable (invalid) code word.
var byteHamming84Tab = [256]uint8{
	0x01, 0xff, 0xff, 0x08, 0xff, 0x0c, 0x04, 0xff, 0xff, 0x08, 0x08, 0x08, 0x06, 0xff, 0xff, 0x08,
	0xff, 0x0a, 0x02, 0xff, 0x06, 0xff, 0xff, 0x0f, 0x06, 0xff, 0xff, 0x08, 0x06, 0x06, 0x06, 0xff,
	0xff, 0x0a, 0x04, 0xff, 0x04, 0xff, 0x04, 0x04, 0x00, 0xff, 0xff, 0x08, 0xff, 0x0d, 0x04, 0xff,
	0x0a, 0x0a, 0xff, 0x0a, 0xff, 0x0a, 0x04, 0xff, 0xff, 0x0a, 0x03, 0xff, 0x06, 0xff, 0xff, 0x0e,
	0x01, 0x01, 0x01, 0xff, 0x01, 0xff, 0xff, 0x0f, 0x01, 0xff, 0xff, 0x08, 0xff, 0x0d, 0x05, 0xff,
	0x01, 0xff, 0xff, 0x0f, 0xff, 0x0f, 0x0f, 0x0f, 0xff, 0x0b, 0x03, 0xff, 0x06, 0xff, 0xff, 0x0f,
	0x01, 0xff, 0xff, 0x09, 0xff, 0x0d, 0x04, 0xff, 0xff, 0x0d, 0x03, 0xff, 0x0d, 0x0d, 0xff, 0x0d,
	0xff, 0x0a, 0x03, 0xff, 0x07, 0xff, 0xff, 0x0f, 0x03, 0xff, 0x03, 0x03, 0xff, 0x0d, 0x03, 0xff,
	0xff, 0x0c, 0x02, 0xff, 0x0c, 0x0c, 0xff, 0x0c, 0x00, 0xff, 0xff, 0x08, 0xff, 0x0c, 0x05, 0xff,
	0x02, 0xff, 0x02, 0x02, 0xff, 0x0c, 0x02, 0xff, 0xff, 0x0b, 0x02, 0xff, 0x06, 0xff, 0xff, 0x0e,
	0x00, 0xff, 0xff, 0x09, 0xff, 0x0c, 0x04, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0xff, 0xff, 0x0e,
	0xff, 0x0a, 0x02, 0xff, 0x07, 0xff, 0xff, 0x0e, 0x00, 0xff, 0xff, 0x0e, 0xff, 0x0e, 0x0e, 0x0e,
	0x01, 0xff, 0xff, 0x09, 0xff, 0x0c, 0x05, 0xff, 0xff, 0x0b, 0x05, 0xff, 0x05, 0xff, 0x05, 0x05,
	0xff, 0x0b, 0x02, 0xff, 0x07, 0xff, 0xff, 0x0f, 0x0b, 0x0b, 0xff, 0x0b, 0xff, 0x0b, 0x05, 0xff,
	0xff, 0x09, 0x09, 0x09, 0x07, 0xff, 0xff, 0x09, 0x00, 0xff, 0xff, 0x09, 0xff, 0x0d, 0x05, 0xff,
	0x07, 0xff, 0xff, 0x09, 0x07, 0x07, 0x07, 0xff, 0xff, 0x0b, 0x03, 0xff, 0x07, 0xff, 0xff, 0x0e,
}

// ByteHamming84Decode hamming 8/4 decodes i. ok is false when i is an
// uncorrectable code word, in which case o is meaningless.
func ByteHamming84Decode(i uint8) (o uint8, ok bool) {
	o = byteHamming84Tab[i]
	if o == 0xff {
		return
	}
	ok = true
	return
}
// byteParityTab is 1 at index i when i has an odd number of set bits,
// 0 otherwise (i.e. a lookup table for odd-parity validation).
var byteParityTab = [256]uint8{
	0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00,
	0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01,
	0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01,
	0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00,
	0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01,
	0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00,
	0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00,
	0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01,
	0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01,
	0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00,
	0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00,
	0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01,
	0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00,
	0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01,
	0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01,
	0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00,
}

// ByteParity returns the 7 data bits of i (parity bit stripped) and whether
// i passes the odd-parity check.
func ByteParity(i uint8) (o uint8, ok bool) {
	ok = byteParityTab[i] == 1
	o = i & 0x7f
	return
}

164
vendor/github.com/asticode/go-astikit/bytes.go generated vendored Normal file
View file

@ -0,0 +1,164 @@
package astikit
import "fmt"
// BytesIterator represents an object capable of iterating sequentially and
// safely through a slice of bytes. This is particularly useful when you need
// to iterate through a slice of bytes and don't want to check for "index out
// of range" errors manually.
type BytesIterator struct {
	bs     []byte // underlying slice (never copied on construction)
	offset int    // current read position
}

// NewBytesIterator creates a new BytesIterator over bs.
func NewBytesIterator(bs []byte) *BytesIterator {
	return &BytesIterator{bs: bs}
}

// NextByte returns the next byte and advances the offset by one.
// An error is returned when no byte is left.
func (i *BytesIterator) NextByte() (b byte, err error) {
	if len(i.bs) < i.offset+1 {
		err = fmt.Errorf("astikit: slice length is %d, offset %d is invalid", len(i.bs), i.offset)
		return
	}
	b = i.bs[i.offset]
	i.offset++
	return
}

// NextBytes returns a copy of the n next bytes and advances the offset.
// An error is returned when fewer than n bytes are left.
func (i *BytesIterator) NextBytes(n int) (bs []byte, err error) {
	if len(i.bs) < i.offset+n {
		err = fmt.Errorf("astikit: slice length is %d, offset %d is invalid", len(i.bs), i.offset+n)
		return
	}
	bs = make([]byte, n)
	copy(bs, i.bs[i.offset:i.offset+n])
	i.offset += n
	return
}

// NextBytesNoCopy returns the n next bytes.
// Be careful with this function as it doesn't make a copy of returned data.
// bs will point to the internal BytesIterator buffer.
// If you need to modify returned bytes or store them for some time, use
// NextBytes instead.
func (i *BytesIterator) NextBytesNoCopy(n int) (bs []byte, err error) {
	if len(i.bs) < i.offset+n {
		err = fmt.Errorf("astikit: slice length is %d, offset %d is invalid", len(i.bs), i.offset+n)
		return
	}
	bs = i.bs[i.offset : i.offset+n]
	i.offset += n
	return
}

// Seek moves the offset to the nth byte (no bounds check; a later read
// reports the error).
func (i *BytesIterator) Seek(n int) {
	i.offset = n
}

// Skip moves the offset by n bytes (n may be negative).
func (i *BytesIterator) Skip(n int) {
	i.offset += n
}

// HasBytesLeft checks whether there are bytes left to read.
func (i *BytesIterator) HasBytesLeft() bool {
	return i.offset < len(i.bs)
}

// Offset returns the current offset.
func (i *BytesIterator) Offset() int {
	return i.offset
}

// Dump returns a copy of the remaining bytes and consumes them.
func (i *BytesIterator) Dump() (bs []byte) {
	if !i.HasBytesLeft() {
		return
	}
	bs = make([]byte, len(i.bs)-i.offset)
	copy(bs, i.bs[i.offset:len(i.bs)])
	i.offset = len(i.bs)
	return
}

// Len returns the total slice length (not the number of bytes left).
func (i *BytesIterator) Len() int {
	return len(i.bs)
}
// Padding directions.
const (
	padRight = "right"
	padLeft  = "left"
)

// bytesPadder pads a byte slice to a fixed length with a repeated byte,
// optionally cutting inputs that are too long.
type bytesPadder struct {
	cut       bool   // when true, inputs longer than length are truncated
	direction string // padLeft or padRight
	length    int    // target length
	repeat    byte   // padding byte
}

// newBytesPadder returns a padder that pads on the left by default and does
// not cut over-long inputs.
func newBytesPadder(repeat byte, length int) *bytesPadder {
	return &bytesPadder{
		direction: padLeft,
		length:    length,
		repeat:    repeat,
	}
}

// pad returns i padded with p.repeat up to p.length bytes on the configured
// side. Inputs already at or above the target length are returned as-is
// (truncated to p.length only when p.cut is set).
//
// Fix: the previous version appended a redundant extra pad byte on every
// loop iteration and truncated the excess afterwards, and grew the slice
// byte-by-byte (quadratic for left-padding). The filler is now built once;
// the output is unchanged.
func (p *bytesPadder) pad(i []byte) []byte {
	if len(i) == p.length {
		return i
	}
	if len(i) > p.length {
		if p.cut {
			return i[:p.length]
		}
		return i
	}
	fill := make([]byte, p.length-len(i))
	for idx := range fill {
		fill[idx] = p.repeat
	}
	if p.direction == padRight {
		o := make([]byte, 0, p.length)
		o = append(o, i...)
		return append(o, fill...)
	}
	return append(fill, i...)
}
// PadOption represents a Pad option.
type PadOption func(p *bytesPadder)

// PadCut is a PadOption.
// It indicates to the padder it must cut the input to the provided length
// if its original length is bigger.
func PadCut(p *bytesPadder) { p.cut = true }

// PadLeft is a PadOption.
// It indicates additional bytes have to be added to the left (this is also
// the default direction).
func PadLeft(p *bytesPadder) { p.direction = padLeft }

// PadRight is a PadOption.
// It indicates additional bytes have to be added to the right.
func PadRight(p *bytesPadder) { p.direction = padRight }

// BytesPad pads the slice of bytes with `repeat` up to `length`, modified by
// the provided options.
func BytesPad(i []byte, repeat byte, length int, options ...PadOption) []byte {
	p := newBytesPadder(repeat, length)
	for _, o := range options {
		o(p)
	}
	return p.pad(i)
}

// StrPad pads the string with `repeat` up to `length`, modified by the
// provided options.
func StrPad(i string, repeat rune, length int, options ...PadOption) string {
	return string(BytesPad([]byte(i), byte(repeat), length, options...))
}

57
vendor/github.com/asticode/go-astikit/defer.go generated vendored Normal file
View file

@ -0,0 +1,57 @@
package astikit
import (
"sync"
)
// CloseFunc is a method that closes something
type CloseFunc func() error

// Closer is an object that can close several things
type Closer struct {
    fs []CloseFunc // close funcs, run in order by Close
    m  *sync.Mutex // locks fs
}

// NewCloser creates a new closer
func NewCloser() *Closer {
    return &Closer{
        m: &sync.Mutex{},
    }
}
// Close implements the io.Closer interface
// It runs every registered close func, collects their errors and resets the list.
func (c *Closer) Close() error {
    c.m.Lock()
    defer c.m.Unlock()

    // Run all close funcs, collecting every error
    errs := NewErrors()
    for _, f := range c.fs {
        errs.Add(f())
    }

    // Reset closers so a second Close is a no-op
    c.fs = []CloseFunc{}

    if !errs.IsNil() {
        return errs
    }
    return nil
}

// Add adds a close func at the beginning of the list
func (c *Closer) Add(f CloseFunc) {
    c.m.Lock()
    defer c.m.Unlock()
    c.fs = append([]CloseFunc{f}, c.fs...)
}

// NewChild creates a new child closer whose Close is run by this closer
func (c *Closer) NewChild() *Closer {
    child := NewCloser()
    c.Add(child.Close)
    return child
}

71
vendor/github.com/asticode/go-astikit/errors.go generated vendored Normal file
View file

@ -0,0 +1,71 @@
package astikit
import (
"errors"
"strings"
"sync"
)
// Errors is an error containing multiple errors
type Errors struct {
    m *sync.Mutex // Locks p
    p []error
}

// NewErrors creates new errors
func NewErrors(errs ...error) *Errors {
    return &Errors{
        m: &sync.Mutex{},
        p: errs,
    }
}

// Add adds a new error; nil errors are ignored
func (errs *Errors) Add(err error) {
    if err == nil {
        return
    }
    errs.m.Lock()
    defer errs.m.Unlock()
    errs.p = append(errs.p, err)
}

// IsNil checks whether the error is nil (i.e. no error has been added)
func (errs *Errors) IsNil() bool {
    errs.m.Lock()
    defer errs.m.Unlock()
    return len(errs.p) == 0
}

// Loop loops through the errors, stopping early when fn returns true
func (errs *Errors) Loop(fn func(idx int, err error) bool) {
    errs.m.Lock()
    defer errs.m.Unlock()
    for idx, err := range errs.p {
        if fn(idx, err) {
            return
        }
    }
}

// Error implements the error interface by joining all messages with " && "
func (errs *Errors) Error() string {
    errs.m.Lock()
    defer errs.m.Unlock()
    ss := make([]string, 0, len(errs.p))
    for _, err := range errs.p {
        ss = append(ss, err.Error())
    }
    return strings.Join(ss, " && ")
}
// ErrorCause returns the cause of an error, i.e. the innermost error obtained
// by unwrapping until no wrapped error remains.
func ErrorCause(err error) error {
    for {
        u := errors.Unwrap(err)
        if u == nil {
            return err
        }
        err = u
    }
}

104
vendor/github.com/asticode/go-astikit/exec.go generated vendored Normal file
View file

@ -0,0 +1,104 @@
package astikit
import (
"context"
"fmt"
"os/exec"
"strings"
"sync"
)
// Statuses
const (
    ExecStatusCrashed = "crashed"
    ExecStatusRunning = "running"
    ExecStatusStopped = "stopped"
)

// ExecHandler represents an object capable of handling the execution of a cmd
type ExecHandler struct {
    cancel context.CancelFunc
    ctx    context.Context
    // err is set by the task once cmd.Wait returns.
    // NOTE(review): read by Status without synchronization — confirm against upstream.
    err     error
    o       sync.Once
    stopped bool // set by Stop to distinguish "stopped" from "crashed"
}

// Status returns the cmd status
func (h *ExecHandler) Status() string {
    if h.ctx.Err() != nil {
        // ctx is done: either it was stopped deliberately / exited cleanly...
        if h.stopped || h.err == nil {
            return ExecStatusStopped
        }
        // ...or it exited with an error
        return ExecStatusCrashed
    }
    return ExecStatusRunning
}

// Stop stops the cmd (only the first call has an effect)
func (h *ExecHandler) Stop() {
    h.o.Do(func() {
        h.cancel()
        h.stopped = true
    })
}

// ExecCmdOptions represents exec options
type ExecCmdOptions struct {
    Args       []string
    CmdAdapter func(cmd *exec.Cmd, h *ExecHandler) error // optional hook run before the cmd starts
    Name       string
    StopFunc   func(cmd *exec.Cmd) error // optional custom stop; defaults to killing the process
}
// ExecCmd executes a cmd
// The process will be stopped when the worker stops
func ExecCmd(w *Worker, o ExecCmdOptions) (h *ExecHandler, err error) {
    // Create handler
    h = &ExecHandler{}
    h.ctx, h.cancel = context.WithCancel(w.Context())

    // Create command
    cmd := exec.Command(o.Name, o.Args...)

    // Adapt command
    if o.CmdAdapter != nil {
        if err = o.CmdAdapter(cmd, h); err != nil {
            err = fmt.Errorf("astikit: adapting cmd failed: %w", err)
            return
        }
    }

    // Start
    w.Logger().Infof("astikit: starting %s", strings.Join(cmd.Args, " "))
    if err = cmd.Start(); err != nil {
        err = fmt.Errorf("astikit: executing %s: %w", strings.Join(cmd.Args, " "), err)
        return
    }

    // Handle context
    go func() {
        // Wait for context to be done
        <-h.ctx.Done()

        // Get stop func (defaults to killing the process)
        f := func() error { return cmd.Process.Kill() }
        if o.StopFunc != nil {
            f = func() error { return o.StopFunc(cmd) }
        }

        // Stop
        // Fix: use a goroutine-local error here. The previous code assigned the
        // named return err, racing with the caller since ExecCmd has usually
        // returned by the time the context is done.
        if errStop := f(); errStop != nil {
            w.Logger().Error(fmt.Errorf("astikit: stopping cmd failed: %w", errStop))
            return
        }
    }()

    // Execute in a task
    w.NewTask().Do(func() {
        h.err = cmd.Wait()
        h.cancel()
        w.Logger().Infof("astikit: status is now %s for %s", h.Status(), strings.Join(cmd.Args, " "))
    })
    return
}

48
vendor/github.com/asticode/go-astikit/flag.go generated vendored Normal file
View file

@ -0,0 +1,48 @@
package astikit
import (
"os"
"strings"
)
// FlagCmd retrieves the command from the input Args
// The first argument is treated as the command when it is non-empty and does
// not start with '-'; it is then removed from os.Args so flag parsing works.
func FlagCmd() (o string) {
    // Fix: guard against an empty first argument, which previously caused an
    // index-out-of-range panic on os.Args[1][0].
    if len(os.Args) >= 2 && len(os.Args[1]) > 0 && os.Args[1][0] != '-' {
        o = os.Args[1]
        os.Args = append([]string{os.Args[0]}, os.Args[2:]...)
    }
    return
}
// FlagStrings represents a flag that can be set several times and
// stores unique string values
type FlagStrings struct {
    Map   map[string]bool
    Slice *[]string
}

// NewFlagStrings creates a new FlagStrings
func NewFlagStrings() FlagStrings {
    s := []string{}
    return FlagStrings{
        Map:   map[string]bool{},
        Slice: &s,
    }
}

// String implements the flag.Value interface
// It joins the stored values with commas; a zero value prints as "".
func (f FlagStrings) String() string {
    if f.Slice == nil {
        return ""
    }
    return strings.Join(*f.Slice, ",")
}

// Set implements the flag.Value interface
// Duplicate values are silently ignored.
func (f FlagStrings) Set(i string) error {
    if f.Map[i] {
        return nil
    }
    f.Map[i] = true
    *f.Slice = append(*f.Slice, i)
    return nil
}

60
vendor/github.com/asticode/go-astikit/float.go generated vendored Normal file
View file

@ -0,0 +1,60 @@
package astikit
import (
"bytes"
"fmt"
"strconv"
)
// Rational represents a rational number with integer numerator and denominator
type Rational struct{ den, num int }

// NewRational creates a new rational
func NewRational(num, den int) *Rational {
    return &Rational{
        den: den,
        num: num,
    }
}

// Num returns the rational num
func (r *Rational) Num() int {
    return r.num
}

// Den returns the rational den
func (r *Rational) Den() int {
    return r.den
}

// ToFloat64 returns the rational as a float64
func (r *Rational) ToFloat64() float64 {
    return float64(r.num) / float64(r.den)
}

// MarshalText implements the TextMarshaler interface ("num/den")
func (r *Rational) MarshalText() (b []byte, err error) {
    b = []byte(fmt.Sprintf("%d/%d", r.num, r.den))
    return
}

// UnmarshalText implements the TextUnmarshaler interface
// An empty input resets the rational to 0/1; "n" alone is parsed as n/1.
func (r *Rational) UnmarshalText(b []byte) (err error) {
    r.num = 0
    r.den = 1
    if len(b) == 0 {
        return
    }
    items := bytes.Split(b, []byte("/"))
    if r.num, err = strconv.Atoi(string(items[0])); err != nil {
        err = fmt.Errorf("astikit: atoi of %s failed: %w", string(items[0]), err)
        return
    }
    if len(items) > 1 {
        if r.den, err = strconv.Atoi(string(items[1])); err != nil {
            // Fix: use the "astikit:" prefix consistently (was "astifloat:",
            // inconsistent with every other error in this package)
            err = fmt.Errorf("astikit: atoi of %s failed: %w", string(items[1]), err)
            return
        }
    }
    return
}

632
vendor/github.com/asticode/go-astikit/http.go generated vendored Normal file
View file

@ -0,0 +1,632 @@
package astikit
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"net"
"net/http"
"os"
"path/filepath"
"strconv"
"strings"
"sync"
"time"
)
// ErrHTTPSenderUnmarshaledError is returned by SendJSON when the response
// status is not 2xx and the error body was successfully unmarshaled into
// HTTPSendJSONOptions.BodyError.
var ErrHTTPSenderUnmarshaledError = errors.New("astikit: unmarshaled error")

// ServeHTTPOptions represents serve options
type ServeHTTPOptions struct {
    Addr    string
    Handler http.Handler
}
// ServeHTTP spawns an HTTP server in a worker task and shuts it down when the
// worker's context is done or when serving fails.
func ServeHTTP(w *Worker, o ServeHTTPOptions) {
    // Create server
    s := &http.Server{Addr: o.Addr, Handler: o.Handler}

    // Execute in a task
    w.NewTask().Do(func() {
        // Log
        w.Logger().Infof("astikit: serving on %s", o.Addr)

        // Serve
        // Fix: the channel is buffered so the serving goroutine can always
        // deliver its error and exit. Previously, when the context finished
        // first, ListenAndServe's post-Shutdown error blocked forever on the
        // unbuffered send, leaking the goroutine.
        var done = make(chan error, 1)
        go func() {
            if err := s.ListenAndServe(); err != nil {
                done <- err
            }
        }()

        // Wait for context or done to be done
        select {
        case <-w.ctx.Done():
            if w.ctx.Err() != context.Canceled {
                w.Logger().Error(fmt.Errorf("astikit: context error: %w", w.ctx.Err()))
            }
        case err := <-done:
            if err != nil {
                w.Logger().Error(fmt.Errorf("astikit: serving failed: %w", err))
            }
        }

        // Shutdown
        w.Logger().Infof("astikit: shutting down server on %s", o.Addr)
        if err := s.Shutdown(context.Background()); err != nil {
            w.Logger().Error(fmt.Errorf("astikit: shutting down server on %s failed: %w", o.Addr, err))
        }
    })
}
// HTTPClient represents an HTTP client
type HTTPClient interface {
    Do(req *http.Request) (*http.Response, error)
}

// HTTPSender represents an object capable of sending http requests
type HTTPSender struct {
    client     HTTPClient
    l          SeverityLogger
    retryFunc  HTTPSenderRetryFunc
    retryMax   int           // number of retries on top of the initial attempt
    retrySleep time.Duration // sleep between attempts
    timeout    time.Duration // default timeout used by Send
}

// HTTPSenderRetryFunc is a function that decides whether to retry an HTTP request
// Returning a non-nil error triggers a retry.
type HTTPSenderRetryFunc func(resp *http.Response) error

// HTTPSenderOptions represents HTTPSender options
type HTTPSenderOptions struct {
    Client     HTTPClient
    Logger     StdLogger
    RetryFunc  HTTPSenderRetryFunc
    RetryMax   int
    RetrySleep time.Duration
    Timeout    time.Duration
}

// NewHTTPSender creates a new HTTP sender
// Defaults: a plain http.Client and a retry func that retries on 5xx responses.
func NewHTTPSender(o HTTPSenderOptions) (s *HTTPSender) {
    s = &HTTPSender{
        client:     o.Client,
        l:          AdaptStdLogger(o.Logger),
        retryFunc:  o.RetryFunc,
        retryMax:   o.RetryMax,
        retrySleep: o.RetrySleep,
        timeout:    o.Timeout,
    }
    if s.client == nil {
        s.client = &http.Client{}
    }
    if s.retryFunc == nil {
        s.retryFunc = s.defaultHTTPRetryFunc
    }
    return
}

// defaultHTTPRetryFunc retries on any status code >= 500.
func (s *HTTPSender) defaultHTTPRetryFunc(resp *http.Response) error {
    if resp.StatusCode >= http.StatusInternalServerError {
        return fmt.Errorf("astikit: invalid status code %d", resp.StatusCode)
    }
    return nil
}
// Send sends a new *http.Request using the sender's default timeout
func (s *HTTPSender) Send(req *http.Request) (*http.Response, error) {
    return s.SendWithTimeout(req, s.timeout)
}

// SendWithTimeout sends a new *http.Request with a timeout
// The request is attempted up to retryMax+1 times; it is retried when the
// client returns a temporary net.Error or when retryFunc returns an error.
func (s *HTTPSender) SendWithTimeout(req *http.Request, timeout time.Duration) (resp *http.Response, err error) {
    // Set name (used in logs and error messages)
    name := req.Method + " request"
    if req.URL != nil {
        name += " to " + req.URL.String()
    }

    // Timeout
    if timeout > 0 {
        // Create context
        ctx, cancel := context.WithTimeout(req.Context(), timeout)
        defer cancel()

        // Update request
        req = req.WithContext(ctx)

        // Update name
        name += " with timeout " + timeout.String()
    }

    // Loop
    // We start at retryMax + 1 so that it runs at least once even if retryMax == 0
    tries := 0
    for retriesLeft := s.retryMax + 1; retriesLeft > 0; retriesLeft-- {
        // Get request name with "(attempt/total)" suffix
        nr := name + " (" + strconv.Itoa(s.retryMax-retriesLeft+2) + "/" + strconv.Itoa(s.retryMax+1) + ")"
        tries++

        // Send request
        s.l.Debugf("astikit: sending %s", nr)
        if resp, err = s.client.Do(req); err != nil {
            // Retry if error is temporary, stop here otherwise
            // NOTE(review): net.Error.Temporary is deprecated in modern Go — confirm intent before changing.
            if netError, ok := err.(net.Error); !ok || !netError.Temporary() {
                err = fmt.Errorf("astikit: sending %s failed: %w", nr, err)
                return
            }
        } else if err = req.Context().Err(); err != nil {
            err = fmt.Errorf("astikit: request context failed: %w", err)
            return
        } else {
            err = s.retryFunc(resp)
        }

        // Retry
        // NOTE(review): when retrying after retryFunc failed, the previous
        // resp.Body is not closed, which can prevent connection reuse — verify upstream.
        if err != nil {
            if retriesLeft > 1 {
                s.l.Errorf("astikit: sending %s failed, sleeping %s and retrying... (%d retries left): %w", nr, s.retrySleep, retriesLeft-1, err)
                time.Sleep(s.retrySleep)
            }
            continue
        }

        // Return if conditions for retrying were not met
        return
    }

    // Max retries limit reached
    err = fmt.Errorf("astikit: sending %s failed after %d tries: %w", name, tries, err)
    return
}

// HTTPSendJSONOptions represents SendJSON options
type HTTPSendJSONOptions struct {
    BodyError interface{} // decode target for non-2xx responses
    BodyIn    interface{} // encoded as the request body
    BodyOut   interface{} // decode target for 2xx responses
    Headers   map[string]string
    Method    string
    URL       string
}

// SendJSON sends a new JSON HTTP request
// It returns ErrHTTPSenderUnmarshaledError when the response status is not 2xx
// and the body was successfully decoded into o.BodyError.
func (s *HTTPSender) SendJSON(o HTTPSendJSONOptions) (err error) {
    // Marshal body in
    var bi io.Reader
    if o.BodyIn != nil {
        bb := &bytes.Buffer{}
        if err = json.NewEncoder(bb).Encode(o.BodyIn); err != nil {
            err = fmt.Errorf("astikit: marshaling body in failed: %w", err)
            return
        }
        bi = bb
    }

    // Create request
    var req *http.Request
    if req, err = http.NewRequest(o.Method, o.URL, bi); err != nil {
        err = fmt.Errorf("astikit: creating request failed: %w", err)
        return
    }

    // Add headers
    for k, v := range o.Headers {
        req.Header.Set(k, v)
    }

    // Send request
    var resp *http.Response
    if resp, err = s.Send(req); err != nil {
        err = fmt.Errorf("astikit: sending request failed: %w", err)
        return
    }
    defer resp.Body.Close()

    // Process status code
    if code := resp.StatusCode; code < 200 || code > 299 {
        // Try unmarshaling error
        if o.BodyError != nil {
            if err2 := json.NewDecoder(resp.Body).Decode(o.BodyError); err2 == nil {
                err = ErrHTTPSenderUnmarshaledError
                return
            }
        }

        // Default error
        err = fmt.Errorf("astikit: invalid status code %d", code)
        return
    }

    // Unmarshal body out
    if o.BodyOut != nil {
        if err = json.NewDecoder(resp.Body).Decode(o.BodyOut); err != nil {
            err = fmt.Errorf("astikit: unmarshaling failed: %w", err)
            return
        }
    }
    return
}
// HTTPResponseFunc is a func that can process an *http.Response
type HTTPResponseFunc func(resp *http.Response) error

// defaultHTTPResponseFunc fails on any non-2xx status code.
func defaultHTTPResponseFunc(resp *http.Response) (err error) {
    if resp.StatusCode < http.StatusOK || resp.StatusCode >= http.StatusMultipleChoices {
        err = fmt.Errorf("astikit: invalid status code %d", resp.StatusCode)
        return
    }
    return
}
// HTTPDownloader represents an object capable of downloading several HTTP srcs simultaneously
// and doing stuff to the results
type HTTPDownloader struct {
    bp           *BufferPool
    l            *GoroutineLimiter // bounds download concurrency
    responseFunc HTTPResponseFunc
    s            *HTTPSender
}

// HTTPDownloaderOptions represents HTTPDownloader options
type HTTPDownloaderOptions struct {
    Limiter      GoroutineLimiterOptions
    ResponseFunc HTTPResponseFunc // defaults to rejecting non-2xx responses
    Sender       HTTPSenderOptions
}

// NewHTTPDownloader creates a new HTTPDownloader
func NewHTTPDownloader(o HTTPDownloaderOptions) (d *HTTPDownloader) {
    d = &HTTPDownloader{
        bp:           NewBufferPool(),
        l:            NewGoroutineLimiter(o.Limiter),
        responseFunc: o.ResponseFunc,
        s:            NewHTTPSender(o.Sender),
    }
    if d.responseFunc == nil {
        d.responseFunc = defaultHTTPResponseFunc
    }
    return
}

// Close closes the downloader properly
func (d *HTTPDownloader) Close() error {
    return d.l.Close()
}

// HTTPDownloaderSrc describes one source to download.
type HTTPDownloaderSrc struct {
    Body   io.Reader
    Header http.Header
    Method string // defaults to GET
    URL    string
}

// httpDownloaderFunc receives the downloaded content of src #idx.
// It is the responsibility of the caller to call i.Close()
type httpDownloaderFunc func(ctx context.Context, idx int, i *BufferPoolItem) error

// do downloads a single src and hands the buffered body to fn.
func (d *HTTPDownloader) do(ctx context.Context, fn httpDownloaderFunc, idx int, src HTTPDownloaderSrc) (err error) {
    // Defaults
    if src.Method == "" {
        src.Method = http.MethodGet
    }

    // Create request
    var r *http.Request
    if r, err = http.NewRequestWithContext(ctx, src.Method, src.URL, src.Body); err != nil {
        err = fmt.Errorf("astikit: creating request to %s failed: %w", src.URL, err)
        return
    }

    // Copy header
    for k := range src.Header {
        r.Header.Set(k, src.Header.Get(k))
    }

    // Send request
    var resp *http.Response
    if resp, err = d.s.Send(r); err != nil {
        err = fmt.Errorf("astikit: sending request to %s failed: %w", src.URL, err)
        return
    }
    defer resp.Body.Close()

    // Create buffer pool item
    // NOTE(review): buf is not closed on the error paths below; only fn is
    // expected to close it. Confirm whether the pool item should be released
    // on early return.
    buf := d.bp.New()

    // Process response
    if err = d.responseFunc(resp); err != nil {
        err = fmt.Errorf("astikit: response for request to %s is invalid: %w", src.URL, err)
        return
    }

    // Copy body
    if _, err = Copy(ctx, buf, resp.Body); err != nil {
        err = fmt.Errorf("astikit: copying body of %s failed: %w", src.URL, err)
        return
    }

    // Custom
    if err = fn(ctx, idx, buf); err != nil {
        err = fmt.Errorf("astikit: custom callback on %s failed: %w", src.URL, err)
        return
    }
    return
}

// download runs all srcs through the limiter and reports the first error encountered.
func (d *HTTPDownloader) download(ctx context.Context, srcs []HTTPDownloaderSrc, fn httpDownloaderFunc) (err error) {
    // Nothing to download
    if len(srcs) == 0 {
        return nil
    }

    // Loop through srcs
    // NOTE(review): err is written from the goroutines run by d.l.Do without
    // synchronization — verify this data race against upstream.
    wg := &sync.WaitGroup{}
    wg.Add(len(srcs))
    for idx, src := range srcs {
        func(idx int, src HTTPDownloaderSrc) {
            // Update error with ctx
            if ctx.Err() != nil {
                err = ctx.Err()
            }

            // Do nothing if error
            if err != nil {
                wg.Done()
                return
            }

            // Do
            d.l.Do(func() {
                // Task is done
                defer wg.Done()

                // Do
                if errD := d.do(ctx, fn, idx, src); errD != nil && err == nil {
                    err = errD
                    return
                }
            })
        }(idx, src)
    }

    // Wait
    wg.Wait()
    return
}
// DownloadInDirectory downloads in parallel a set of srcs and saves them in a dst directory
// Each file is named after the base of its src URL.
func (d *HTTPDownloader) DownloadInDirectory(ctx context.Context, dst string, srcs ...HTTPDownloaderSrc) error {
    return d.download(ctx, srcs, func(ctx context.Context, idx int, buf *BufferPoolItem) (err error) {
        // Make sure to close buffer
        defer buf.Close()

        // Make sure destination directory exists
        if err = os.MkdirAll(dst, DefaultDirMode); err != nil {
            err = fmt.Errorf("astikit: mkdirall %s failed: %w", dst, err)
            return
        }

        // Create destination file (dst deliberately shadows the directory path here)
        var f *os.File
        dst := filepath.Join(dst, filepath.Base(srcs[idx].URL))
        if f, err = os.Create(dst); err != nil {
            err = fmt.Errorf("astikit: creating %s failed: %w", dst, err)
            return
        }
        defer f.Close()

        // Copy buffer
        if _, err = Copy(ctx, f, buf); err != nil {
            err = fmt.Errorf("astikit: copying content to %s failed: %w", dst, err)
            return
        }
        return
    })
}

// DownloadInWriter downloads in parallel a set of srcs and concatenates them in a writer while
// maintaining the initial order
func (d *HTTPDownloader) DownloadInWriter(ctx context.Context, dst io.Writer, srcs ...HTTPDownloaderSrc) error {
    // Init
    // Completed chunks are kept sorted by idx until the next required one arrives.
    type chunk struct {
        buf *BufferPoolItem
        idx int
    }
    var cs []chunk
    var m sync.Mutex    // Locks cs
    var requiredIdx int // next chunk idx to be written to dst

    // Make sure to close all buffers
    defer func() {
        for _, c := range cs {
            c.buf.Close()
        }
    }()

    // Download
    return d.download(ctx, srcs, func(ctx context.Context, idx int, buf *BufferPoolItem) (err error) {
        // Lock
        m.Lock()
        defer m.Unlock()

        // Check where to insert chunk so cs stays sorted by idx
        var idxInsert = -1
        for idxChunk := 0; idxChunk < len(cs); idxChunk++ {
            if idx < cs[idxChunk].idx {
                idxInsert = idxChunk
                break
            }
        }

        // Create chunk
        c := chunk{
            buf: buf,
            idx: idx,
        }

        // Add chunk
        if idxInsert > -1 {
            cs = append(cs[:idxInsert], append([]chunk{c}, cs[idxInsert:]...)...)
        } else {
            cs = append(cs, c)
        }

        // Loop through chunks, flushing every chunk that is now in order
        for idxChunk := 0; idxChunk < len(cs); idxChunk++ {
            // Get chunk
            c := cs[idxChunk]

            // The chunk should be copied
            if c.idx == requiredIdx {
                // Copy chunk content
                // Do not check error right away since we still want to close the buffer
                // and remove the chunk
                _, err = Copy(ctx, dst, c.buf)

                // Close buffer
                c.buf.Close()

                // Remove chunk
                requiredIdx++
                cs = append(cs[:idxChunk], cs[idxChunk+1:]...)
                idxChunk--

                // Check error
                if err != nil {
                    err = fmt.Errorf("astikit: copying chunk #%d to dst failed: %w", c.idx, err)
                    return
                }
            }
        }
        return
    })
}

// DownloadInFile downloads in parallel a set of srcs and concatenates them in a dst file while
// maintaining the initial order
func (d *HTTPDownloader) DownloadInFile(ctx context.Context, dst string, srcs ...HTTPDownloaderSrc) (err error) {
    // Make sure destination directory exists
    if err = os.MkdirAll(filepath.Dir(dst), DefaultDirMode); err != nil {
        err = fmt.Errorf("astikit: mkdirall %s failed: %w", filepath.Dir(dst), err)
        return
    }

    // Create destination file
    var f *os.File
    if f, err = os.Create(dst); err != nil {
        err = fmt.Errorf("astikit: creating %s failed: %w", dst, err)
        return
    }
    defer f.Close()

    // Download in writer
    return d.DownloadInWriter(ctx, f, srcs...)
}
// HTTPMiddleware represents an HTTP middleware
type HTTPMiddleware func(http.Handler) http.Handler

// ChainHTTPMiddlewares chains HTTP middlewares
func ChainHTTPMiddlewares(h http.Handler, ms ...HTTPMiddleware) http.Handler {
    return ChainHTTPMiddlewaresWithPrefix(h, []string{}, ms...)
}

// ChainHTTPMiddlewaresWithPrefix chains HTTP middlewares if one of prefixes is present
// An empty prefix list applies every middleware unconditionally.
func ChainHTTPMiddlewaresWithPrefix(h http.Handler, prefixes []string, ms ...HTTPMiddleware) http.Handler {
    for _, m := range ms {
        // Nil middlewares are skipped
        if m == nil {
            continue
        }

        // No prefixes: wrap unconditionally
        if len(prefixes) == 0 {
            h = m(h)
            continue
        }

        // With prefixes: apply the middleware per request, only on matching paths
        next := h
        h = http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
            p := r.URL.EscapedPath()
            for _, prefix := range prefixes {
                if strings.HasPrefix(p, prefix) {
                    m(next).ServeHTTP(rw, r)
                    return
                }
            }
            next.ServeHTTP(rw, r)
        })
    }
    return h
}
// handleHTTPBasicAuth writes a 401 with a WWW-Authenticate header and returns
// true when the request's basic auth credentials don't match; it returns false
// when the credentials are valid.
func handleHTTPBasicAuth(username, password string, rw http.ResponseWriter, r *http.Request) bool {
    u, p, ok := r.BasicAuth()
    if ok && u == username && p == password {
        return false
    }
    rw.Header().Set("WWW-Authenticate", "Basic Realm=Please enter your credentials")
    rw.WriteHeader(http.StatusUnauthorized)
    return true
}
// HTTPMiddlewareBasicAuth adds basic HTTP auth to an HTTP handler
// A nil middleware is returned when both credentials are empty.
func HTTPMiddlewareBasicAuth(username, password string) HTTPMiddleware {
    if username == "" && password == "" {
        return nil
    }
    return func(h http.Handler) http.Handler {
        return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
            // Handle basic auth
            if handleHTTPBasicAuth(username, password, rw, r) {
                return
            }

            // Next handler
            h.ServeHTTP(rw, r)
        })
    }
}

// setHTTPContentType sets the Content-Type response header.
func setHTTPContentType(contentType string, rw http.ResponseWriter) {
    rw.Header().Set("Content-Type", contentType)
}

// HTTPMiddlewareContentType adds a content type to an HTTP handler
func HTTPMiddlewareContentType(contentType string) HTTPMiddleware {
    return func(h http.Handler) http.Handler {
        return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
            // Set content type
            setHTTPContentType(contentType, rw)

            // Next handler
            h.ServeHTTP(rw, r)
        })
    }
}

// setHTTPHeaders sets every provided header on the response.
func setHTTPHeaders(vs map[string]string, rw http.ResponseWriter) {
    for k, v := range vs {
        rw.Header().Set(k, v)
    }
}

// HTTPMiddlewareHeaders adds headers to an HTTP handler
func HTTPMiddlewareHeaders(vs map[string]string) HTTPMiddleware {
    return func(h http.Handler) http.Handler {
        return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
            // Set headers
            setHTTPHeaders(vs, rw)

            // Next handler
            h.ServeHTTP(rw, r)
        })
    }
}

// HTTPMiddlewareCORSHeaders adds permissive ("*") CORS headers to an HTTP handler
func HTTPMiddlewareCORSHeaders() HTTPMiddleware {
    return HTTPMiddlewareHeaders(map[string]string{
        "Access-Control-Allow-Headers": "*",
        "Access-Control-Allow-Methods": "*",
        "Access-Control-Allow-Origin":  "*",
    })
}

121
vendor/github.com/asticode/go-astikit/io.go generated vendored Normal file
View file

@ -0,0 +1,121 @@
package astikit
import (
"bytes"
"context"
"io"
)
// Copy is a copy with a context
// It stops copying as soon as the context is in error.
func Copy(ctx context.Context, dst io.Writer, src io.Reader) (int64, error) {
    return io.Copy(dst, NewCtxReader(ctx, src))
}

// nopCloser adds a no-op Close to an io.Writer.
type nopCloser struct {
    io.Writer
}

func (nopCloser) Close() error { return nil }

// NopCloser returns a WriteCloser with a no-op Close method wrapping
// the provided Writer w.
func NopCloser(w io.Writer) io.WriteCloser {
    return nopCloser{w}
}
// CtxReader represents a reader with a context
type CtxReader struct {
    ctx    context.Context
    reader io.Reader
}

// NewCtxReader creates a reader with a context
func NewCtxReader(ctx context.Context, r io.Reader) *CtxReader {
    return &CtxReader{ctx: ctx, reader: r}
}

// Read implements the io.Reader interface
// It fails as soon as the context is in error.
func (r *CtxReader) Read(p []byte) (int, error) {
    if err := r.ctx.Err(); err != nil {
        return 0, err
    }
    return r.reader.Read(p)
}
// WriterAdapter represents an object that can adapt a Writer
// When a Split sequence is configured, input is buffered and the callback is
// invoked once per split-delimited item.
type WriterAdapter struct {
    buffer *bytes.Buffer // holds data until the next Split sequence arrives
    o      WriterAdapterOptions
}

// WriterAdapterOptions represents WriterAdapter options
type WriterAdapterOptions struct {
    Callback func(i []byte) // invoked for each adapted item
    Split    []byte         // item delimiter; when empty, bytes are forwarded as-is
}

// NewWriterAdapter creates a new WriterAdapter
func NewWriterAdapter(o WriterAdapterOptions) *WriterAdapter {
    return &WriterAdapter{
        buffer: &bytes.Buffer{},
        o:      o,
    }
}

// Close closes the adapter properly
// It flushes any remaining buffered data to the callback.
func (w *WriterAdapter) Close() error {
    if w.buffer.Len() > 0 {
        w.write(w.buffer.Bytes())
    }
    return nil
}

// Write implements the io.Writer interface
func (w *WriterAdapter) Write(i []byte) (n int, err error) {
    // Update n to avoid broken pipe error
    defer func() {
        n = len(i)
    }()

    // Split
    if len(w.o.Split) > 0 {
        // Split bytes are not present, write in buffer
        if !bytes.Contains(i, w.o.Split) {
            w.buffer.Write(i)
            return
        }

        // Loop in split items
        items := bytes.Split(i, w.o.Split)
        for i := 0; i < len(items)-1; i++ {
            // If this is the first item, prepend the buffer
            if i == 0 {
                items[i] = append(w.buffer.Bytes(), items[i]...)
                w.buffer.Reset()
            }

            // Write
            w.write(items[i])
        }

        // Add remaining to buffer (it may be completed by a later Write)
        w.buffer.Write(items[len(items)-1])
        return
    }

    // By default, forward the bytes
    w.write(i)
    return
}

// write forwards i to the callback when one is set.
func (w *WriterAdapter) write(i []byte) {
    if w.o.Callback != nil {
        w.o.Callback(i)
    }
}

101
vendor/github.com/asticode/go-astikit/limiter.go generated vendored Normal file
View file

@ -0,0 +1,101 @@
package astikit
import (
"context"
"sync"
"time"
)
// Limiter represents a limiter
type Limiter struct {
    buckets map[string]*LimiterBucket
    m       *sync.Mutex // Locks buckets
}

// NewLimiter creates a new limiter
func NewLimiter() *Limiter {
    return &Limiter{
        buckets: make(map[string]*LimiterBucket),
        m:       &sync.Mutex{},
    }
}

// Add adds a new bucket (an existing bucket with the same name is reused)
func (l *Limiter) Add(name string, cap int, period time.Duration) *LimiterBucket {
    l.m.Lock()
    defer l.m.Unlock()
    if _, ok := l.buckets[name]; !ok {
        l.buckets[name] = newLimiterBucket(cap, period)
    }
    return l.buckets[name]
}

// Bucket retrieves a bucket from the limiter
func (l *Limiter) Bucket(name string) (b *LimiterBucket, ok bool) {
    l.m.Lock()
    defer l.m.Unlock()
    b, ok = l.buckets[name]
    return
}

// Close closes the limiter properly
func (l *Limiter) Close() {
    l.m.Lock()
    defer l.m.Unlock()
    for _, b := range l.buckets {
        b.Close()
    }
}

// LimiterBucket represents a limiter bucket allowing up to cap increments per period
type LimiterBucket struct {
    cancel context.CancelFunc
    cap    int
    ctx    context.Context
    // NOTE(review): count is incremented by Inc and reset by tick without
    // synchronization — confirm this data race against upstream.
    count  int
    period time.Duration
    o      *sync.Once
}

// newLimiterBucket creates a new bucket and starts its purge ticker
func newLimiterBucket(cap int, period time.Duration) (b *LimiterBucket) {
    b = &LimiterBucket{
        cap:    cap,
        count:  0,
        period: period,
        o:      &sync.Once{},
    }
    b.ctx, b.cancel = context.WithCancel(context.Background())
    go b.tick()
    return
}

// Inc increments the bucket count
// It returns false when the bucket is full for the current period.
func (b *LimiterBucket) Inc() bool {
    if b.count >= b.cap {
        return false
    }
    b.count++
    return true
}

// tick runs a ticker to purge the bucket
func (b *LimiterBucket) tick() {
    var t = time.NewTicker(b.period)
    defer t.Stop()
    for {
        select {
        case <-t.C:
            b.count = 0
        case <-b.ctx.Done():
            return
        }
    }
}

// Close closes the bucket properly (only the first call has an effect)
func (b *LimiterBucket) Close() {
    b.o.Do(func() {
        b.cancel()
    })
}

171
vendor/github.com/asticode/go-astikit/logger.go generated vendored Normal file
View file

@ -0,0 +1,171 @@
package astikit
import (
"context"
)
// CompleteLogger represents a complete logger
type CompleteLogger interface {
    StdLogger
    SeverityLogger
    SeverityCtxLogger
}

// StdLogger represents a standard logger
type StdLogger interface {
    Fatal(v ...interface{})
    Fatalf(format string, v ...interface{})
    Print(v ...interface{})
    Printf(format string, v ...interface{})
}

// SeverityLogger represents a severity logger
type SeverityLogger interface {
    Debug(v ...interface{})
    Debugf(format string, v ...interface{})
    Error(v ...interface{})
    Errorf(format string, v ...interface{})
    Info(v ...interface{})
    Infof(format string, v ...interface{})
    Warn(v ...interface{})
    Warnf(format string, v ...interface{})
}

// SeverityCtxLogger represents a severity with context logger
type SeverityCtxLogger interface {
    DebugC(ctx context.Context, v ...interface{})
    DebugCf(ctx context.Context, format string, v ...interface{})
    ErrorC(ctx context.Context, v ...interface{})
    ErrorCf(ctx context.Context, format string, v ...interface{})
    FatalC(ctx context.Context, v ...interface{})
    FatalCf(ctx context.Context, format string, v ...interface{})
    InfoC(ctx context.Context, v ...interface{})
    InfoCf(ctx context.Context, format string, v ...interface{})
    WarnC(ctx context.Context, v ...interface{})
    WarnCf(ctx context.Context, format string, v ...interface{})
}

// completeLogger implements CompleteLogger by delegating every method to a
// configurable func field.
type completeLogger struct {
    print, debug, error, fatal, info, warn       func(v ...interface{})
    printf, debugf, errorf, fatalf, infof, warnf func(format string, v ...interface{})
    debugC, errorC, fatalC, infoC, warnC         func(ctx context.Context, v ...interface{})
    debugCf, errorCf, fatalCf, infoCf, warnCf    func(ctx context.Context, format string, v ...interface{})
}
// newCompleteLogger creates a completeLogger whose methods are all no-ops.
func newCompleteLogger() *completeLogger {
    return &completeLogger{
        debug:   func(v ...interface{}) {},
        debugf:  func(format string, v ...interface{}) {},
        debugC:  func(ctx context.Context, v ...interface{}) {},
        debugCf: func(ctx context.Context, format string, v ...interface{}) {},
        error:   func(v ...interface{}) {},
        errorf:  func(format string, v ...interface{}) {},
        errorC:  func(ctx context.Context, v ...interface{}) {},
        errorCf: func(ctx context.Context, format string, v ...interface{}) {},
        fatal:   func(v ...interface{}) {},
        fatalf:  func(format string, v ...interface{}) {},
        fatalC:  func(ctx context.Context, v ...interface{}) {},
        fatalCf: func(ctx context.Context, format string, v ...interface{}) {},
        info:    func(v ...interface{}) {},
        infof:   func(format string, v ...interface{}) {},
        infoC:   func(ctx context.Context, v ...interface{}) {},
        infoCf:  func(ctx context.Context, format string, v ...interface{}) {},
        print:   func(v ...interface{}) {},
        printf:  func(format string, v ...interface{}) {},
        warn:    func(v ...interface{}) {},
        warnf:   func(format string, v ...interface{}) {},
        warnC:   func(ctx context.Context, v ...interface{}) {},
        warnCf:  func(ctx context.Context, format string, v ...interface{}) {},
    }
}

// Forwarders implementing CompleteLogger by delegating to the func fields.
func (l *completeLogger) Debug(v ...interface{})                       { l.debug(v...) }
func (l *completeLogger) Debugf(format string, v ...interface{})       { l.debugf(format, v...) }
func (l *completeLogger) DebugC(ctx context.Context, v ...interface{}) { l.debugC(ctx, v...) }
func (l *completeLogger) DebugCf(ctx context.Context, format string, v ...interface{}) {
    l.debugCf(ctx, format, v...)
}
func (l *completeLogger) Error(v ...interface{})                       { l.error(v...) }
func (l *completeLogger) Errorf(format string, v ...interface{})       { l.errorf(format, v...) }
func (l *completeLogger) ErrorC(ctx context.Context, v ...interface{}) { l.errorC(ctx, v...) }
func (l *completeLogger) ErrorCf(ctx context.Context, format string, v ...interface{}) {
    l.errorCf(ctx, format, v...)
}
func (l *completeLogger) Fatal(v ...interface{})                       { l.fatal(v...) }
func (l *completeLogger) Fatalf(format string, v ...interface{})       { l.fatalf(format, v...) }
func (l *completeLogger) FatalC(ctx context.Context, v ...interface{}) { l.fatalC(ctx, v...) }
func (l *completeLogger) FatalCf(ctx context.Context, format string, v ...interface{}) {
    l.fatalCf(ctx, format, v...)
}
func (l *completeLogger) Info(v ...interface{})                       { l.info(v...) }
func (l *completeLogger) Infof(format string, v ...interface{})       { l.infof(format, v...) }
func (l *completeLogger) InfoC(ctx context.Context, v ...interface{}) { l.infoC(ctx, v...) }
func (l *completeLogger) InfoCf(ctx context.Context, format string, v ...interface{}) {
    l.infoCf(ctx, format, v...)
}
func (l *completeLogger) Print(v ...interface{})                      { l.print(v...) }
func (l *completeLogger) Printf(format string, v ...interface{})      { l.printf(format, v...) }
func (l *completeLogger) Warn(v ...interface{})                       { l.warn(v...) }
func (l *completeLogger) Warnf(format string, v ...interface{})       { l.warnf(format, v...) }
func (l *completeLogger) WarnC(ctx context.Context, v ...interface{}) { l.warnC(ctx, v...) }
func (l *completeLogger) WarnCf(ctx context.Context, format string, v ...interface{}) {
    l.warnCf(ctx, format, v...)
}
// AdaptStdLogger transforms an StdLogger into a CompleteLogger if needed
// Severity methods fall back to Print/Printf when the input doesn't implement
// SeverityLogger, and ctx-aware methods fall back to their ctx-less
// counterparts when it doesn't implement SeverityCtxLogger.
func AdaptStdLogger(i StdLogger) CompleteLogger {
    // Already complete: use as-is
    if v, ok := i.(CompleteLogger); ok {
        return v
    }
    l := newCompleteLogger()
    // Nil input: every method stays a no-op
    if i == nil {
        return l
    }
    l.fatal = i.Fatal
    l.fatalf = i.Fatalf
    l.print = i.Print
    l.printf = i.Printf
    if v, ok := i.(SeverityLogger); ok {
        l.debug = v.Debug
        l.debugf = v.Debugf
        l.error = v.Error
        l.errorf = v.Errorf
        l.info = v.Info
        l.infof = v.Infof
        l.warn = v.Warn
        l.warnf = v.Warnf
    } else {
        l.debug = l.print
        l.debugf = l.printf
        l.error = l.print
        l.errorf = l.printf
        l.info = l.print
        l.infof = l.printf
        l.warn = l.print
        l.warnf = l.printf
    }
    if v, ok := i.(SeverityCtxLogger); ok {
        l.debugC = v.DebugC
        l.debugCf = v.DebugCf
        l.errorC = v.ErrorC
        l.errorCf = v.ErrorCf
        l.fatalC = v.FatalC
        l.fatalCf = v.FatalCf
        l.infoC = v.InfoC
        l.infoCf = v.InfoCf
        l.warnC = v.WarnC
        l.warnCf = v.WarnCf
    } else {
        l.debugC = func(ctx context.Context, v ...interface{}) { l.debug(v...) }
        l.debugCf = func(ctx context.Context, format string, v ...interface{}) { l.debugf(format, v...) }
        l.errorC = func(ctx context.Context, v ...interface{}) { l.error(v...) }
        l.errorCf = func(ctx context.Context, format string, v ...interface{}) { l.errorf(format, v...) }
        l.fatalC = func(ctx context.Context, v ...interface{}) { l.fatal(v...) }
        l.fatalCf = func(ctx context.Context, format string, v ...interface{}) { l.fatalf(format, v...) }
        l.infoC = func(ctx context.Context, v ...interface{}) { l.info(v...) }
        l.infoCf = func(ctx context.Context, format string, v ...interface{}) { l.infof(format, v...) }
        l.warnC = func(ctx context.Context, v ...interface{}) { l.warn(v...) }
        l.warnCf = func(ctx context.Context, format string, v ...interface{}) { l.warnf(format, v...) }
    }
    return l
}

67
vendor/github.com/asticode/go-astikit/map.go generated vendored Normal file
View file

@ -0,0 +1,67 @@
package astikit
import (
"fmt"
"sync"
)
// BiMap represents a bidirectional map: every pair stored in the forward map
// is mirrored (value -> key) in the inverse map. All accesses are guarded by
// a mutex and therefore safe for concurrent use.
type BiMap struct {
	forward map[interface{}]interface{}
	inverse map[interface{}]interface{}
	m       *sync.Mutex
}

// NewBiMap creates a new BiMap
func NewBiMap() *BiMap {
	return &BiMap{
		forward: make(map[interface{}]interface{}),
		inverse: make(map[interface{}]interface{}),
		m:       &sync.Mutex{},
	}
}

// get looks k up in the provided map under the lock.
func (m *BiMap) get(k interface{}, i map[interface{}]interface{}) (v interface{}, ok bool) {
	m.m.Lock()
	defer m.m.Unlock()
	v, ok = i[k]
	return
}

// Get gets the value in the forward map based on the provided key
func (m *BiMap) Get(k interface{}) (interface{}, bool) { return m.get(k, m.forward) }

// GetInverse gets the value in the inverse map based on the provided key
func (m *BiMap) GetInverse(k interface{}) (interface{}, bool) { return m.get(k, m.inverse) }

// MustGet gets the value in the forward map based on the provided key and panics if key is not found
func (m *BiMap) MustGet(k interface{}) interface{} {
	v, ok := m.get(k, m.forward)
	if !ok {
		// Fix: "foward" -> "forward" in the panic message
		panic(fmt.Sprintf("astikit: key %+v not found in forward map", k))
	}
	return v
}

// MustGetInverse gets the value in the inverse map based on the provided key and panics if key is not found
func (m *BiMap) MustGetInverse(k interface{}) interface{} {
	v, ok := m.get(k, m.inverse)
	if !ok {
		panic(fmt.Sprintf("astikit: key %+v not found in inverse map", k))
	}
	return v
}

// set stores k -> v in f and the mirrored v -> k in i, under the lock.
func (m *BiMap) set(k, v interface{}, f, i map[interface{}]interface{}) *BiMap {
	m.m.Lock()
	defer m.m.Unlock()
	f[k] = v
	i[v] = k
	return m
}

// Set sets the value in the forward and inverse map for the provided forward key
func (m *BiMap) Set(k, v interface{}) *BiMap { return m.set(k, v, m.forward, m.inverse) }

// SetInverse sets the value in the forward and inverse map for the provided inverse key
func (m *BiMap) SetInverse(k, v interface{}) *BiMap { return m.set(k, v, m.inverse, m.forward) }

148
vendor/github.com/asticode/go-astikit/os.go generated vendored Normal file
View file

@ -0,0 +1,148 @@
package astikit
import (
"context"
"fmt"
"os"
"path/filepath"
"strings"
)
// MoveFile is a cancellable move of a local file to a local or remote location.
// It is implemented as a copy (delegated to f via CopyFile) followed by the
// removal of src; src is only removed once the copy fully succeeded.
func MoveFile(ctx context.Context, dst, src string, f CopyFileFunc) (err error) {
	// Copy
	if err = CopyFile(ctx, dst, src, f); err != nil {
		err = fmt.Errorf("astikit: copying file %s to %s failed: %w", src, dst, err)
		return
	}
	// Delete the source now that the copy succeeded
	if err = os.Remove(src); err != nil {
		err = fmt.Errorf("astikit: removing %s failed: %w", src, err)
		return
	}
	return
}

// CopyFileFunc represents a CopyFile func: it receives the destination path
// together with the already stat'ed and opened source file, and is responsible
// for performing the actual write (local, scp, ...).
type CopyFileFunc func(ctx context.Context, dst string, srcStat os.FileInfo, srcFile *os.File) error
// CopyFile is a cancellable copy of a local file to a local or remote location.
// If src is a directory its content is copied recursively (the root directory
// entry itself is skipped); for regular files the actual write is delegated
// to f.
func CopyFile(ctx context.Context, dst, src string, f CopyFileFunc) (err error) {
	// Check context
	if err = ctx.Err(); err != nil {
		return
	}
	// Stat src
	var srcStat os.FileInfo
	if srcStat, err = os.Stat(src); err != nil {
		err = fmt.Errorf("astikit: stating %s failed: %w", src, err)
		return
	}
	// Src is a dir
	if srcStat.IsDir() {
		// Walk through the dir
		if err = filepath.Walk(src, func(path string, info os.FileInfo, errWalk error) (err error) {
			// Check error
			if errWalk != nil {
				err = errWalk
				return
			}
			// Do not process root
			if src == path {
				return
			}
			// Recurse with the path rebased from src onto dst
			p := filepath.Join(dst, strings.TrimPrefix(path, filepath.Clean(src)))
			if err = CopyFile(ctx, p, path, f); err != nil {
				err = fmt.Errorf("astikit: copying %s to %s failed: %w", path, p, err)
				return
			}
			return nil
		}); err != nil {
			err = fmt.Errorf("astikit: walking through %s failed: %w", src, err)
			return
		}
		return
	}
	// Open src
	var srcFile *os.File
	if srcFile, err = os.Open(src); err != nil {
		err = fmt.Errorf("astikit: opening %s failed: %w", src, err)
		return
	}
	defer srcFile.Close()
	// Delegate the actual write to the custom func
	if err = f(ctx, dst, srcStat, srcFile); err != nil {
		err = fmt.Errorf("astikit: custom failed: %w", err)
		return
	}
	return
}
// LocalCopyFileFunc is the local CopyFileFunc that allows doing cross partition copies.
// It creates the destination directory, creates the destination file with the
// source's permissions, then streams the content with a cancellable Copy.
func LocalCopyFileFunc(ctx context.Context, dst string, srcStat os.FileInfo, srcFile *os.File) (err error) {
	// Check context
	if err = ctx.Err(); err != nil {
		return
	}
	// Create the destination folder
	if err = os.MkdirAll(filepath.Dir(dst), DefaultDirMode); err != nil {
		err = fmt.Errorf("astikit: mkdirall %s failed: %w", filepath.Dir(dst), err)
		return
	}
	// Create the destination file
	var dstFile *os.File
	if dstFile, err = os.Create(dst); err != nil {
		err = fmt.Errorf("astikit: creating %s failed: %w", dst, err)
		return
	}
	defer dstFile.Close()
	// Chmod using os.chmod instead of file.Chmod
	if err = os.Chmod(dst, srcStat.Mode()); err != nil {
		err = fmt.Errorf("astikit: chmod %s %s failed, %w", dst, srcStat.Mode(), err)
		return
	}
	// Copy the content
	if _, err = Copy(ctx, dstFile, srcFile); err != nil {
		err = fmt.Errorf("astikit: copying content of %s to %s failed: %w", srcFile.Name(), dstFile.Name(), err)
		return
	}
	return
}
// SignalHandler represents a func that can handle a signal
type SignalHandler func(s os.Signal)

// TermSignalHandler returns a SignalHandler that is executed only on a term signal
// (as classified by the platform-specific isTermSignal).
func TermSignalHandler(f func()) SignalHandler {
	return func(s os.Signal) {
		if isTermSignal(s) {
			f()
		}
	}
}
// LoggerSignalHandler returns a SignalHandler that logs every received signal
// at debug level, except those listed in ignoredSignals.
func LoggerSignalHandler(l SeverityLogger, ignoredSignals ...os.Signal) SignalHandler {
	// Build a set of the signals to skip
	ignored := make(map[os.Signal]bool)
	for _, sig := range ignoredSignals {
		ignored[sig] = true
	}
	return func(sig os.Signal) {
		if ignored[sig] {
			return
		}
		l.Debugf("astikit: received signal %s", sig)
	}
}

12
vendor/github.com/asticode/go-astikit/os_js.go generated vendored Normal file
View file

@ -0,0 +1,12 @@
// +build js,wasm
package astikit
import (
"os"
"syscall"
)
// isTermSignal reports whether s should be treated as a termination signal
// (js/wasm build).
func isTermSignal(s os.Signal) bool {
	return s == syscall.SIGKILL || s == syscall.SIGINT || s == syscall.SIGQUIT || s == syscall.SIGTERM
}

12
vendor/github.com/asticode/go-astikit/os_others.go generated vendored Normal file
View file

@ -0,0 +1,12 @@
// +build !js !wasm
package astikit
import (
"os"
"syscall"
)
// isTermSignal reports whether s should be treated as a termination signal
// (non-js builds; additionally includes SIGABRT compared to the js/wasm variant).
func isTermSignal(s os.Signal) bool {
	return s == syscall.SIGABRT || s == syscall.SIGKILL || s == syscall.SIGINT || s == syscall.SIGQUIT || s == syscall.SIGTERM
}

426
vendor/github.com/asticode/go-astikit/pcm.go generated vendored Normal file
View file

@ -0,0 +1,426 @@
package astikit
import (
"fmt"
"math"
"sync"
"time"
)
// PCMLevel computes the PCM level (RMS) of samples.
// It returns 0 for an empty input (the original returned NaN because of the
// 0/0 division).
// https://dsp.stackexchange.com/questions/2951/loudness-of-pcm-stream
// https://dsp.stackexchange.com/questions/290/getting-loudness-of-a-track-with-rms?noredirect=1&lq=1
func PCMLevel(samples []int) float64 {
	// Guard against a 0/0 division on empty input
	if len(samples) == 0 {
		return 0
	}
	// Compute sum of square values (plain multiplication instead of math.Pow)
	var sum float64
	for _, s := range samples {
		sum += float64(s) * float64(s)
	}
	// Square root of the mean
	return math.Sqrt(sum / float64(len(samples)))
}
// maxPCMSample returns the maximum positive sample value representable at the
// given bit depth (e.g. 32767 for 16 bits).
func maxPCMSample(bitDepth int) int {
	return int(math.Pow(2, float64(bitDepth))/2.0) - 1
}

// PCMNormalize normalizes the PCM samples so that the loudest sample reaches
// the maximum value of the bit depth. An all-zero (or empty) input is returned
// unchanged; the original code divided by zero and panicked in that case.
func PCMNormalize(samples []int, bitDepth int) (o []int) {
	// Get max sample
	var m int
	for _, s := range samples {
		if v := int(math.Abs(float64(s))); v > m {
			m = v
		}
	}
	// Nothing to scale: avoid the integer division by zero below
	if m == 0 {
		return append(o, samples...)
	}
	// Get max for bit depth
	max := maxPCMSample(bitDepth)
	// Scale each sample so the peak maps to max
	for _, s := range samples {
		o = append(o, s*max/m)
	}
	return
}
// ConvertPCMBitDepth converts a sample from srcBitDepth to dstBitDepth by
// shifting: widening shifts the sample left, narrowing drops the low-order
// bits. The returned error is always nil.
func ConvertPCMBitDepth(srcSample int, srcBitDepth, dstBitDepth int) (dstSample int, err error) {
	switch {
	case srcBitDepth == dstBitDepth:
		// Same depth: forward untouched
		dstSample = srcSample
	case srcBitDepth < dstBitDepth:
		// Widening
		dstSample = srcSample << uint(dstBitDepth-srcBitDepth)
	default:
		// Narrowing
		dstSample = srcSample >> uint(srcBitDepth-dstBitDepth)
	}
	return
}
// PCMSampleFunc is a func that can process a sample
type PCMSampleFunc func(s int) error

// PCMSampleRateConverter is an object capable of converting a PCM's sample rate.
// Samples are fed one at a time (channel-interleaved) through Add and the
// converted stream is emitted through fn.
type PCMSampleRateConverter struct {
	b                    [][]int // per-channel buffer of pending samples
	dstSampleRate        int
	fn                   PCMSampleFunc
	numChannels          int
	numChannelsProcessed int // channel index of the sample currently expected (1-based)
	numSamplesOutputed   int
	numSamplesProcessed  int
	srcSampleRate        int
}

// NewPCMSampleRateConverter creates a new PCMSampleRateConverter
func NewPCMSampleRateConverter(srcSampleRate, dstSampleRate, numChannels int, fn PCMSampleFunc) *PCMSampleRateConverter {
	return &PCMSampleRateConverter{
		b:             make([][]int, numChannels),
		dstSampleRate: dstSampleRate,
		fn:            fn,
		numChannels:   numChannels,
		srcSampleRate: srcSampleRate,
	}
}

// Reset resets the converter to its initial state (buffers and counters).
func (c *PCMSampleRateConverter) Reset() {
	c.b = make([][]int, c.numChannels)
	c.numChannelsProcessed = 0
	c.numSamplesOutputed = 0
	c.numSamplesProcessed = 0
}

// Add adds a new sample to the converter.
// Downsampling averages buffered samples per channel; upsampling repeats the
// last complete frame until the output has caught up with the target rate.
func (c *PCMSampleRateConverter) Add(i int) (err error) {
	// Same rate: forward the sample untouched
	if c.srcSampleRate == c.dstSampleRate {
		if err = c.fn(i); err != nil {
			err = fmt.Errorf("astikit: handling sample failed: %w", err)
			return
		}
		return
	}
	// Increment num channels processed
	c.numChannelsProcessed++
	// Reset num channels processed
	if c.numChannelsProcessed > c.numChannels {
		c.numChannelsProcessed = 1
	}
	// Only increment num samples processed if all channels have been processed
	if c.numChannelsProcessed == c.numChannels {
		c.numSamplesProcessed++
	}
	// Append sample to buffer
	c.b[c.numChannelsProcessed-1] = append(c.b[c.numChannelsProcessed-1], i)
	// Downsampling: throw away data
	if c.srcSampleRate > c.dstSampleRate {
		// Make sure to always keep the first sample but do nothing until we have all channels or target sample has been
		// reached
		if (c.numSamplesOutputed > 0 && float64(c.numSamplesProcessed) < 1.0+float64(c.numSamplesOutputed)*float64(c.srcSampleRate)/float64(c.dstSampleRate)) || c.numChannelsProcessed < c.numChannels {
			return
		}
		// Loop through channels
		for idx, b := range c.b {
			// Merge (average) the buffered samples of this channel
			var s int
			for _, v := range b {
				s += v
			}
			s /= len(b)
			// Reset buffer
			c.b[idx] = []int{}
			// Custom
			if err = c.fn(s); err != nil {
				err = fmt.Errorf("astikit: handling sample failed: %w", err)
				return
			}
		}
		// Increment num samples outputted
		c.numSamplesOutputed++
		return
	}
	// Upsampling: do nothing until we have all channels
	if c.numChannelsProcessed < c.numChannels {
		return
	}
	// Repeat the current frame until the output catches up with the target rate
	for c.numSamplesOutputed == 0 || float64(c.numSamplesProcessed)+1.0 > 1.0+float64(c.numSamplesOutputed)*float64(c.srcSampleRate)/float64(c.dstSampleRate) {
		// Loop through channels
		for _, b := range c.b {
			// Invalid length: each channel buffer must hold exactly one sample here
			if len(b) != 1 {
				err = fmt.Errorf("astikit: invalid buffer item length %d", len(b))
				return
			}
			// Custom
			if err = c.fn(b[0]); err != nil {
				err = fmt.Errorf("astikit: handling sample failed: %w", err)
				return
			}
		}
		// Increment num samples outputted
		c.numSamplesOutputed++
	}
	// Reset buffer
	c.b = make([][]int, c.numChannels)
	return
}
// PCMChannelsConverter is an object capable of converting a PCM stream's
// channel count. Samples are fed one at a time (channel-interleaved) through
// Add and the converted stream is emitted through fn.
type PCMChannelsConverter struct {
	dstNumChannels int
	fn             PCMSampleFunc
	srcNumChannels int
	srcSamples     int // position within the current source frame (1-based)
}

// NewPCMChannelsConverter creates a new PCMChannelsConverter
func NewPCMChannelsConverter(srcNumChannels, dstNumChannels int, fn PCMSampleFunc) *PCMChannelsConverter {
	return &PCMChannelsConverter{
		dstNumChannels: dstNumChannels,
		fn:             fn,
		srcNumChannels: srcNumChannels,
	}
}

// Reset resets the converter to the start of a frame.
func (c *PCMChannelsConverter) Reset() {
	c.srcSamples = 0
}

// Add adds a new sample to the converter.
// When reducing channels, extra channels of each frame are dropped; when
// increasing channels, the last source channel is repeated to fill the frame.
func (c *PCMChannelsConverter) Add(i int) (err error) {
	// Same channel count: forward the sample untouched
	if c.srcNumChannels == c.dstNumChannels {
		if err = c.fn(i); err != nil {
			err = fmt.Errorf("astikit: handling sample failed: %w", err)
			return
		}
		return
	}
	// Previous frame complete: start a new one
	if c.srcSamples == c.srcNumChannels {
		c.srcSamples = 0
	}
	// Increment src samples
	c.srcSamples++
	// Reducing channels: throw away data
	if c.srcNumChannels > c.dstNumChannels {
		// Throw away sample
		if c.srcSamples > c.dstNumChannels {
			return
		}
		// Custom
		if err = c.fn(i); err != nil {
			err = fmt.Errorf("astikit: handling sample failed: %w", err)
			return
		}
		return
	}
	// Increasing channels
	var ss []int
	if c.srcSamples < c.srcNumChannels {
		// Not the last source channel: forward as-is
		ss = []int{i}
	} else {
		// Last source channel: repeat it to fill the destination frame
		for idx := c.srcNumChannels; idx <= c.dstNumChannels; idx++ {
			ss = append(ss, i)
		}
	}
	// Loop through samples
	for _, s := range ss {
		// Custom
		if err = c.fn(s); err != nil {
			err = fmt.Errorf("astikit: handling sample failed: %w", err)
			return
		}
	}
	return
}
// PCMSilenceDetector represents a PCM silence detector: it buffers incoming
// samples, analyzes them in fixed-size steps and extracts the chunks of audio
// delimited by sufficiently long silences (see Add).
type PCMSilenceDetector struct {
	analyses              []pcmSilenceDetectorAnalysis
	buf                   []int
	m                     *sync.Mutex // Locks buf
	minAnalysesPerSilence int         // number of consecutive silent steps required for a valid silence
	o                     PCMSilenceDetectorOptions
	samplesPerAnalysis    int // number of samples per analysis step
}

// pcmSilenceDetectorAnalysis is one analyzed step: its PCM level and the
// samples it covers.
type pcmSilenceDetectorAnalysis struct {
	level   float64
	samples []int
}

// PCMSilenceDetectorOptions represents a PCM silence detector options
type PCMSilenceDetectorOptions struct {
	MaxSilenceLevel    float64       `toml:"max_silence_level"`
	MinSilenceDuration time.Duration `toml:"min_silence_duration"`
	SampleRate         int           `toml:"sample_rate"`
	StepDuration       time.Duration `toml:"step_duration"`
}

// NewPCMSilenceDetector creates a new silence detector.
// MinSilenceDuration defaults to 1s and StepDuration to 30ms; SampleRate has
// no default.
// NOTE(review): a zero SampleRate yields samplesPerAnalysis == 0, which makes
// the analysis loop in Add never consume the buffer — confirm callers always
// set it.
func NewPCMSilenceDetector(o PCMSilenceDetectorOptions) (d *PCMSilenceDetector) {
	// Create
	d = &PCMSilenceDetector{
		m: &sync.Mutex{},
		o: o,
	}
	// Reset
	d.Reset()
	// Default option values
	if d.o.MinSilenceDuration == 0 {
		d.o.MinSilenceDuration = time.Second
	}
	if d.o.StepDuration == 0 {
		d.o.StepDuration = 30 * time.Millisecond
	}
	// Compute attributes depending on options
	d.samplesPerAnalysis = int(math.Floor(float64(d.o.SampleRate) * d.o.StepDuration.Seconds()))
	d.minAnalysesPerSilence = int(math.Floor(d.o.MinSilenceDuration.Seconds() / d.o.StepDuration.Seconds()))
	return
}

// Reset resets the silence detector, dropping all buffered samples and analyses.
func (d *PCMSilenceDetector) Reset() {
	// Lock
	d.m.Lock()
	defer d.m.Unlock()
	// Reset
	d.analyses = []pcmSilenceDetectorAnalysis{}
	d.buf = []int{}
}
// Add adds samples to the buffer and checks whether there are valid samples between silences.
// It returns the chunks (silence + audio + silence, concatenated samples) that
// are delimited by at least minAnalysesPerSilence silent steps on both sides;
// the trailing silence of a chunk is reused as the leading silence of the next.
func (d *PCMSilenceDetector) Add(samples []int) (validSamples [][]int) {
	// Lock
	d.m.Lock()
	defer d.m.Unlock()
	// Append samples to buffer
	d.buf = append(d.buf, samples...)
	// Analyze samples by step
	for len(d.buf) >= d.samplesPerAnalysis {
		// Append analysis (level + a copy of the step's samples)
		d.analyses = append(d.analyses, pcmSilenceDetectorAnalysis{
			level:   PCMLevel(d.buf[:d.samplesPerAnalysis]),
			samples: append([]int(nil), d.buf[:d.samplesPerAnalysis]...),
		})
		// Remove samples from buffer
		d.buf = d.buf[d.samplesPerAnalysis:]
	}
	// Loop through analyses
	var leadingSilence, inBetween, trailingSilence int
	for i := 0; i < len(d.analyses); i++ {
		if d.analyses[i].level < d.o.MaxSilenceLevel {
			// This is a silence
			// This is a leading silence
			if inBetween == 0 {
				leadingSilence++
				// The leading silence is valid
				// We can trim its useless part
				if leadingSilence > d.minAnalysesPerSilence {
					d.analyses = d.analyses[leadingSilence-d.minAnalysesPerSilence:]
					i -= leadingSilence - d.minAnalysesPerSilence
					leadingSilence = d.minAnalysesPerSilence
				}
				continue
			}
			// This is a trailing silence
			trailingSilence++
			// Trailing silence is invalid
			if trailingSilence < d.minAnalysesPerSilence {
				continue
			}
			// Trailing silence is valid
			// Concatenate the chunk's samples (leading silence through trailing silence)
			var ss []int
			for _, a := range d.analyses[:i+1] {
				ss = append(ss, a.samples...)
			}
			// Append valid samples
			validSamples = append(validSamples, ss)
			// Remove leading silence and non silence
			d.analyses = d.analyses[leadingSilence+inBetween:]
			i -= leadingSilence + inBetween
			// Reset counts: the trailing silence becomes the next chunk's leading silence
			leadingSilence, inBetween, trailingSilence = trailingSilence, 0, 0
		} else {
			// This is not a silence
			// This is a leading non silence
			// We need to remove it
			if i == 0 {
				d.analyses = d.analyses[1:]
				i = -1
				continue
			}
			// This is the first in-between
			if inBetween == 0 {
				// The leading silence is invalid
				// We need to remove it as well as this first non silence
				if leadingSilence < d.minAnalysesPerSilence {
					d.analyses = d.analyses[i+1:]
					i = -1
					continue
				}
			}
			// This non-silence was preceded by a silence not big enough to be a valid trailing silence
			// We incorporate it in the in-between
			if trailingSilence > 0 {
				inBetween += trailingSilence
				trailingSilence = 0
			}
			// This is an in-between
			inBetween++
			continue
		}
	}
	return
}

58
vendor/github.com/asticode/go-astikit/ptr.go generated vendored Normal file
View file

@ -0,0 +1,58 @@
package astikit
import "time"
// The helpers below each copy their argument and return the address of the
// copy, which is useful for building pointers to literals.

// BoolPtr transforms a bool into a *bool
func BoolPtr(i bool) *bool {
	return &i
}

// BytePtr transforms a byte into a *byte
func BytePtr(i byte) *byte {
	return &i
}

// DurationPtr transforms a time.Duration into a *time.Duration
func DurationPtr(i time.Duration) *time.Duration {
	return &i
}

// Float64Ptr transforms a float64 into a *float64
func Float64Ptr(i float64) *float64 {
	return &i
}

// IntPtr transforms an int into an *int
func IntPtr(i int) *int {
	return &i
}

// Int64Ptr transforms an int64 into an *int64
func Int64Ptr(i int64) *int64 {
	return &i
}

// StrSlicePtr transforms a []string into a *[]string (the slice header is
// copied, the backing array is shared)
func StrSlicePtr(i []string) *[]string {
	return &i
}

// StrPtr transforms a string into a *string
func StrPtr(i string) *string {
	return &i
}

// TimePtr transforms a time.Time into a *time.Time
func TimePtr(i time.Time) *time.Time {
	return &i
}

// UInt8Ptr transforms a uint8 into a *uint8
func UInt8Ptr(i uint8) *uint8 {
	return &i
}

// UInt32Ptr transforms a uint32 into a *uint32
func UInt32Ptr(i uint32) *uint32 {
	return &i
}

36
vendor/github.com/asticode/go-astikit/rand.go generated vendored Normal file
View file

@ -0,0 +1,36 @@
package astikit
import (
"math/rand"
"strings"
"time"
)
const (
randLetterBytes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890"
randLetterIdxBits = 6 // 6 bits to represent a letter index
randLetterIdxMask = 1<<randLetterIdxBits - 1 // All 1-bits, as many as letterIdxBits
randLetterIdxMax = 63 / randLetterIdxBits // # of letter indices fitting in 63 bits
)
var randSrc = rand.NewSource(time.Now().UnixNano())
// RandStr generates a random string of length n
// https://stackoverflow.com/questions/22892120/how-to-generate-a-random-string-of-a-fixed-length-in-golang
func RandStr(n int) string {
sb := strings.Builder{}
sb.Grow(n)
// A randSrc.Int63() generates 63 random bits, enough for randLetterIdxMax characters!
for i, cache, remain := n-1, randSrc.Int63(), randLetterIdxMax; i >= 0; {
if remain == 0 {
cache, remain = randSrc.Int63(), randLetterIdxMax
}
if idx := int(cache & randLetterIdxMask); idx < len(randLetterBytes) {
sb.WriteByte(randLetterBytes[idx])
i--
}
cache >>= randLetterIdxBits
remain--
}
return sb.String()
}

13
vendor/github.com/asticode/go-astikit/sort.go generated vendored Normal file
View file

@ -0,0 +1,13 @@
package astikit
import "sort"
// SortInt64 sorts a slice of int64s in place, in increasing order.
func SortInt64(a []int64) { sort.Sort(SortInt64Slice(a)) }

// SortInt64Slice attaches the methods of sort.Interface to []int64, sorting in
// increasing order.
type SortInt64Slice []int64

// Len reports the number of elements.
func (s SortInt64Slice) Len() int { return len(s) }

// Less orders elements ascending.
func (s SortInt64Slice) Less(i, j int) bool { return s[i] < s[j] }

// Swap exchanges the elements at indexes i and j.
func (s SortInt64Slice) Swap(i, j int) { s[i], s[j] = s[j], s[i] }

113
vendor/github.com/asticode/go-astikit/ssh.go generated vendored Normal file
View file

@ -0,0 +1,113 @@
package astikit
import (
"context"
"fmt"
"io"
"os"
"path/filepath"
)
// SSHSession represents an SSH Session (a subset of golang.org/x/crypto/ssh's
// Session API).
type SSHSession interface {
	Run(string) error
	Start(string) error
	StdinPipe() (io.WriteCloser, error)
	Wait() error
}

// SSHSessionFunc represents a func that can return an SSHSession together
// with a Closer that releases it.
type SSHSessionFunc func() (s SSHSession, c *Closer, err error)

// SSHCopyFileFunc is the SSH CopyFileFunc that allows doing SSH copies.
// It creates the remote destination directory in a first session, then streams
// the file content through a second session using the scp protocol.
func SSHCopyFileFunc(fn SSHSessionFunc) CopyFileFunc {
	return func(ctx context.Context, dst string, srcStat os.FileInfo, srcFile *os.File) (err error) {
		// Check context
		if err = ctx.Err(); err != nil {
			return
		}
		// Using local closure allows better readability for the defer c.Close() since it
		// isolates the use of the ssh session
		if err = func() (err error) {
			// Create ssh session
			var s SSHSession
			var c *Closer
			if s, c, err = fn(); err != nil {
				err = fmt.Errorf("astikit: creating ssh session failed: %w", err)
				return
			}
			defer c.Close()
			// Create the destination folder
			if err = s.Run("mkdir -p " + filepath.Dir(dst)); err != nil {
				err = fmt.Errorf("astikit: creating %s failed: %w", filepath.Dir(dst), err)
				return
			}
			return
		}(); err != nil {
			return
		}
		// Using local closure allows better readability for the defer c.Close() since it
		// isolates the use of the ssh session
		if err = func() (err error) {
			// Create ssh session
			var s SSHSession
			var c *Closer
			if s, c, err = fn(); err != nil {
				err = fmt.Errorf("astikit: creating ssh session failed: %w", err)
				return
			}
			defer c.Close()
			// Create stdin pipe
			var stdin io.WriteCloser
			if stdin, err = s.StdinPipe(); err != nil {
				err = fmt.Errorf("astikit: creating stdin pipe failed: %w", err)
				return
			}
			defer stdin.Close()
			// Use "scp" command in sink mode on the remote side
			if err = s.Start("scp -qt \"" + filepath.Dir(dst) + "\""); err != nil {
				err = fmt.Errorf("astikit: scp to %s failed: %w", dst, err)
				return
			}
			// Send metadata (scp "C" header: permissions, size, file name)
			if _, err = fmt.Fprintln(stdin, fmt.Sprintf("C%04o", srcStat.Mode().Perm()), srcStat.Size(), filepath.Base(dst)); err != nil {
				err = fmt.Errorf("astikit: sending metadata failed: %w", err)
				return
			}
			// Copy
			if _, err = Copy(ctx, stdin, srcFile); err != nil {
				err = fmt.Errorf("astikit: copying failed: %w", err)
				return
			}
			// Send close (scp end-of-file marker)
			if _, err = fmt.Fprint(stdin, "\x00"); err != nil {
				err = fmt.Errorf("astikit: sending close failed: %w", err)
				return
			}
			// Close stdin so the remote scp terminates
			if err = stdin.Close(); err != nil {
				err = fmt.Errorf("astikit: closing failed: %w", err)
				return
			}
			// Wait
			if err = s.Wait(); err != nil {
				err = fmt.Errorf("astikit: waiting failed: %w", err)
				return
			}
			return
		}(); err != nil {
			return
		}
		return
	}
}

301
vendor/github.com/asticode/go-astikit/stat.go generated vendored Normal file
View file

@ -0,0 +1,301 @@
package astikit
import (
"context"
"sync"
"sync/atomic"
"time"
)
// Stater is an object that can compute and handle stats: it periodically polls
// the registered StatHandlers and forwards their values to a StatsHandleFunc.
type Stater struct {
	cancel  context.CancelFunc
	ctx     context.Context
	h       StatsHandleFunc
	m       *sync.Mutex // Locks ss
	period  time.Duration
	running uint32 // accessed atomically; 1 while Start is running
	ss      map[*StatMetadata]StatOptions
}

// StatOptions represents stat options
type StatOptions struct {
	Handler  StatHandler
	Metadata *StatMetadata
}

// StatsHandleFunc is a method that can handle stat values
type StatsHandleFunc func(stats []StatValue)

// StatMetadata represents a stat metadata
type StatMetadata struct {
	Description string
	Label       string
	Name        string
	Unit        string
}

// StatHandler represents a stat handler
type StatHandler interface {
	Start()
	Stop()
	Value(delta time.Duration) interface{}
}

// StatValue represents a stat value
type StatValue struct {
	*StatMetadata
	Value interface{}
}

// StaterOptions represents stater options
type StaterOptions struct {
	HandleFunc StatsHandleFunc
	Period     time.Duration
}

// NewStater creates a new stater
func NewStater(o StaterOptions) *Stater {
	return &Stater{
		h:      o.HandleFunc,
		m:      &sync.Mutex{},
		period: o.Period,
		ss:     make(map[*StatMetadata]StatOptions),
	}
}
// Start starts the stater and blocks until the provided context is cancelled
// or Stop is called. Every period it polls all registered handlers and hands
// the collected values to the handle func in a new goroutine.
func (s *Stater) Start(ctx context.Context) {
	// Check context
	if ctx.Err() != nil {
		return
	}
	// Make sure to start only once
	if atomic.CompareAndSwapUint32(&s.running, 0, 1) {
		// Update status
		defer atomic.StoreUint32(&s.running, 0)
		// Reset context
		s.ctx, s.cancel = context.WithCancel(ctx)
		// Create ticker
		t := time.NewTicker(s.period)
		defer t.Stop()
		// Loop
		lastStatAt := now()
		for {
			select {
			case <-t.C:
				// Get delta since the previous tick
				n := now()
				delta := n.Sub(lastStatAt)
				lastStatAt = n
				// Collect the current value of every registered stat
				var stats []StatValue
				s.m.Lock()
				for _, v := range s.ss {
					stats = append(stats, StatValue{
						StatMetadata: v.Metadata,
						Value:        v.Handler.Value(delta),
					})
				}
				s.m.Unlock()
				// Handle stats without blocking the polling loop
				go s.h(stats)
			case <-s.ctx.Done():
				return
			}
		}
	}
}
// Stop stops the stater by cancelling the context created in Start.
func (s *Stater) Stop() {
	if s.cancel != nil {
		s.cancel()
	}
}

// AddStats adds (or replaces, keyed by Metadata pointer) stats
func (s *Stater) AddStats(os ...StatOptions) {
	s.m.Lock()
	defer s.m.Unlock()
	for _, o := range os {
		s.ss[o.Metadata] = o
	}
}

// DelStats deletes stats, keyed by Metadata pointer
func (s *Stater) DelStats(os ...StatOptions) {
	s.m.Lock()
	defer s.m.Unlock()
	for _, o := range os {
		delete(s.ss, o.Metadata)
	}
}
// durationStat accumulates the time spent between Begin/End calls and turns it
// into a value via fn when polled. now is a package-level clock func —
// presumably time.Now, swappable for tests; confirm in its declaration.
type durationStat struct {
	d         time.Duration // accumulated duration since the last Value call
	fn        func(d, delta time.Duration) interface{}
	isStarted bool
	m         *sync.Mutex // Locks isStarted
	startedAt time.Time
}

// newDurationStat creates a durationStat computing its value with fn.
func newDurationStat(fn func(d, delta time.Duration) interface{}) *durationStat {
	return &durationStat{
		fn: fn,
		m:  &sync.Mutex{},
	}
}

// Begin marks the start of a timed section; it is a no-op unless the stat has
// been started.
func (s *durationStat) Begin() {
	s.m.Lock()
	defer s.m.Unlock()
	if !s.isStarted {
		return
	}
	s.startedAt = now()
}

// End closes the timed section opened by Begin and adds it to the accumulator.
func (s *durationStat) End() {
	s.m.Lock()
	defer s.m.Unlock()
	if !s.isStarted {
		return
	}
	s.d += now().Sub(s.startedAt)
	s.startedAt = time.Time{}
}

// Value computes the stat for the elapsed delta and resets the accumulator.
func (s *durationStat) Value(delta time.Duration) (o interface{}) {
	// Lock
	s.m.Lock()
	defer s.m.Unlock()
	// Get current values
	n := now()
	d := s.d
	// Recording is still in process: count the open section up to now and
	// restart it from here
	if !s.startedAt.IsZero() {
		d += n.Sub(s.startedAt)
		s.startedAt = n
	}
	// Compute stat
	o = s.fn(d, delta)
	s.d = 0
	return
}

// Start enables the stat and clears the accumulator.
func (s *durationStat) Start() {
	s.m.Lock()
	defer s.m.Unlock()
	s.d = 0
	s.isStarted = true
}

// Stop disables the stat; subsequent Begin/End calls are ignored.
func (s *durationStat) Stop() {
	s.m.Lock()
	defer s.m.Unlock()
	s.isStarted = false
}

// DurationPercentageStat is an object capable of computing the percentage of time some work is taking per second
type DurationPercentageStat struct {
	*durationStat
}

// NewDurationPercentageStat creates a new duration percentage stat
func NewDurationPercentageStat() *DurationPercentageStat {
	return &DurationPercentageStat{durationStat: newDurationStat(func(d, delta time.Duration) interface{} {
		if delta == 0 {
			return 0
		}
		return float64(d) / float64(delta) * 100
	})}
}
type counterStat struct {
c float64
fn func(c, t float64, delta time.Duration) interface{}
isStarted bool
m *sync.Mutex // Locks isStarted
t float64
}
func newCounterStat(fn func(c, t float64, delta time.Duration) interface{}) *counterStat {
return &counterStat{
fn: fn,
m: &sync.Mutex{},
}
}
func (s *counterStat) Add(delta float64) {
s.m.Lock()
defer s.m.Unlock()
if !s.isStarted {
return
}
s.c += delta
s.t++
}
func (s *counterStat) Start() {
s.m.Lock()
defer s.m.Unlock()
s.c = 0
s.isStarted = true
s.t = 0
}
func (s *counterStat) Stop() {
s.m.Lock()
defer s.m.Unlock()
s.isStarted = true
}
func (s *counterStat) Value(delta time.Duration) interface{} {
s.m.Lock()
defer s.m.Unlock()
c := s.c
t := s.t
s.c = 0
s.t = 0
return s.fn(c, t, delta)
}
// CounterAvgStat is an object capable of computing the average value of a counter
type CounterAvgStat struct {
*counterStat
}
// NewCounterAvgStat creates a new counter avg stat
func NewCounterAvgStat() *CounterAvgStat {
return &CounterAvgStat{counterStat: newCounterStat(func(c, t float64, delta time.Duration) interface{} {
if t == 0 {
return 0
}
return c / t
})}
}
// CounterRateStat is an object capable of computing the average value of a counter per second
type CounterRateStat struct {
*counterStat
}
// NewCounterRateStat creates a new counter rate stat
func NewCounterRateStat() *CounterRateStat {
return &CounterRateStat{counterStat: newCounterStat(func(c, t float64, delta time.Duration) interface{} {
if delta.Seconds() == 0 {
return 0
}
return c / delta.Seconds()
})}
}

489
vendor/github.com/asticode/go-astikit/sync.go generated vendored Normal file
View file

@ -0,0 +1,489 @@
package astikit
import (
"bytes"
"context"
"errors"
"fmt"
"runtime"
"sync"
"sync/atomic"
"time"
)
// Stat names
const (
	StatNameWorkRatio = "astikit.work.ratio"
)

// Chan constants
const (
	// Calling Add() only blocks if the chan has been started and the ctx
	// has not been canceled
	ChanAddStrategyBlockWhenStarted = "block.when.started"
	// Calling Add() never blocks
	ChanAddStrategyNoBlock = "no.block"
	ChanOrderFIFO          = "fifo"
	ChanOrderFILO          = "filo"
)

// Chan is an object capable of executing funcs in a specific order while controlling the conditions
// in which adding new funcs is blocking
// Check out ChanOptions for detailed options
type Chan struct {
	cancel        context.CancelFunc
	c             *sync.Cond // signaled when a func is added or the ctx is done
	ctx           context.Context
	fs            []func() // buffer of funcs awaiting execution
	mc            *sync.Mutex // Locks ctx
	mf            *sync.Mutex // Locks fs
	o             ChanOptions
	running       uint32 // accessed atomically; 1 while Start is running
	statWorkRatio *DurationPercentageStat
}

// ChanOptions are Chan options
type ChanOptions struct {
	// Determines the conditions in which Add() blocks. See constants with pattern ChanAddStrategy*
	// Default is ChanAddStrategyNoBlock
	AddStrategy string
	// Order in which the funcs will be processed. See constants with pattern ChanOrder*
	// Default is ChanOrderFIFO
	Order string
	// By default the funcs not yet processed when the context is cancelled are dropped.
	// If "ProcessAll" is true, ALL funcs are processed even after the context is cancelled.
	// However, no funcs can be added after the context is cancelled
	ProcessAll bool
}

// NewChan creates a new Chan
func NewChan(o ChanOptions) *Chan {
	return &Chan{
		c:  sync.NewCond(&sync.Mutex{}),
		mc: &sync.Mutex{},
		mf: &sync.Mutex{},
		o:  o,
	}
}
// Start starts the chan by looping through functions in the buffer and
// executing them if any, or waiting for a new one otherwise
func (c *Chan) Start(ctx context.Context) {
// Make sure to start only once
if atomic.CompareAndSwapUint32(&c.running, 0, 1) {
// Update status
defer atomic.StoreUint32(&c.running, 0)
// Create context
c.mc.Lock()
c.ctx, c.cancel = context.WithCancel(ctx)
d := c.ctx.Done()
c.mc.Unlock()
// Handle context
go func() {
// Wait for context to be done
<-d
// Signal
c.c.L.Lock()
c.c.Signal()
c.c.L.Unlock()
}()
// Loop
for {
// Lock cond here in case a func is added between retrieving l and doing the if on it
c.c.L.Lock()
// Get number of funcs in buffer
c.mf.Lock()
l := len(c.fs)
c.mf.Unlock()
// Only return if context has been cancelled and:
// - the user wants to drop funcs that has not yet been processed
// - the buffer is empty otherwise
c.mc.Lock()
if c.ctx.Err() != nil && (!c.o.ProcessAll || l == 0) {
c.mc.Unlock()
c.c.L.Unlock()
return
}
c.mc.Unlock()
// No funcs in buffer
if l == 0 {
c.c.Wait()
c.c.L.Unlock()
continue
}
c.c.L.Unlock()
// Get first func
c.mf.Lock()
fn := c.fs[0]
c.mf.Unlock()
// Execute func
if c.statWorkRatio != nil {
c.statWorkRatio.Begin()
}
fn()
if c.statWorkRatio != nil {
c.statWorkRatio.End()
}
// Remove first func
c.mf.Lock()
c.fs = c.fs[1:]
c.mf.Unlock()
}
}
}
// Stop stops the chan
func (c *Chan) Stop() {
c.mc.Lock()
if c.cancel != nil {
c.cancel()
}
c.mc.Unlock()
}
// Add adds a new item to the chan. It is a no-op once the chan's context
// has been cancelled. With the ChanAddStrategyBlockWhenStarted strategy,
// Add blocks until the func has actually been executed by Start.
func (c *Chan) Add(i func()) {
	// Check context: no funcs can be added after cancellation
	c.mc.Lock()
	if c.ctx != nil && c.ctx.Err() != nil {
		c.mc.Unlock()
		return
	}
	c.mc.Unlock()
	// Wrap the function so that Add can wait for its execution when the
	// block-when-started strategy is requested
	var fn func()
	var wg *sync.WaitGroup
	if c.o.AddStrategy == ChanAddStrategyBlockWhenStarted {
		wg = &sync.WaitGroup{}
		wg.Add(1)
		fn = func() {
			defer wg.Done()
			i()
		}
	} else {
		fn = i
	}
	// Add func to buffer (FILO prepends, any other order appends)
	c.mf.Lock()
	if c.o.Order == ChanOrderFILO {
		c.fs = append([]func(){fn}, c.fs...)
	} else {
		c.fs = append(c.fs, fn)
	}
	c.mf.Unlock()
	// Signal Start's loop that a new func is available
	c.c.L.Lock()
	c.c.Signal()
	c.c.L.Unlock()
	// Wait for execution (block-when-started strategy only)
	if wg != nil {
		wg.Wait()
	}
}
// Reset resets the chan by emptying its buffer of pending funcs.
func (c *Chan) Reset() {
	c.mf.Lock()
	c.fs = []func(){}
	c.mf.Unlock()
}
// Stats returns the chan stats, lazily creating the work-ratio stat on
// first call.
// NOTE(review): the lazy write to c.statWorkRatio is not synchronized with
// Start reading it — call Stats before Start to be safe; confirm intended usage.
func (c *Chan) Stats() []StatOptions {
	if c.statWorkRatio == nil {
		c.statWorkRatio = NewDurationPercentageStat()
	}
	return []StatOptions{
		{
			Handler: c.statWorkRatio,
			Metadata: &StatMetadata{
				Description: "Percentage of time doing work",
				Label:       "Work ratio",
				Name:        StatNameWorkRatio,
				Unit:        "%",
			},
		},
	}
}
// BufferPool represents a *bytes.Buffer pool
type BufferPool struct {
bp *sync.Pool
}
// NewBufferPool creates a new BufferPool backed by a sync.Pool of
// *bytes.Buffer values.
func NewBufferPool() *BufferPool {
	p := &sync.Pool{New: func() interface{} { return &bytes.Buffer{} }}
	return &BufferPool{bp: p}
}
// New creates a new BufferPoolItem
func (p *BufferPool) New() *BufferPoolItem {
return newBufferPoolItem(p.bp.Get().(*bytes.Buffer), p.bp)
}
// BufferPoolItem represents a BufferPool item
type BufferPoolItem struct {
*bytes.Buffer
bp *sync.Pool
}
func newBufferPoolItem(b *bytes.Buffer, bp *sync.Pool) *BufferPoolItem {
return &BufferPoolItem{
Buffer: b,
bp: bp,
}
}
// Close implements the io.Closer interface: it resets the buffer and puts
// it back into the pool for reuse.
func (i *BufferPoolItem) Close() error {
	i.Buffer.Reset()
	i.bp.Put(i.Buffer)
	return nil
}
// GoroutineLimiter is an object capable of doing several things in parallel while maintaining the
// max number of things running in parallel under a threshold
type GoroutineLimiter struct {
busy int
c *sync.Cond
ctx context.Context
cancel context.CancelFunc
o GoroutineLimiterOptions
}
// GoroutineLimiterOptions represents GoroutineLimiter options
type GoroutineLimiterOptions struct {
Max int
}
// NewGoroutineLimiter creates a new GoroutineLimiter.
// A Max <= 0 is normalized to 1 so the limiter always allows progress.
func NewGoroutineLimiter(o GoroutineLimiterOptions) (l *GoroutineLimiter) {
	l = &GoroutineLimiter{
		c: sync.NewCond(&sync.Mutex{}),
		o: o,
	}
	if l.o.Max <= 0 {
		l.o.Max = 1
	}
	l.ctx, l.cancel = context.WithCancel(context.Background())
	// Wake up any Do caller waiting on the cond once the limiter is closed
	go l.handleCtx()
	return
}
// Close closes the limiter properly
func (l *GoroutineLimiter) Close() error {
l.cancel()
return nil
}
func (l *GoroutineLimiter) handleCtx() {
<-l.ctx.Done()
l.c.L.Lock()
l.c.Broadcast()
l.c.L.Unlock()
}
// GoroutineLimiterFunc is a GoroutineLimiter func
type GoroutineLimiterFunc func()
// Do executes custom work in a goroutine while keeping the number of
// concurrently running funcs under o.Max. It returns the limiter's context
// error if the limiter has been closed, either before or while waiting for
// a free slot.
func (l *GoroutineLimiter) Do(fn GoroutineLimiterFunc) (err error) {
	// Check context in case the limiter has already been closed
	if err = l.ctx.Err(); err != nil {
		return
	}
	// Lock
	l.c.L.Lock()
	// Wait for a goroutine to be available (handleCtx broadcasts on close
	// so this wait is also woken up by Close)
	for l.busy >= l.o.Max {
		l.c.Wait()
	}
	// Check context in case the limiter has been closed while waiting.
	// BUGFIX: release the lock before returning, otherwise the mutex stays
	// held forever and every subsequent Do call deadlocks on Lock.
	if err = l.ctx.Err(); err != nil {
		l.c.L.Unlock()
		return
	}
	// Increment
	l.busy++
	// Unlock
	l.c.L.Unlock()
	// Execute in a goroutine
	go func() {
		// Decrement and free a slot for the next waiter
		defer func() {
			l.c.L.Lock()
			l.busy--
			l.c.Signal()
			l.c.L.Unlock()
		}()
		// Execute
		fn()
	}()
	return
}
// Eventer represents an object that can dispatch simple events (name + payload)
type Eventer struct {
c *Chan
hs map[string][]EventerHandler
mh *sync.Mutex
}
// EventerOptions represents Eventer options
type EventerOptions struct {
Chan ChanOptions
}
// EventerHandler represents a function that can handle the payload of an event
type EventerHandler func(payload interface{})
// NewEventer creates a new eventer based on the provided options.
func NewEventer(o EventerOptions) *Eventer {
	e := &Eventer{mh: &sync.Mutex{}}
	e.c = NewChan(o.Chan)
	e.hs = map[string][]EventerHandler{}
	return e
}
// On adds an handler for a specific name.
func (e *Eventer) On(name string, h EventerHandler) {
	e.mh.Lock()
	e.hs[name] = append(e.hs[name], h)
	e.mh.Unlock()
}
// Dispatch dispatches a payload for a specific name.
// Handlers are not invoked synchronously: each one is queued on the
// eventer's chan and runs when/while Start is looping.
func (e *Eventer) Dispatch(name string, payload interface{}) {
	// Lock
	e.mh.Lock()
	defer e.mh.Unlock()
	// No handlers
	hs, ok := e.hs[name]
	if !ok {
		return
	}
	// Loop through handlers
	for _, h := range hs {
		// Capture h by value so each queued func sees its own handler
		// (loop variables were shared across iterations before Go 1.22)
		func(h EventerHandler) {
			// Add to chan
			e.c.Add(func() {
				h(payload)
			})
		}(h)
	}
}
// Start starts the eventer. It is blocking
func (e *Eventer) Start(ctx context.Context) {
e.c.Start(ctx)
}
// Stop stops the eventer
func (e *Eventer) Stop() {
e.c.Stop()
}
// Reset resets the eventer
func (e *Eventer) Reset() {
e.c.Reset()
}
// RWMutex represents a RWMutex capable of logging its actions to ease deadlock debugging
type RWMutex struct {
c string // Last successful caller
l SeverityLogger
m *sync.RWMutex
n string // Name
}
// RWMutexOptions represents RWMutex options
type RWMutexOptions struct {
Logger StdLogger
Name string
}
// NewRWMutex creates a new RWMutex
func NewRWMutex(o RWMutexOptions) *RWMutex {
return &RWMutex{
l: AdaptStdLogger(o.Logger),
m: &sync.RWMutex{},
n: o.Name,
}
}
func (m *RWMutex) caller() (o string) {
if _, file, line, ok := runtime.Caller(2); ok {
o = fmt.Sprintf("%s:%d", file, line)
}
return
}
// Lock write locks the mutex
func (m *RWMutex) Lock() {
c := m.caller()
m.l.Debugf("astikit: requesting lock for %s at %s", m.n, c)
m.m.Lock()
m.l.Debugf("astikit: lock acquired for %s at %s", m.n, c)
m.c = c
}
// Unlock write unlocks the mutex
func (m *RWMutex) Unlock() {
m.m.Unlock()
m.l.Debugf("astikit: unlock executed for %s", m.n)
}
// RLock read locks the mutex
func (m *RWMutex) RLock() {
c := m.caller()
m.l.Debugf("astikit: requesting rlock for %s at %s", m.n, c)
m.m.RLock()
m.l.Debugf("astikit: rlock acquired for %s at %s", m.n, c)
m.c = c
}
// RUnlock read unlocks the mutex
// NOTE(review): the log message says "unlock" although this is a read
// unlock — looks like a copy/paste of Unlock's line; confirm before changing.
func (m *RWMutex) RUnlock() {
	m.m.RUnlock()
	m.l.Debugf("astikit: unlock executed for %s", m.n)
}
// IsDeadlocked checks whether the mutex is deadlocked with a given timeout
// and returns the last successful caller.
// NOTE(review): when the mutex really is locked, the probing goroutine
// stays blocked on Lock until the mutex is eventually released — it is
// effectively leaked for that duration.
func (m *RWMutex) IsDeadlocked(timeout time.Duration) (bool, string) {
	ctx, cancel := context.WithTimeout(context.Background(), timeout)
	defer cancel()
	// Try to acquire the lock; cancel the context as soon as we succeed
	go func() {
		m.m.Lock()
		cancel()
		m.m.Unlock()
	}()
	<-ctx.Done()
	// DeadlineExceeded means the lock could not be acquired within timeout
	return errors.Is(ctx.Err(), context.DeadlineExceeded), m.c
}

156
vendor/github.com/asticode/go-astikit/template.go generated vendored Normal file
View file

@ -0,0 +1,156 @@
package astikit
import (
"fmt"
"io/ioutil"
"os"
"path/filepath"
"strings"
"sync"
"text/template"
)
// Templater represents an object capable of storing and parsing templates
type Templater struct {
layouts []string
m sync.Mutex
templates map[string]*template.Template
}
// NewTemplater creates a new templater with an empty template store.
func NewTemplater() *Templater {
	t := Templater{templates: map[string]*template.Template{}}
	return &t
}
// AddLayoutsFromDir walks through a dir and add files as layouts
func (t *Templater) AddLayoutsFromDir(dirPath, ext string) (err error) {
// Get layouts
if err = filepath.Walk(dirPath, func(path string, info os.FileInfo, e error) (err error) {
// Check input error
if e != nil {
err = fmt.Errorf("astikit: walking layouts has an input error for path %s: %w", path, e)
return
}
// Only process files
if info.IsDir() {
return
}
// Check extension
if ext != "" && filepath.Ext(path) != ext {
return
}
// Read layout
var b []byte
if b, err = ioutil.ReadFile(path); err != nil {
err = fmt.Errorf("astikit: reading %s failed: %w", path, err)
return
}
// Add layout
t.AddLayout(string(b))
return
}); err != nil {
err = fmt.Errorf("astikit: walking layouts in %s failed: %w", dirPath, err)
return
}
return
}
// AddTemplatesFromDir walks through a dir and add files as templates
func (t *Templater) AddTemplatesFromDir(dirPath, ext string) (err error) {
// Loop through templates
if err = filepath.Walk(dirPath, func(path string, info os.FileInfo, e error) (err error) {
// Check input error
if e != nil {
err = fmt.Errorf("astikit: walking templates has an input error for path %s: %w", path, e)
return
}
// Only process files
if info.IsDir() {
return
}
// Check extension
if ext != "" && filepath.Ext(path) != ext {
return
}
// Read file
var b []byte
if b, err = ioutil.ReadFile(path); err != nil {
err = fmt.Errorf("astikit: reading template content of %s failed: %w", path, err)
return
}
// Add template
// We use ToSlash to homogenize Windows path
if err = t.AddTemplate(filepath.ToSlash(strings.TrimPrefix(path, dirPath)), string(b)); err != nil {
err = fmt.Errorf("astikit: adding template failed: %w", err)
return
}
return
}); err != nil {
err = fmt.Errorf("astikit: walking templates in %s failed: %w", dirPath, err)
return
}
return
}
// AddLayout adds a new layout
func (t *Templater) AddLayout(c string) {
t.layouts = append(t.layouts, c)
}
// AddTemplate parses content (together with the registered layouts) and
// stores the resulting template under path.
func (t *Templater) AddTemplate(path, content string) (err error) {
	// Parse
	tpl, err := t.Parse(content)
	if err != nil {
		return fmt.Errorf("astikit: parsing template for path %s failed: %w", path, err)
	}
	// Add template
	t.m.Lock()
	defer t.m.Unlock()
	t.templates[path] = tpl
	return nil
}
// DelTemplate removes the template stored under path, if any.
func (t *Templater) DelTemplate(path string) {
	t.m.Lock()
	delete(t.templates, path)
	t.m.Unlock()
}
// Template retrieves the template stored under path, reporting whether it
// exists.
func (t *Templater) Template(path string) (*template.Template, bool) {
	t.m.Lock()
	defer t.m.Unlock()
	tpl, ok := t.templates[path]
	return tpl, ok
}
// Parse parses the content of a template
func (t *Templater) Parse(content string) (o *template.Template, err error) {
// Parse content
o = template.New("root")
if o, err = o.Parse(content); err != nil {
err = fmt.Errorf("astikit: parsing template content failed: %w", err)
return
}
// Parse layouts
for idx, l := range t.layouts {
if o, err = o.Parse(l); err != nil {
err = fmt.Errorf("astikit: parsing layout #%d failed: %w", idx+1, err)
return
}
}
return
}

58
vendor/github.com/asticode/go-astikit/time.go generated vendored Normal file
View file

@ -0,0 +1,58 @@
package astikit
import (
"context"
"strconv"
"time"
)
var now = func() time.Time { return time.Now() }
// Sleep is a cancellable sleep
func Sleep(ctx context.Context, d time.Duration) (err error) {
for {
select {
case <-time.After(d):
return
case <-ctx.Done():
err = ctx.Err()
return
}
}
}
// Timestamp represents a timestamp you can marshal and umarshal
type Timestamp struct {
time.Time
}
// NewTimestamp creates a new timestamp
func NewTimestamp(t time.Time) *Timestamp {
return &Timestamp{Time: t}
}
// UnmarshalJSON implements the JSONUnmarshaler interface
func (t *Timestamp) UnmarshalJSON(text []byte) error {
return t.UnmarshalText(text)
}
// UnmarshalText implements the TextUnmarshaler interface. The text is
// expected to be a Unix timestamp expressed in seconds.
func (t *Timestamp) UnmarshalText(text []byte) (err error) {
	// Parse directly as int64 so timestamps past 2038 also work on
	// platforms where int is 32 bits (strconv.Atoi would overflow there)
	var i int64
	if i, err = strconv.ParseInt(string(text), 10, 64); err != nil {
		return
	}
	t.Time = time.Unix(i, 0)
	return
}
// MarshalJSON implements the JSONMarshaler interface
func (t Timestamp) MarshalJSON() ([]byte, error) {
return t.MarshalText()
}
// MarshalText implements the TextMarshaler interface: it renders the
// timestamp as a Unix timestamp expressed in seconds.
func (t Timestamp) MarshalText() (text []byte, err error) {
	// Format the int64 directly to avoid truncation on 32-bit platforms
	// (the previous int(...) conversion would overflow for far-future dates)
	text = []byte(strconv.FormatInt(t.UTC().Unix(), 10))
	return
}

184
vendor/github.com/asticode/go-astikit/translator.go generated vendored Normal file
View file

@ -0,0 +1,184 @@
package astikit
import (
"context"
"encoding/json"
"fmt"
"net/http"
"os"
"path/filepath"
"strings"
"sync"
)
// Translator represents an object capable of translating stuff
type Translator struct {
m *sync.RWMutex // Lock p
o TranslatorOptions
p map[string]string
}
// TranslatorOptions represents Translator options
type TranslatorOptions struct {
DefaultLanguage string
}
// NewTranslator creates a new Translator with an empty translation store.
func NewTranslator(o TranslatorOptions) *Translator {
	t := Translator{o: o}
	t.m = &sync.RWMutex{}
	t.p = map[string]string{}
	return &t
}
// ParseDir adds translations located in ".json" files in the specified dir
func (t *Translator) ParseDir(dirPath string) (err error) {
// Default dir path
if dirPath == "" {
if dirPath, err = os.Getwd(); err != nil {
err = fmt.Errorf("astikit: getwd failed: %w", err)
return
}
}
// Walk through dir
if err = filepath.Walk(dirPath, func(path string, info os.FileInfo, e error) (err error) {
// Check input error
if e != nil {
err = fmt.Errorf("astikit: walking %s has an input error for path %s: %w", dirPath, path, e)
return
}
// Only process first level files
if info.IsDir() {
if path != dirPath {
err = filepath.SkipDir
}
return
}
// Only process ".json" files
if filepath.Ext(path) != ".json" {
return
}
// Parse file
if err = t.ParseFile(path); err != nil {
err = fmt.Errorf("astikit: parsing %s failed: %w", path, err)
return
}
return
}); err != nil {
err = fmt.Errorf("astikit: walking %s failed: %w", dirPath, err)
return
}
return
}
// ParseFile adds translation located in the provided path
func (t *Translator) ParseFile(path string) (err error) {
// Lock
t.m.Lock()
defer t.m.Unlock()
// Open file
var f *os.File
if f, err = os.Open(path); err != nil {
err = fmt.Errorf("astikit: opening %s failed: %w", path, err)
return
}
defer f.Close()
// Unmarshal
var p map[string]interface{}
if err = json.NewDecoder(f).Decode(&p); err != nil {
err = fmt.Errorf("astikit: unmarshaling %s failed: %w", path, err)
return
}
// Parse
t.parse(p, strings.TrimSuffix(filepath.Base(path), filepath.Ext(path)))
return
}
// key builds the dot-separated translation key for a prefix and a key.
func (t *Translator) key(prefix, key string) string {
	return strings.Join([]string{prefix, key}, ".")
}
func (t *Translator) parse(i map[string]interface{}, prefix string) {
for k, v := range i {
p := t.key(prefix, k)
switch a := v.(type) {
case string:
t.p[p] = a
case map[string]interface{}:
t.parse(a, p)
}
}
}
// HTTPMiddleware is the Translator HTTP middleware
func (t *Translator) HTTPMiddleware(h http.Handler) http.Handler {
return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
// Store language in context
if l := r.Header.Get("Accept-Language"); l != "" {
*r = *r.WithContext(contextWithTranslatorLanguage(r.Context(), l))
}
// Next handler
h.ServeHTTP(rw, r)
})
}
const contextKeyTranslatorLanguage = "astikit.translator.language"
func contextWithTranslatorLanguage(ctx context.Context, language string) context.Context {
return context.WithValue(ctx, contextKeyTranslatorLanguage, language)
}
func translatorLanguageFromContext(ctx context.Context) string {
v, ok := ctx.Value(contextKeyTranslatorLanguage).(string)
if !ok {
return ""
}
return v
}
// language returns the provided language, falling back to the default
// language when it is empty.
func (t *Translator) language(language string) string {
	if language != "" {
		return language
	}
	return t.o.DefaultLanguage
}
// LanguageCtx returns the translator language from the context, or the default language if not in the context
func (t *Translator) LanguageCtx(ctx context.Context) string {
return t.language(translatorLanguageFromContext(ctx))
}
// Translate translates a key into a specific language. It falls back to
// the default language, and finally returns the language-prefixed key
// itself when no translation exists at all.
func (t *Translator) Translate(language, key string) string {
	// Lock
	t.m.RLock()
	defer t.m.RUnlock()
	// Get translation in the requested (or default) language
	k1 := t.key(t.language(language), key)
	v, ok := t.p[k1]
	if ok {
		return v
	}
	// Fall back to the default language
	k2 := t.key(t.o.DefaultLanguage, key)
	if v, ok = t.p[k2]; ok {
		return v
	}
	// No translation found: return the computed key
	return k1
}
// TranslateCtx translates a key using the language specified in the context
func (t *Translator) TranslateCtx(ctx context.Context, key string) string {
return t.Translate(translatorLanguageFromContext(ctx), key)
}

148
vendor/github.com/asticode/go-astikit/worker.go generated vendored Normal file
View file

@ -0,0 +1,148 @@
package astikit
import (
"context"
"os"
"os/signal"
"sync"
)
// Worker represents an object capable of blocking, handling signals and stopping
type Worker struct {
cancel context.CancelFunc
ctx context.Context
l SeverityLogger
os, ow sync.Once
wg *sync.WaitGroup
}
// WorkerOptions represents worker options
type WorkerOptions struct {
Logger StdLogger
}
// NewWorker builds a new worker
func NewWorker(o WorkerOptions) (w *Worker) {
w = &Worker{
l: AdaptStdLogger(o.Logger),
wg: &sync.WaitGroup{},
}
w.ctx, w.cancel = context.WithCancel(context.Background())
w.wg.Add(1)
w.l.Info("astikit: starting worker...")
return
}
// HandleSignals handles signals: the provided handlers are invoked for
// every received signal, after a mandatory handler that stops the worker
// on termination signals.
func (w *Worker) HandleSignals(hs ...SignalHandler) {
	// Prepend mandatory handler
	hs = append([]SignalHandler{TermSignalHandler(w.Stop)}, hs...)
	// Notify
	// NOTE(review): no specific signals are passed to Notify, so ALL
	// incoming signals are relayed on a channel with a buffer of 1
	ch := make(chan os.Signal, 1)
	signal.Notify(ch)
	// Execute in a task so Wait() accounts for this goroutine
	w.NewTask().Do(func() {
		for {
			select {
			case s := <-ch:
				// Loop through handlers
				for _, h := range hs {
					h(s)
				}
				// Return on termination signals
				if isTermSignal(s) {
					return
				}
			case <-w.Context().Done():
				return
			}
		}
	})
}
// Stop stops the Worker
func (w *Worker) Stop() {
w.os.Do(func() {
w.l.Info("astikit: stopping worker...")
w.cancel()
w.wg.Done()
})
}
// Wait is a blocking pattern
func (w *Worker) Wait() {
w.ow.Do(func() {
w.l.Info("astikit: worker is now waiting...")
w.wg.Wait()
})
}
// NewTask creates a new task
func (w *Worker) NewTask() *Task {
return newTask(w.wg)
}
// Context returns the worker's context
func (w *Worker) Context() context.Context {
return w.ctx
}
// Logger returns the worker's logger
func (w *Worker) Logger() SeverityLogger {
return w.l
}
// TaskFunc represents a function that can create a new task
type TaskFunc func() *Task
// Task represents a task
type Task struct {
od, ow sync.Once
wg, pwg *sync.WaitGroup
}
func newTask(parentWg *sync.WaitGroup) (t *Task) {
t = &Task{
wg: &sync.WaitGroup{},
pwg: parentWg,
}
t.pwg.Add(1)
return
}
// NewSubTask creates a new sub task
func (t *Task) NewSubTask() *Task {
return newTask(t.wg)
}
// Do executes the task in its own goroutine, marking the task as done once
// f and all first-level subtasks have finished.
func (t *Task) Do(f func()) {
	go func() {
		// Make sure to mark the task as done
		defer t.Done()
		// Custom
		f()
		// Wait for first level subtasks to be done
		// Wait() can also be called in f() if something needs to be executed just after Wait()
		t.Wait()
	}()
}
// Done indicates the task is done
func (t *Task) Done() {
t.od.Do(func() {
t.pwg.Done()
})
}
// Wait waits for first level subtasks to be finished
func (t *Task) Wait() {
t.ow.Do(func() {
t.wg.Wait()
})
}

5
vendor/github.com/asticode/go-astisub/.gitignore generated vendored Normal file
View file

@ -0,0 +1,5 @@
.DS_Store
Thumbs.db
.idea/
cover*
test

14
vendor/github.com/asticode/go-astisub/.travis.yml generated vendored Normal file
View file

@ -0,0 +1,14 @@
language: go
go:
- 1.x
- tip
install:
- go get -t ./...
- go get golang.org/x/tools/cmd/cover
- go get github.com/mattn/goveralls
matrix:
allow_failures:
- go: tip
script:
- go test -race -v -coverprofile=coverage.out
- $HOME/gopath/bin/goveralls -coverprofile=coverage.out -service=travis-ci

21
vendor/github.com/asticode/go-astisub/LICENSE generated vendored Normal file
View file

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2016 Quentin Renard
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

95
vendor/github.com/asticode/go-astisub/README.md generated vendored Normal file
View file

@ -0,0 +1,95 @@
[![GoReportCard](http://goreportcard.com/badge/github.com/asticode/go-astisub)](http://goreportcard.com/report/github.com/asticode/go-astisub)
[![GoDoc](https://godoc.org/github.com/asticode/go-astisub?status.svg)](https://godoc.org/github.com/asticode/go-astisub)
[![Travis](https://travis-ci.com/asticode/go-astisub.svg?branch=master)](https://travis-ci.com/asticode/go-astisub#)
[![Coveralls](https://coveralls.io/repos/github/asticode/go-astisub/badge.svg?branch=master)](https://coveralls.io/github/asticode/go-astisub)
This is a Golang library to manipulate subtitles.
It allows you to manipulate `srt`, `stl`, `ttml`, `ssa/ass`, `webvtt` and `teletext` files for now.
Available operations are `parsing`, `writing`, `syncing`, `fragmenting`, `unfragmenting`, `merging` and `optimizing`.
# Installation
To install the library:
go get github.com/asticode/go-astisub
To install the CLI:
go install github.com/asticode/go-astisub/astisub
# Using the library in your code
WARNING: the code below doesn't handle errors for readability purposes. However you SHOULD!
```go
// Open subtitles
s1, _ := astisub.OpenFile("/path/to/example.ttml")
s2, _ := astisub.ReadFromSRT(bytes.NewReader([]byte("00:01:00.000 --> 00:02:00.000\nCredits")))
// Add a duration to every subtitles (syncing)
s1.Add(-2*time.Second)
// Fragment the subtitles
s1.Fragment(2*time.Second)
// Merge subtitles
s1.Merge(s2)
// Optimize subtitles
s1.Optimize()
// Unfragment the subtitles
s1.Unfragment()
// Write subtitles
s1.Write("/path/to/example.srt")
var buf = &bytes.Buffer{}
s2.WriteToTTML(buf)
```
# Using the CLI
If **astisub** has been installed properly you can:
- convert any type of subtitle to any other type of subtitle:
astisub convert -i example.srt -o example.ttml
- fragment any type of subtitle:
astisub fragment -i example.srt -f 2s -o example.out.srt
- merge any type of subtitle into any other type of subtitle:
astisub merge -i example.srt -i example.ttml -o example.out.srt
- optimize any type of subtitle:
astisub optimize -i example.srt -o example.out.srt
- unfragment any type of subtitle:
astisub unfragment -i example.srt -o example.out.srt
- sync any type of subtitle:
astisub sync -i example.srt -s "-2s" -o example.out.srt
# Features and roadmap
- [x] parsing
- [x] writing
- [x] syncing
- [x] fragmenting/unfragmenting
- [x] merging
- [x] ordering
- [x] optimizing
- [x] .srt
- [x] .ttml
- [x] .vtt
- [x] .stl
- [x] .ssa/.ass
- [x] .teletext
- [ ] .smi

10
vendor/github.com/asticode/go-astisub/language.go generated vendored Normal file
View file

@ -0,0 +1,10 @@
package astisub
// Languages
const (
LanguageChinese = "chinese"
LanguageEnglish = "english"
LanguageFrench = "french"
LanguageJapanese = "japanese"
LanguageNorwegian = "norwegian"
)

159
vendor/github.com/asticode/go-astisub/srt.go generated vendored Normal file
View file

@ -0,0 +1,159 @@
package astisub
import (
"bufio"
"fmt"
"io"
"strconv"
"strings"
"time"
)
// Constants
const (
srtTimeBoundariesSeparator = " --> "
)
// Vars
var (
bytesSRTTimeBoundariesSeparator = []byte(srtTimeBoundariesSeparator)
)
// parseDurationSRT parses an .srt duration
func parseDurationSRT(i string) (time.Duration, error) {
return parseDuration(i, ",", 3)
}
// ReadFromSRT parses an .srt content into Subtitles.
// The parser is line-oriented: text lines (including the numeric index of
// the NEXT cue) accumulate on the current item until a time-boundaries
// line ("... --> ...") is met, at which point the accumulated index is
// popped, a new item is started and its boundaries are parsed.
func ReadFromSRT(i io.Reader) (o *Subtitles, err error) {
	// Init
	o = NewSubtitles()
	var scanner = bufio.NewScanner(i)
	// Scan
	var line string
	var lineNum int
	var s = &Item{}
	for scanner.Scan() {
		// Fetch line
		line = strings.TrimSpace(scanner.Text())
		lineNum++
		// Remove BOM header
		if lineNum == 1 {
			line = strings.TrimPrefix(line, string(BytesBOM))
		}
		// Line contains time boundaries
		if strings.Contains(line, srtTimeBoundariesSeparator) {
			// The numeric index line must precede the boundaries line, so
			// the previous item must hold at least one accumulated line
			if len(s.Lines) == 0 {
				err = fmt.Errorf("astisub: line %d: no lines", lineNum)
				return
			}
			// Remove last item of previous subtitle since it's the index
			index := s.Lines[len(s.Lines)-1]
			s.Lines = s.Lines[:len(s.Lines)-1]
			// Remove trailing empty lines (blank separators between cues)
			if len(s.Lines) > 0 {
				for i := len(s.Lines) - 1; i >= 0; i-- {
					if len(s.Lines[i].Items) > 0 {
						for j := len(s.Lines[i].Items) - 1; j >= 0; j-- {
							if len(s.Lines[i].Items[j].Text) == 0 {
								s.Lines[i].Items = s.Lines[i].Items[:j]
							} else {
								break
							}
						}
						if len(s.Lines[i].Items) == 0 {
							s.Lines = s.Lines[:i]
						}
					}
				}
			}
			// Init subtitle
			s = &Item{}
			// Fetch Index (parse errors are deliberately ignored; the index
			// then stays 0)
			s.Index, _ = strconv.Atoi(index.String())
			// Extract time boundaries
			s1 := strings.Split(line, srtTimeBoundariesSeparator)
			if l := len(s1); l < 2 {
				err = fmt.Errorf("astisub: line %d: time boundaries has only %d element(s)", lineNum, l)
				return
			}
			// We do this to eliminate extra stuff like positions which are not documented anywhere
			s2 := strings.Split(s1[1], " ")
			// Parse time boundaries
			if s.StartAt, err = parseDurationSRT(s1[0]); err != nil {
				err = fmt.Errorf("astisub: line %d: parsing srt duration %s failed: %w", lineNum, s1[0], err)
				return
			}
			if s.EndAt, err = parseDurationSRT(s2[0]); err != nil {
				err = fmt.Errorf("astisub: line %d: parsing srt duration %s failed: %w", lineNum, s2[0], err)
				return
			}
			// Append subtitle
			o.Items = append(o.Items, s)
		} else {
			// Add text
			s.Lines = append(s.Lines, Line{Items: []LineItem{{Text: strings.TrimSpace(line)}}})
		}
	}
	return
}
// formatDurationSRT formats an .srt duration
func formatDurationSRT(i time.Duration) string {
return formatDuration(i, ",", 3)
}
// WriteToSRT writes subtitles in .srt format: a BOM header followed, for
// each item, by a 1-based index, a time-boundaries line, the text lines
// and a blank separator. Returns ErrNoSubtitlesToWrite when there are no
// items.
func (s Subtitles) WriteToSRT(o io.Writer) (err error) {
	// Do not write anything if no subtitles
	if len(s.Items) == 0 {
		err = ErrNoSubtitlesToWrite
		return
	}
	// Add BOM header
	var c []byte
	c = append(c, BytesBOM...)
	// Loop through subtitles
	for k, v := range s.Items {
		// Add index (re-numbered sequentially, original Index is ignored)
		// and time boundaries
		c = append(c, []byte(strconv.Itoa(k+1))...)
		c = append(c, bytesLineSeparator...)
		c = append(c, []byte(formatDurationSRT(v.StartAt))...)
		c = append(c, bytesSRTTimeBoundariesSeparator...)
		c = append(c, []byte(formatDurationSRT(v.EndAt))...)
		c = append(c, bytesLineSeparator...)
		// Loop through lines
		for _, l := range v.Lines {
			c = append(c, []byte(l.String())...)
			c = append(c, bytesLineSeparator...)
		}
		// Add new line separating this cue from the next
		c = append(c, bytesLineSeparator...)
	}
	// Remove last new line
	c = c[:len(c)-1]
	// Write everything in one call
	if _, err = o.Write(c); err != nil {
		err = fmt.Errorf("astisub: writing failed: %w", err)
		return
	}
	return
}

1297
vendor/github.com/asticode/go-astisub/ssa.go generated vendored Normal file

File diff suppressed because it is too large Load diff

1085
vendor/github.com/asticode/go-astisub/stl.go generated vendored Normal file

File diff suppressed because it is too large Load diff

779
vendor/github.com/asticode/go-astisub/subtitles.go generated vendored Normal file
View file

@ -0,0 +1,779 @@
package astisub
import (
"errors"
"fmt"
"math"
"os"
"path/filepath"
"strconv"
"strings"
"time"
"github.com/asticode/go-astikit"
)
// Bytes
var (
BytesBOM = []byte{239, 187, 191}
bytesLineSeparator = []byte("\n")
bytesSpace = []byte(" ")
)
// Colors
var (
ColorBlack = &Color{}
ColorBlue = &Color{Blue: 255}
ColorCyan = &Color{Blue: 255, Green: 255}
ColorGray = &Color{Blue: 128, Green: 128, Red: 128}
ColorGreen = &Color{Green: 128}
ColorLime = &Color{Green: 255}
ColorMagenta = &Color{Blue: 255, Red: 255}
ColorMaroon = &Color{Red: 128}
ColorNavy = &Color{Blue: 128}
ColorOlive = &Color{Green: 128, Red: 128}
ColorPurple = &Color{Blue: 128, Red: 128}
ColorRed = &Color{Red: 255}
ColorSilver = &Color{Blue: 192, Green: 192, Red: 192}
ColorTeal = &Color{Blue: 128, Green: 128}
ColorYellow = &Color{Green: 255, Red: 255}
ColorWhite = &Color{Blue: 255, Green: 255, Red: 255}
)
// Errors
var (
ErrInvalidExtension = errors.New("astisub: invalid extension")
ErrNoSubtitlesToWrite = errors.New("astisub: no subtitles to write")
)
// Now returns the current time. It is declared as a variable so tests can
// stub it out.
var Now = time.Now
// Options represents open or write options
type Options struct {
Filename string
Teletext TeletextOptions
STL STLOptions
}
// Open opens a subtitle reader based on options: the parser is chosen from
// the filename extension (case-insensitive). Returns ErrInvalidExtension
// for unsupported extensions.
func Open(o Options) (s *Subtitles, err error) {
	// Open the file
	var f *os.File
	if f, err = os.Open(o.Filename); err != nil {
		err = fmt.Errorf("astisub: opening %s failed: %w", o.Filename, err)
		return
	}
	defer f.Close()
	// Parse the content, dispatching on the lowercased extension
	switch filepath.Ext(strings.ToLower(o.Filename)) {
	case ".srt":
		s, err = ReadFromSRT(f)
	case ".ssa", ".ass":
		s, err = ReadFromSSA(f)
	case ".stl":
		s, err = ReadFromSTL(f, o.STL)
	case ".ts":
		s, err = ReadFromTeletext(f, o.Teletext)
	case ".ttml":
		s, err = ReadFromTTML(f)
	case ".vtt":
		s, err = ReadFromWebVTT(f)
	default:
		err = ErrInvalidExtension
	}
	return
}
// OpenFile opens a file regardless of other options.
func OpenFile(filename string) (*Subtitles, error) {
	o := Options{Filename: filename}
	return Open(o)
}
// Subtitles represents an ordered list of items with formatting
type Subtitles struct {
Items []*Item
Metadata *Metadata
Regions map[string]*Region
Styles map[string]*Style
}
// NewSubtitles creates new subtitles with initialized (empty) region and
// style maps.
func NewSubtitles() *Subtitles {
	s := Subtitles{}
	s.Regions = map[string]*Region{}
	s.Styles = map[string]*Style{}
	return &s
}
// Item represents a text to show between 2 time boundaries with formatting
type Item struct {
Comments []string
Index int
EndAt time.Duration
InlineStyle *StyleAttributes
Lines []Line
Region *Region
StartAt time.Duration
Style *Style
}
// String implements the Stringer interface: it joins the item's lines with
// " - ".
func (i Item) String() string {
	lines := make([]string, 0, len(i.Lines))
	for _, l := range i.Lines {
		lines = append(lines, l.String())
	}
	return strings.Join(lines, " - ")
}
// Color represents a color
type Color struct {
Alpha, Blue, Green, Red uint8
}
// newColorFromSSAString builds a new color based on an SSA string, which
// encodes the color as an AABBGGRR integer in the given base.
func newColorFromSSAString(s string, base int) (c *Color, err error) {
	i, err := strconv.ParseInt(s, base, 64)
	if err != nil {
		return nil, fmt.Errorf("parsing int %s with base %d failed: %w", s, base, err)
	}
	c = &Color{
		Alpha: uint8(i>>24) & 0xff,
		Blue:  uint8(i>>16) & 0xff,
		Green: uint8(i>>8) & 0xff,
		Red:   uint8(i) & 0xff,
	}
	return c, nil
}
// SSAString expresses the color as an SSA string
func (c *Color) SSAString() string {
return fmt.Sprintf("%.8x", uint32(c.Alpha)<<24|uint32(c.Blue)<<16|uint32(c.Green)<<8|uint32(c.Red))
}
// TTMLString expresses the color as a TTML string
func (c *Color) TTMLString() string {
return fmt.Sprintf("%.6x", uint32(c.Red)<<16|uint32(c.Green)<<8|uint32(c.Blue))
}
type Justification int
var (
JustificationUnchanged = Justification(1)
JustificationLeft = Justification(2)
JustificationCentered = Justification(3)
JustificationRight = Justification(4)
)
// StyleAttributes represents style attributes
type StyleAttributes struct {
SSAAlignment *int
SSAAlphaLevel *float64
SSAAngle *float64 // degrees
SSABackColour *Color
SSABold *bool
SSABorderStyle *int
SSAEffect string
SSAEncoding *int
SSAFontName string
SSAFontSize *float64
SSAItalic *bool
SSALayer *int
SSAMarginLeft *int // pixels
SSAMarginRight *int // pixels
SSAMarginVertical *int // pixels
SSAMarked *bool
SSAOutline *float64 // pixels
SSAOutlineColour *Color
SSAPrimaryColour *Color
SSAScaleX *float64 // %
SSAScaleY *float64 // %
SSASecondaryColour *Color
SSAShadow *float64 // pixels
SSASpacing *float64 // pixels
SSAStrikeout *bool
SSAUnderline *bool
STLBoxing *bool
STLItalics *bool
STLJustification *Justification
STLPosition *STLPosition
STLUnderline *bool
TeletextColor *Color
TeletextDoubleHeight *bool
TeletextDoubleSize *bool
TeletextDoubleWidth *bool
TeletextSpacesAfter *int
TeletextSpacesBefore *int
// TODO Use pointers with real types below
TTMLBackgroundColor *string // https://htmlcolorcodes.com/fr/
TTMLColor *string
TTMLDirection *string
TTMLDisplay *string
TTMLDisplayAlign *string
TTMLExtent *string
TTMLFontFamily *string
TTMLFontSize *string
TTMLFontStyle *string
TTMLFontWeight *string
TTMLLineHeight *string
TTMLOpacity *string
TTMLOrigin *string
TTMLOverflow *string
TTMLPadding *string
TTMLShowBackground *string
TTMLTextAlign *string
TTMLTextDecoration *string
TTMLTextOutline *string
TTMLUnicodeBidi *string
TTMLVisibility *string
TTMLWrapOption *string
TTMLWritingMode *string
TTMLZIndex *int
WebVTTAlign string
WebVTTItalics bool
WebVTTLine string
WebVTTLines int
WebVTTPosition string
WebVTTRegionAnchor string
WebVTTScroll string
WebVTTSize string
WebVTTVertical string
WebVTTViewportAnchor string
WebVTTWidth string
}
// propagateSSAAttributes propagates SSA/ASS style attributes to the other
// format families. No mapping is implemented yet.
func (sa *StyleAttributes) propagateSSAAttributes() {}
// propagateSTLAttributes propagates STL style attributes to their WebVTT
// equivalents.
func (sa *StyleAttributes) propagateSTLAttributes() {
	if sa.STLJustification == nil {
		return
	}
	switch *sa.STLJustification {
	case JustificationLeft:
		sa.WebVTTAlign = "left"
	case JustificationRight:
		sa.WebVTTAlign = "right"
	case JustificationCentered:
		// default to middle anyway?
	}
}
// propagateTeletextAttributes propagates teletext style attributes to their
// TTML equivalents (currently only the foreground color).
func (sa *StyleAttributes) propagateTeletextAttributes() {
	if sa.TeletextColor != nil {
		sa.TTMLColor = astikit.StrPtr("#" + sa.TeletextColor.TTMLString())
	}
}
// propagateTTMLAttributes propagates TTML style attributes to their WebVTT
// equivalents.
// Reference for the migration: https://w3c.github.io/ttml-webvtt-mapping/
func (sa *StyleAttributes) propagateTTMLAttributes() {
	// Text alignment maps directly.
	if sa.TTMLTextAlign != nil {
		sa.WebVTTAlign = *sa.TTMLTextAlign
	}
	// "tb*" writing modes are vertical (top-to-bottom); the TTML default is
	// lrtb, i.e. left to right, top to bottom.
	vertical := sa.TTMLWritingMode != nil && strings.HasPrefix(*sa.TTMLWritingMode, "tb")
	if sa.TTMLExtent != nil {
		// Region settings: assume a line height of roughly 5.33vh.
		const lineHeight = 5
		if dims := strings.Split(*sa.TTMLExtent, " "); len(dims) > 1 {
			sa.WebVTTWidth = dims[0]
			if height, err := strconv.Atoi(strings.ReplaceAll(dims[1], "%", "")); err == nil {
				sa.WebVTTLines = height / lineHeight
			}
			// Cue settings: size follows the writing direction.
			if vertical {
				sa.WebVTTSize = dims[0]
			} else {
				sa.WebVTTSize = dims[1]
			}
		}
	}
	if sa.TTMLOrigin != nil {
		// Region settings.
		sa.WebVTTRegionAnchor = "0%,0%"
		sa.WebVTTViewportAnchor = strings.ReplaceAll(strings.TrimSpace(*sa.TTMLOrigin), " ", ",")
		sa.WebVTTScroll = "up"
		// Cue settings: line/position swap in vertical writing modes.
		if coords := strings.Split(*sa.TTMLOrigin, " "); len(coords) > 1 {
			if vertical {
				sa.WebVTTLine = coords[1]
				sa.WebVTTPosition = coords[0]
			} else {
				sa.WebVTTLine = coords[0]
				sa.WebVTTPosition = coords[1]
			}
		}
	}
}
// propagateWebVTTAttributes propagates WebVTT style attributes to the other
// format families. No mapping is implemented yet.
func (sa *StyleAttributes) propagateWebVTTAttributes() {}
// Metadata represents subtitle-file-level metadata.
// Like StyleAttributes it is a union of the metadata fields of every
// supported format (generic, SSA/ASS, EBU STL, TTML).
// TODO Merge attributes
type Metadata struct {
	Comments  []string
	Framerate int
	Language  string
	// SSA/ASS script info
	SSACollisions            string
	SSAOriginalEditing       string
	SSAOriginalScript        string
	SSAOriginalTiming        string
	SSAOriginalTranslation   string
	SSAPlayDepth             *int
	SSAPlayResX, SSAPlayResY *int
	SSAScriptType            string
	SSAScriptUpdatedBy       string
	SSASynchPoint            string
	SSATimer                 *float64
	SSAUpdateDetails         string
	SSAWrapStyle             string
	// EBU STL general subtitle information
	STLCountryOfOrigin                                  string
	STLCreationDate                                     *time.Time
	STLDisplayStandardCode                              string
	STLMaximumNumberOfDisplayableCharactersInAnyTextRow *int
	STLMaximumNumberOfDisplayableRows                   *int
	STLPublisher                                        string
	STLRevisionDate                                     *time.Time
	STLSubtitleListReferenceCode                        string
	STLTimecodeStartOfProgramme                         time.Duration
	Title                                               string
	TTMLCopyright                                       string
}
// Region represents a subtitle's region.
// InlineStyle holds attributes set on the region itself while Style points to
// a shared, named style.
type Region struct {
	ID          string
	InlineStyle *StyleAttributes
	Style       *Style
}
// Style represents a subtitle's named style.
// A style may itself reference a parent style through Style.
type Style struct {
	ID          string
	InlineStyle *StyleAttributes
	Style       *Style
}
// Line represents a set of formatted line items.
// VoiceName carries the speaker name where the format supports it
// (e.g. WebVTT voice spans).
type Line struct {
	Items     []LineItem
	VoiceName string
}
// String implements the Stringer interface: it returns the text of all line
// items joined by single spaces.
func (l Line) String() string {
	texts := make([]string, 0, len(l.Items))
	for _, item := range l.Items {
		texts = append(texts, item.Text)
	}
	return strings.Join(texts, " ")
}
// LineItem represents a formatted line item: a run of text with an optional
// inline style and/or a reference to a shared named style.
type LineItem struct {
	InlineStyle *StyleAttributes
	Style       *Style
	Text        string
}
// Add adds a duration to each time boundaries. As in the time package, duration can be negative.
// Items shifted entirely before the start of the timeline are dropped;
// items that merely start before it are clamped to 0.
func (s *Subtitles) Add(d time.Duration) {
	kept := s.Items[:0]
	for _, item := range s.Items {
		item.StartAt += d
		item.EndAt += d
		// Both boundaries at or before 0: the item is gone entirely.
		if item.StartAt <= 0 && item.EndAt <= 0 {
			continue
		}
		// Only the start is at or before 0: clamp it.
		if item.StartAt <= 0 {
			item.StartAt = 0
		}
		kept = append(kept, item)
	}
	s.Items = kept
}
// Duration returns the subtitles duration, i.e. the end time of the last
// item, or 0 when there are no items.
func (s Subtitles) Duration() time.Duration {
	if n := len(s.Items); n > 0 {
		return s.Items[n-1].EndAt
	}
	return 0
}
// ForceDuration updates the subtitles duration.
// If requested duration is bigger, then we create a dummy item.
// If requested duration is smaller, then we remove useless items and we cut the last item or add a dummy item.
func (s *Subtitles) ForceDuration(d time.Duration, addDummyItem bool) {
	// Requested duration is the same as the subtitles' one: nothing to do
	if s.Duration() == d {
		return
	}
	// Requested duration is smaller than the subtitles' one: truncate
	// (note: s.Duration() > d means the current subtitles run longer than d)
	if s.Duration() > d {
		// Find last item before input duration and update end at
		var lastIndex = -1
		for index, i := range s.Items {
			// Start at is bigger than input duration, we've found the last item
			if i.StartAt >= d {
				lastIndex = index
				break
			} else if i.EndAt > d {
				// Item straddles the requested duration: cut it
				s.Items[index].EndAt = d
			}
		}
		// Last index has been found: drop everything from there on
		if lastIndex != -1 {
			s.Items = s.Items[:lastIndex]
		}
	}
	// Requested duration is still bigger than the (possibly truncated)
	// subtitles' one: add a dummy item with the minimum duration possible
	if addDummyItem && s.Duration() < d {
		s.Items = append(s.Items, &Item{EndAt: d, Lines: []Line{{Items: []LineItem{{Text: "..."}}}}, StartAt: d - time.Millisecond})
	}
}
// Fragment fragments subtitles with a specific fragment duration: every item
// spanning a fragment boundary is split in two at that boundary, so that no
// item crosses a multiple of f. Items are re-ordered at the end.
func (s *Subtitles) Fragment(f time.Duration) {
	// Nothing to fragment
	if len(s.Items) == 0 {
		return
	}
	// Here we want to simulate fragments of duration f until there are no subtitles left in that period of time
	var fragmentStartAt, fragmentEndAt = time.Duration(0), f
	for fragmentStartAt < s.Items[len(s.Items)-1].EndAt {
		// We loop through subtitles and process the ones that either contain the fragment start at,
		// or contain the fragment end at
		//
		// It's useless processing subtitles contained between fragment start at and end at
		//   |____________________|             <- subtitle
		// |                        |
		// fragment start at        fragment end at
		//
		// NOTE(review): s.Items is appended to while being ranged over; the
		// range clause iterates over the slice header captured at loop entry,
		// so items inserted during this pass are not revisited within it —
		// confirm this is the intended behavior before restructuring.
		for i, sub := range s.Items {
			// Init: newSub is a shallow copy of the current item
			var newSub = &Item{}
			*newSub = *sub
			// A switch is more readable here
			switch {
			// Subtitle contains fragment start at
			//   |____________________|   <- subtitle
			//        |                 |
			//        fragment start at fragment end at
			case sub.StartAt < fragmentStartAt && sub.EndAt > fragmentStartAt:
				sub.StartAt = fragmentStartAt
				newSub.EndAt = fragmentStartAt
			// Subtitle contains fragment end at
			//   |____________________|   <- subtitle
			// |                 |
			// fragment start at fragment end at
			case sub.StartAt < fragmentEndAt && sub.EndAt > fragmentEndAt:
				sub.StartAt = fragmentEndAt
				newSub.EndAt = fragmentEndAt
			default:
				continue
			}
			// Insert new sub (the earlier half) just before the current item
			s.Items = append(s.Items[:i], append([]*Item{newSub}, s.Items[i:]...)...)
		}
		// Update fragments boundaries
		fragmentStartAt += f
		fragmentEndAt += f
	}
	// Order
	s.Order()
}
// IsEmpty reports whether the subtitles contain no items.
func (s Subtitles) IsEmpty() bool {
	return len(s.Items) == 0
}
// Merge merges subtitles i into subtitles.
// Items are appended and re-ordered by start time; regions and styles of i
// are added only when no region/style with the same ID already exists
// (existing entries win).
func (s *Subtitles) Merge(i *Subtitles) {
	// Append items and re-order
	s.Items = append(s.Items, i.Items...)
	s.Order()
	// Lazily initialize the maps so merging into a zero-value Subtitles
	// does not panic on a nil-map write.
	if s.Regions == nil {
		s.Regions = make(map[string]*Region)
	}
	if s.Styles == nil {
		s.Styles = make(map[string]*Style)
	}
	// Add regions
	for _, region := range i.Regions {
		if _, ok := s.Regions[region.ID]; !ok {
			s.Regions[region.ID] = region
		}
	}
	// Add styles
	for _, style := range i.Styles {
		if _, ok := s.Styles[style.ID]; !ok {
			s.Styles[style.ID] = style
		}
	}
}
// Optimize optimizes subtitles.
// For now this only consists of removing unused regions and styles.
func (s *Subtitles) Optimize() {
	if len(s.Items) == 0 {
		// Nothing to optimize
		return
	}
	s.removeUnusedRegionsAndStyles()
}
// removeUnusedRegionsAndStyles deletes every region and style that is not
// referenced by any item, line item, or surviving region.
func (s *Subtitles) removeUnusedRegionsAndStyles() {
	// Collect the region and style IDs referenced by items.
	usedRegions := make(map[string]bool)
	usedStyles := make(map[string]bool)
	for _, item := range s.Items {
		if item.Region != nil {
			usedRegions[item.Region.ID] = true
		}
		if item.Style != nil {
			usedStyles[item.Style.ID] = true
		}
		for _, line := range item.Lines {
			for _, lineItem := range line.Items {
				if lineItem.Style != nil {
					usedStyles[lineItem.Style.ID] = true
				}
			}
		}
	}
	// Drop unused regions; a kept region keeps its style alive too.
	for id, region := range s.Regions {
		if !usedRegions[region.ID] {
			delete(s.Regions, id)
			continue
		}
		if region.Style != nil {
			usedStyles[region.Style.ID] = true
		}
	}
	// Drop unused styles.
	for id, style := range s.Styles {
		if !usedStyles[style.ID] {
			delete(s.Styles, id)
		}
	}
}
// Order orders items by ascending start time.
// The sort is stable: items sharing a start time keep their relative order,
// which matters for fragmented subtitles.
func (s *Subtitles) Order() {
	// Nothing to do with fewer than two elements
	if len(s.Items) <= 1 {
		return
	}
	// Adjacent-swap (bubble) sort: only strictly out-of-order neighbours are
	// swapped, so equal elements are never reordered.
	for swapped := true; swapped; {
		swapped = false
		for i := 1; i < len(s.Items); i++ {
			if s.Items[i-1].StartAt > s.Items[i].StartAt {
				s.Items[i-1], s.Items[i] = s.Items[i], s.Items[i-1]
				swapped = true
			}
		}
	}
}
// RemoveStyling removes the styling from the subtitles: all regions and
// styles are dropped and every item and line item loses its style references.
func (s *Subtitles) RemoveStyling() {
	s.Regions = map[string]*Region{}
	s.Styles = map[string]*Style{}
	for _, item := range s.Items {
		item.Region = nil
		item.Style = nil
		item.InlineStyle = nil
		for li := range item.Lines {
			for lii := range item.Lines[li].Items {
				lineItem := &item.Lines[li].Items[lii]
				lineItem.InlineStyle = nil
				lineItem.Style = nil
			}
		}
	}
}
// Unfragment unfragments subtitles: consecutive items carrying identical text
// whose time ranges touch or overlap are merged back into a single item.
func (s *Subtitles) Unfragment() {
	// Nothing to do with fewer than two elements
	if len(s.Items) <= 1 {
		return
	}
	// Items must be sorted by start time for the early break below to hold.
	s.Order()
	for i := 0; i < len(s.Items)-1; i++ {
		cur := s.Items[i]
		for j := i + 1; j < len(s.Items); j++ {
			next := s.Items[j]
			if cur.String() == next.String() && cur.EndAt >= next.StartAt {
				// Same text and touching/overlapping: absorb next into cur,
				// keeping the later end time.
				if cur.EndAt < next.EndAt {
					cur.EndAt = next.EndAt
				}
				s.Items = append(s.Items[:j], s.Items[j+1:]...)
				// Re-check index j, which now holds the following item.
				j--
			} else if cur.EndAt < next.StartAt {
				// Ordered by start time: nothing later can touch cur.
				break
			}
		}
	}
}
// Write writes subtitles to a file, choosing the output format from the
// destination's extension (.srt, .ssa/.ass, .stl, .ttml or .vtt).
// It returns ErrInvalidExtension for any other extension.
func (s Subtitles) Write(dst string) error {
	// Create the file
	f, err := os.Create(dst)
	if err != nil {
		return fmt.Errorf("astisub: creating %s failed: %w", dst, err)
	}
	defer f.Close()
	// Dispatch on the (lowercased) extension
	switch filepath.Ext(strings.ToLower(dst)) {
	case ".srt":
		return s.WriteToSRT(f)
	case ".ssa", ".ass":
		return s.WriteToSSA(f)
	case ".stl":
		return s.WriteToSTL(f)
	case ".ttml":
		return s.WriteToTTML(f)
	case ".vtt":
		return s.WriteToWebVTT(f)
	default:
		return ErrInvalidExtension
	}
}
// parseDuration parses a duration in "00:00:00.000", "00:00:00,000" or "0:00:00:00" format
func parseDuration(i, millisecondSep string, numberOfMillisecondDigits int) (o time.Duration, err error) {
// Split milliseconds
var parts = strings.Split(i, millisecondSep)
var milliseconds int
var s string
if len(parts) >= 2 {
// Invalid number of millisecond digits
s = strings.TrimSpace(parts[len(parts)-1])
if len(s) > 3 {
err = fmt.Errorf("astisub: Invalid number of millisecond digits detected in %s", i)
return
}
// Parse milliseconds
if milliseconds, err = strconv.Atoi(s); err != nil {
err = fmt.Errorf("astisub: atoi of %s failed: %w", s, err)
return
}
milliseconds *= int(math.Pow10(numberOfMillisecondDigits - len(s)))
s = strings.Join(parts[:len(parts)-1], millisecondSep)
} else {
s = i
}
// Split hours, minutes and seconds
parts = strings.Split(strings.TrimSpace(s), ":")
var partSeconds, partMinutes, partHours string
if len(parts) == 2 {
partSeconds = parts[1]
partMinutes = parts[0]
} else if len(parts) == 3 {
partSeconds = parts[2]
partMinutes = parts[1]
partHours = parts[0]
} else {
err = fmt.Errorf("astisub: No hours, minutes or seconds detected in %s", i)
return
}
// Parse seconds
var seconds int
s = strings.TrimSpace(partSeconds)
if seconds, err = strconv.Atoi(s); err != nil {
err = fmt.Errorf("astisub: atoi of %s failed: %w", s, err)
return
}
// Parse minutes
var minutes int
s = strings.TrimSpace(partMinutes)
if minutes, err = strconv.Atoi(s); err != nil {
err = fmt.Errorf("astisub: atoi of %s failed: %w", s, err)
return
}
// Parse hours
var hours int
if len(partHours) > 0 {
s = strings.TrimSpace(partHours)
if hours, err = strconv.Atoi(s); err != nil {
err = fmt.Errorf("astisub: atoi of %s failed: %w", s, err)
return
}
}
// Generate output
o = time.Duration(milliseconds)*time.Millisecond + time.Duration(seconds)*time.Second + time.Duration(minutes)*time.Minute + time.Duration(hours)*time.Hour
return
}
// formatDuration formats a duration
func formatDuration(i time.Duration, millisecondSep string, numberOfMillisecondDigits int) (s string) {
// Parse hours
var hours = int(i / time.Hour)
var n = i % time.Hour
if hours < 10 {
s += "0"
}
s += strconv.Itoa(hours) + ":"
// Parse minutes
var minutes = int(n / time.Minute)
n = i % time.Minute
if minutes < 10 {
s += "0"
}
s += strconv.Itoa(minutes) + ":"
// Parse seconds
var seconds = int(n / time.Second)
n = i % time.Second
if seconds < 10 {
s += "0"
}
s += strconv.Itoa(seconds) + millisecondSep
// Parse milliseconds
var milliseconds = float64(n/time.Millisecond) / float64(1000)
s += fmt.Sprintf("%."+strconv.Itoa(numberOfMillisecondDigits)+"f", milliseconds)[2:]
return
}
// appendStringToBytesWithNewLine appends s to i, followed by the package's
// line separator, and returns the resulting slice.
func appendStringToBytesWithNewLine(i []byte, s string) []byte {
	o := append(i, s...)
	return append(o, bytesLineSeparator...)
}

997
vendor/github.com/asticode/go-astisub/teletext.go generated vendored Normal file
View file

@ -0,0 +1,997 @@
package astisub
import (
"context"
"errors"
"fmt"
"io"
"log"
"math/bits"
"sort"
"strings"
"time"
"github.com/asticode/go-astikit"
"github.com/asticode/go-astits"
)
// Errors
var (
	// ErrNoValidTeletextPID is returned when no PID carrying teletext data
	// matching the options could be found in the transport stream.
	ErrNoValidTeletextPID = errors.New("astisub: no valid teletext PID")
)
// teletextCharset maps the 96 displayable character codes (0x20-0x7f) of a
// teletext G0 or G2 set to their UTF-8 byte sequences.
type teletextCharset [96][]byte

// teletextNationalSubset holds the UTF-8 byte sequences of the 13 characters
// that a national option subset substitutes into the G0 set (at the positions
// listed in teletextNationalSubsetCharactersPositionInG0).
type teletextNationalSubset [13][]byte

// Chapter: 15.2 | Page: 109 | Link: http://www.etsi.org/deliver/etsi_i_ets/300700_300799/300706/01_60/ets_300706e01p.pdf
// It is indexed by triplet1 then by national option subset code
var teletextCharsets = map[uint8]map[uint8]struct {
	g0       *teletextCharset
	g2       *teletextCharset
	national *teletextNationalSubset
}{
	0: {
		0: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetEnglish},
		1: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetFrench},
		2: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetSwedishFinnishHungarian},
		3: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetCzechSlovak},
		4: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetGerman},
		5: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetPortugueseSpanish},
		6: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetItalian},
		7: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin},
	},
	1: {
		0: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetPolish},
		1: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetFrench},
		2: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetSwedishFinnishHungarian},
		3: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetCzechSlovak},
		4: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetGerman},
		5: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin},
		6: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetItalian},
		7: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin},
	},
	2: {
		0: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetEnglish},
		1: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetFrench},
		2: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetSwedishFinnishHungarian},
		3: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetCzechSlovak},
		4: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetGerman},
		5: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetPortugueseSpanish},
		6: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetItalian},
		7: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin},
	},
	3: {
		0: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin},
		1: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin},
		2: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin},
		3: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin},
		4: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin},
		5: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetSerbianCroatianSlovenian},
		6: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin},
		7: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetRomanian},
	},
	4: {
		0: {g0: teletextCharsetG0CyrillicOption1, g2: teletextCharsetG2Cyrillic},
		1: {g0: teletextCharsetG0CyrillicOption2, g2: teletextCharsetG2Cyrillic},
		2: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetEstonian},
		3: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetCzechSlovak},
		4: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetGerman},
		5: {g0: teletextCharsetG0CyrillicOption3, g2: teletextCharsetG2Cyrillic},
		6: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetLettishLithuanian},
	},
	6: {
		3: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetTurkish},
		7: {g0: teletextCharsetG0Greek, g2: teletextCharsetG2Greek},
	},
	8: {
		0: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Arabic, national: teletextNationalSubsetEnglish},
		1: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Arabic, national: teletextNationalSubsetFrench},
		7: {g0: teletextCharsetG0Arabic, g2: teletextCharsetG2Arabic},
	},
	10: {
		5: {g0: teletextCharsetG0Hebrew, g2: teletextCharsetG2Arabic},
		7: {g0: teletextCharsetG0Arabic, g2: teletextCharsetG2Arabic},
	},
}
// Teletext G0 charsets: each table lists the UTF-8 byte sequences for the 96
// character codes 0x20-0x7f (see ETS 300 706 chapter 15).
var (
	teletextCharsetG0CyrillicOption1 = &teletextCharset{
		[]byte{0x20}, []byte{0x21}, []byte{0x22}, []byte{0x23}, []byte{0x24}, []byte{0x25}, []byte{0xd1, 0x8b},
		[]byte{0x27}, []byte{0x28}, []byte{0x29}, []byte{0x2a}, []byte{0x2b}, []byte{0x2c}, []byte{0x2d},
		[]byte{0x2e}, []byte{0x2f}, []byte{0x30}, []byte{0x31}, []byte{0xe3, 0x88, 0x80}, []byte{0x33}, []byte{0x34},
		[]byte{0x35}, []byte{0x36}, []byte{0x37}, []byte{0x38}, []byte{0x39}, []byte{0x3a}, []byte{0x3b},
		[]byte{0x3c}, []byte{0x3d}, []byte{0x3e}, []byte{0x3f}, []byte{0xd0, 0xa7}, []byte{0xd0, 0x90},
		[]byte{0xd0, 0x91}, []byte{0xd0, 0xa6}, []byte{0xd0, 0x94}, []byte{0xd0, 0x95}, []byte{0xd0, 0xa4},
		[]byte{0xd0, 0x93}, []byte{0xd0, 0xa5}, []byte{0xd0, 0x98}, []byte{0xd0, 0x88}, []byte{0xd0, 0x9a},
		[]byte{0xd0, 0x9b}, []byte{0xd0, 0x9c}, []byte{0xd0, 0x9d}, []byte{0xd0, 0x9e}, []byte{0xd0, 0x9f},
		[]byte{0xd0, 0x8c}, []byte{0xd0, 0xa0}, []byte{0xd0, 0xa1}, []byte{0xd0, 0xa2}, []byte{0xd0, 0xa3},
		[]byte{0xd0, 0x92}, []byte{0xd0, 0x83}, []byte{0xd0, 0x89}, []byte{0xd0, 0x8a}, []byte{0xd0, 0x97},
		[]byte{0xd0, 0x8b}, []byte{0xd0, 0x96}, []byte{0xd0, 0x82}, []byte{0xd0, 0xa8}, []byte{0xd0, 0x8f},
		[]byte{0xd1, 0x87}, []byte{0xd0, 0xb0}, []byte{0xd0, 0xb1}, []byte{0xd1, 0x86}, []byte{0xd0, 0xb4},
		[]byte{0xd0, 0xb5}, []byte{0xd1, 0x84}, []byte{0xd0, 0xb3}, []byte{0xd1, 0x85}, []byte{0xd0, 0xb8},
		[]byte{0xd0, 0xa8}, []byte{0xd0, 0xba}, []byte{0xd0, 0xbb}, []byte{0xd0, 0xbc}, []byte{0xd0, 0xbd},
		[]byte{0xd0, 0xbe}, []byte{0xd0, 0xbf}, []byte{0xd0, 0xac}, []byte{0xd1, 0x80}, []byte{0xd1, 0x81},
		[]byte{0xd1, 0x82}, []byte{0xd1, 0x83}, []byte{0xd0, 0xb2}, []byte{0xd0, 0xa3}, []byte{0xd0, 0xa9},
		[]byte{0xd0, 0xaa}, []byte{0xd0, 0xb7}, []byte{0xd0, 0xab}, []byte{0xd0, 0xb6}, []byte{0xd0, 0xa2},
		[]byte{0xd1, 0x88}, []byte{0xd0, 0xaf},
	}
	teletextCharsetG0CyrillicOption2 = &teletextCharset{
		[]byte{0x20}, []byte{0x21}, []byte{0x22}, []byte{0x23}, []byte{0x24}, []byte{0x25}, []byte{0xd1, 0x8b},
		[]byte{0x27}, []byte{0x28}, []byte{0x29}, []byte{0x2a}, []byte{0x2b}, []byte{0x2c}, []byte{0x2d},
		[]byte{0x2e}, []byte{0x2f}, []byte{0x30}, []byte{0x31}, []byte{0x32}, []byte{0x33}, []byte{0x34},
		[]byte{0x35}, []byte{0x36}, []byte{0x37}, []byte{0x38}, []byte{0x39}, []byte{0x3a}, []byte{0x3b},
		[]byte{0x3c}, []byte{0x3d}, []byte{0x3e}, []byte{0x3f}, []byte{0xd0, 0xae}, []byte{0xd0, 0x90},
		[]byte{0xd0, 0x91}, []byte{0xd0, 0xa6}, []byte{0xd0, 0x94}, []byte{0xd0, 0x95}, []byte{0xd0, 0xa4},
		[]byte{0xd0, 0x93}, []byte{0xd0, 0xa5}, []byte{0xd0, 0x98}, []byte{0xd0, 0x99}, []byte{0xd0, 0x9a},
		[]byte{0xd0, 0x9b}, []byte{0xd0, 0x9c}, []byte{0xd0, 0x9d}, []byte{0xd0, 0x9e}, []byte{0xd0, 0x9f},
		[]byte{0xd0, 0xaf}, []byte{0xd0, 0xa0}, []byte{0xd0, 0xa1}, []byte{0xd0, 0xa2}, []byte{0xd0, 0xa3},
		[]byte{0xd0, 0x96}, []byte{0xd0, 0x92}, []byte{0xd0, 0xac}, []byte{0xd0, 0xaa}, []byte{0xd0, 0x97},
		[]byte{0xd0, 0xa8}, []byte{0xd0, 0xad}, []byte{0xd0, 0xa9}, []byte{0xd0, 0xa7}, []byte{0xd0, 0xab},
		[]byte{0xd1, 0x8e}, []byte{0xd0, 0xb0}, []byte{0xd0, 0xb1}, []byte{0xd1, 0x86}, []byte{0xd0, 0xb4},
		[]byte{0xd0, 0xb5}, []byte{0xd1, 0x84}, []byte{0xd0, 0xb3}, []byte{0xd1, 0x85}, []byte{0xd0, 0xb8},
		[]byte{0xd0, 0xb9}, []byte{0xd0, 0xba}, []byte{0xd0, 0xbb}, []byte{0xd0, 0xbc}, []byte{0xd0, 0xbd},
		[]byte{0xd0, 0xbe}, []byte{0xd0, 0xbf}, []byte{0xd1, 0x8f}, []byte{0xd1, 0x80}, []byte{0xd1, 0x81},
		[]byte{0xd1, 0x82}, []byte{0xd1, 0x83}, []byte{0xd0, 0xb6}, []byte{0xd0, 0xb2}, []byte{0xd1, 0x8c},
		[]byte{0xd1, 0x8a}, []byte{0xd0, 0xb7}, []byte{0xd1, 0x88}, []byte{0xd1, 0x8d}, []byte{0xd1, 0x89},
		[]byte{0xd1, 0x87}, []byte{0xd1, 0x8b},
	}
	teletextCharsetG0CyrillicOption3 = &teletextCharset{
		[]byte{0x20}, []byte{0x21}, []byte{0x22}, []byte{0x23}, []byte{0x24}, []byte{0x25}, []byte{0xc3, 0xaf},
		[]byte{0x27}, []byte{0x28}, []byte{0x29}, []byte{0x2a}, []byte{0x2b}, []byte{0x2c}, []byte{0x2d},
		[]byte{0x2e}, []byte{0x2f}, []byte{0x30}, []byte{0x31}, []byte{0x32}, []byte{0x33}, []byte{0x34},
		[]byte{0x35}, []byte{0x36}, []byte{0x37}, []byte{0x38}, []byte{0x39}, []byte{0x3a}, []byte{0x3b},
		[]byte{0x3c}, []byte{0x3d}, []byte{0x3e}, []byte{0x3f}, []byte{0xd0, 0xae}, []byte{0xd0, 0x90},
		[]byte{0xd0, 0x91}, []byte{0xd0, 0xa6}, []byte{0xd0, 0x94}, []byte{0xd0, 0x95}, []byte{0xd0, 0xa4},
		[]byte{0xd0, 0x93}, []byte{0xd0, 0xa5}, []byte{0xd0, 0x98}, []byte{0xd0, 0x99}, []byte{0xd0, 0x9a},
		[]byte{0xd0, 0x9b}, []byte{0xd0, 0x9c}, []byte{0xd0, 0x9d}, []byte{0xd0, 0x9e}, []byte{0xd0, 0x9f},
		[]byte{0xd0, 0xaf}, []byte{0xd0, 0xa0}, []byte{0xd0, 0xa1}, []byte{0xd0, 0xa2}, []byte{0xd0, 0xa3},
		[]byte{0xd0, 0x96}, []byte{0xd0, 0x92}, []byte{0xd0, 0xac}, []byte{0x49}, []byte{0xd0, 0x97},
		[]byte{0xd0, 0xa8}, []byte{0xd0, 0xad}, []byte{0xd0, 0xa9}, []byte{0xd0, 0xa7}, []byte{0xc3, 0x8f},
		[]byte{0xd1, 0x8e}, []byte{0xd0, 0xb0}, []byte{0xd0, 0xb1}, []byte{0xd1, 0x86}, []byte{0xd0, 0xb4},
		[]byte{0xd0, 0xb5}, []byte{0xd1, 0x84}, []byte{0xd0, 0xb3}, []byte{0xd1, 0x85}, []byte{0xd0, 0xb8},
		[]byte{0xd0, 0xb9}, []byte{0xd0, 0xba}, []byte{0xd0, 0xbb}, []byte{0xd0, 0xbc}, []byte{0xd0, 0xbd},
		[]byte{0xd0, 0xbe}, []byte{0xd0, 0xbf}, []byte{0xd1, 0x8f}, []byte{0xd1, 0x80}, []byte{0xd1, 0x81},
		[]byte{0xd1, 0x82}, []byte{0xd1, 0x83}, []byte{0xd0, 0xb6}, []byte{0xd0, 0xb2}, []byte{0xd1, 0x8c},
		[]byte{0x69}, []byte{0xd0, 0xb7}, []byte{0xd1, 0x88}, []byte{0xd1, 0x8d}, []byte{0xd1, 0x89},
		[]byte{0xd1, 0x87}, []byte{0xc3, 0xbf},
	}
	teletextCharsetG0Greek = &teletextCharset{
		[]byte{0x20}, []byte{0x21}, []byte{0x22}, []byte{0x23}, []byte{0x24}, []byte{0x25}, []byte{0x26},
		[]byte{0x27}, []byte{0x28}, []byte{0x29}, []byte{0x2a}, []byte{0x2b}, []byte{0x2c}, []byte{0x2d},
		[]byte{0x2e}, []byte{0x2f}, []byte{0x30}, []byte{0x31}, []byte{0x32}, []byte{0x33}, []byte{0x34},
		[]byte{0x35}, []byte{0x36}, []byte{0x37}, []byte{0x38}, []byte{0x39}, []byte{0x3a}, []byte{0x3b},
		[]byte{0x3c}, []byte{0x3d}, []byte{0x3e}, []byte{0x3f}, []byte{0xce, 0x90}, []byte{0xce, 0x91},
		[]byte{0xce, 0x92}, []byte{0xce, 0x93}, []byte{0xce, 0x94}, []byte{0xce, 0x95}, []byte{0xce, 0x96},
		[]byte{0xce, 0x97}, []byte{0xce, 0x98}, []byte{0xce, 0x99}, []byte{0xce, 0x9a}, []byte{0xce, 0x9b},
		[]byte{0xce, 0x9c}, []byte{0xce, 0x9d}, []byte{0xce, 0x9e}, []byte{0xce, 0x9f}, []byte{0xce, 0xa0},
		[]byte{0xce, 0xa1}, []byte{0xce, 0xa2}, []byte{0xce, 0xa3}, []byte{0xce, 0xa4}, []byte{0xce, 0xa5},
		[]byte{0xce, 0xa6}, []byte{0xce, 0xa7}, []byte{0xce, 0xa8}, []byte{0xce, 0xa9}, []byte{0xce, 0xaa},
		[]byte{0xce, 0xab}, []byte{0xce, 0xac}, []byte{0xce, 0xad}, []byte{0xce, 0xae}, []byte{0xce, 0xaf},
		[]byte{0xce, 0xb0}, []byte{0xce, 0xb1}, []byte{0xce, 0xb2}, []byte{0xce, 0xb3}, []byte{0xce, 0xb4},
		[]byte{0xce, 0xb5}, []byte{0xce, 0xb6}, []byte{0xce, 0xb7}, []byte{0xce, 0xb8}, []byte{0xce, 0xb9},
		[]byte{0xce, 0xba}, []byte{0xce, 0xbb}, []byte{0xce, 0xbc}, []byte{0xce, 0xbd}, []byte{0xce, 0xbe},
		[]byte{0xce, 0xbf}, []byte{0xcf, 0x80}, []byte{0xcf, 0x81}, []byte{0xcf, 0x82}, []byte{0xcf, 0x83},
		[]byte{0xcf, 0x84}, []byte{0xcf, 0x85}, []byte{0xcf, 0x86}, []byte{0xcf, 0x87}, []byte{0xcf, 0x88},
		[]byte{0xcf, 0x89}, []byte{0xcf, 0x8a}, []byte{0xcf, 0x8b}, []byte{0xcf, 0x8c}, []byte{0xcf, 0x8d},
		[]byte{0xcf, 0x8e}, []byte{0xcf, 0x8f},
	}
	teletextCharsetG0Latin = &teletextCharset{
		[]byte{0x20}, []byte{0x21}, []byte{0x22}, []byte{0xc2, 0xa3}, []byte{0x24}, []byte{0x25}, []byte{0x26},
		[]byte{0x27}, []byte{0x28}, []byte{0x29}, []byte{0x2a}, []byte{0x2b}, []byte{0x2c}, []byte{0x2d},
		[]byte{0x2e}, []byte{0x2f}, []byte{0x30}, []byte{0x31}, []byte{0x32}, []byte{0x33}, []byte{0x34},
		[]byte{0x35}, []byte{0x36}, []byte{0x37}, []byte{0x38}, []byte{0x39}, []byte{0x3a}, []byte{0x3b},
		[]byte{0x3c}, []byte{0x3d}, []byte{0x3e}, []byte{0x3f}, []byte{0x40}, []byte{0x41}, []byte{0x42},
		[]byte{0x43}, []byte{0x44}, []byte{0x45}, []byte{0x46}, []byte{0x47}, []byte{0x48}, []byte{0x49},
		[]byte{0x4a}, []byte{0x4b}, []byte{0x4c}, []byte{0x4d}, []byte{0x4e}, []byte{0x4f}, []byte{0x50},
		[]byte{0x51}, []byte{0x52}, []byte{0x53}, []byte{0x54}, []byte{0x55}, []byte{0x56}, []byte{0x57},
		[]byte{0x58}, []byte{0x59}, []byte{0x5a}, []byte{0xc2, 0xab}, []byte{0xc2, 0xbd}, []byte{0xc2, 0xbb},
		[]byte{0x5e}, []byte{0x23}, []byte{0x2d}, []byte{0x61}, []byte{0x62}, []byte{0x63}, []byte{0x64},
		[]byte{0x65}, []byte{0x66}, []byte{0x67}, []byte{0x68}, []byte{0x69}, []byte{0x6a}, []byte{0x6b},
		[]byte{0x6c}, []byte{0x6d}, []byte{0x6e}, []byte{0x6f}, []byte{0x70}, []byte{0x71}, []byte{0x72},
		[]byte{0x73}, []byte{0x74}, []byte{0x75}, []byte{0x76}, []byte{0x77}, []byte{0x78}, []byte{0x79},
		[]byte{0x7a}, []byte{0xc2, 0xbc}, []byte{0xc2, 0xa6}, []byte{0xc2, 0xbe}, []byte{0xc3, 0xb7}, []byte{0x7f},
	}
	// TODO Add (Arabic and Hebrew G0 tables are not implemented yet and
	// fall back to the Latin table)
	teletextCharsetG0Arabic = teletextCharsetG0Latin
	teletextCharsetG0Hebrew = teletextCharsetG0Latin
)
// Teletext G2 (supplementary) charsets.
var (
	teletextCharsetG2Latin = &teletextCharset{
		[]byte{0x20}, []byte{0xc2, 0xa1}, []byte{0xc2, 0xa2}, []byte{0xc2, 0xa3}, []byte{0x24},
		[]byte{0xc2, 0xa5}, []byte{0x23}, []byte{0xc2, 0xa7}, []byte{0xc2, 0xa4}, []byte{0xe2, 0x80, 0x98},
		[]byte{0xe2, 0x80, 0x9c}, []byte{0xc2, 0xab}, []byte{0xe2, 0x86, 0x90}, []byte{0xe2, 0x86, 0x91},
		[]byte{0xe2, 0x86, 0x92}, []byte{0xe2, 0x86, 0x93}, []byte{0xc2, 0xb0}, []byte{0xc2, 0xb1},
		[]byte{0xc2, 0xb2}, []byte{0xc2, 0xb3}, []byte{0xc3, 0x97}, []byte{0xc2, 0xb5}, []byte{0xc2, 0xb6},
		[]byte{0xc2, 0xb7}, []byte{0xc3, 0xb7}, []byte{0xe2, 0x80, 0x99}, []byte{0xe2, 0x80, 0x9d},
		[]byte{0xc2, 0xbb}, []byte{0xc2, 0xbc}, []byte{0xc2, 0xbd}, []byte{0xc2, 0xbe}, []byte{0xc2, 0xbf},
		[]byte{0x20}, []byte{0xcc, 0x80}, []byte{0xcc, 0x81}, []byte{0xcc, 0x82}, []byte{0xcc, 0x83},
		[]byte{0xcc, 0x84}, []byte{0xcc, 0x86}, []byte{0xcc, 0x87}, []byte{0xcc, 0x88}, []byte{0x00},
		[]byte{0xcc, 0x8a}, []byte{0xcc, 0xa7}, []byte{0x5f}, []byte{0xcc, 0x8b}, []byte{0xcc, 0xa8},
		[]byte{0xcc, 0x8c}, []byte{0xe2, 0x80, 0x95}, []byte{0xc2, 0xb9}, []byte{0xc2, 0xae}, []byte{0xc2, 0xa9},
		[]byte{0xe2, 0x84, 0xa2}, []byte{0xe2, 0x99, 0xaa}, []byte{0xe2, 0x82, 0xac}, []byte{0xe2, 0x80, 0xb0},
		[]byte{0xce, 0xb1}, []byte{0x00}, []byte{0x00}, []byte{0x00}, []byte{0xe2, 0x85, 0x9b},
		[]byte{0xe2, 0x85, 0x9c}, []byte{0xe2, 0x85, 0x9d}, []byte{0xe2, 0x85, 0x9e}, []byte{0xce, 0xa9},
		[]byte{0xc3, 0x86}, []byte{0xc4, 0x90}, []byte{0xc2, 0xaa}, []byte{0xc4, 0xa6}, []byte{0x00},
		[]byte{0xc4, 0xb2}, []byte{0xc4, 0xbf}, []byte{0xc5, 0x81}, []byte{0xc3, 0x98}, []byte{0xc5, 0x92},
		[]byte{0xc2, 0xba}, []byte{0xc3, 0x9e}, []byte{0xc5, 0xa6}, []byte{0xc5, 0x8a}, []byte{0xc5, 0x89},
		[]byte{0xc4, 0xb8}, []byte{0xc3, 0xa6}, []byte{0xc4, 0x91}, []byte{0xc3, 0xb0}, []byte{0xc4, 0xa7},
		[]byte{0xc4, 0xb1}, []byte{0xc4, 0xb3}, []byte{0xc5, 0x80}, []byte{0xc5, 0x82}, []byte{0xc3, 0xb8},
		[]byte{0xc5, 0x93}, []byte{0xc3, 0x9f}, []byte{0xc3, 0xbe}, []byte{0xc5, 0xa7}, []byte{0xc5, 0x8b},
		[]byte{0x20},
	}
	// TODO Add (Arabic, Cyrillic and Greek G2 tables are not implemented yet
	// and fall back to the Latin table)
	teletextCharsetG2Arabic   = teletextCharsetG2Latin
	teletextCharsetG2Cyrillic = teletextCharsetG2Latin
	teletextCharsetG2Greek    = teletextCharsetG2Latin
)

// teletextNationalSubsetCharactersPositionInG0 lists the 13 G0 positions that
// the national option subset characters replace.
var teletextNationalSubsetCharactersPositionInG0 = [13]uint8{0x03, 0x04, 0x20, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, 0x40, 0x5b, 0x5c, 0x5d, 0x5e}
// Teletext national subsets: each table lists, in the order of
// teletextNationalSubsetCharactersPositionInG0, the 13 UTF-8 sequences a
// national option substitutes into the G0 set.
var (
	teletextNationalSubsetCzechSlovak = &teletextNationalSubset{
		[]byte{0x23}, []byte{0xc5, 0xaf}, []byte{0xc4, 0x8d}, []byte{0xc5, 0xa5}, []byte{0xc5, 0xbe},
		[]byte{0xc3, 0xbd}, []byte{0xc3, 0xad}, []byte{0xc5, 0x99}, []byte{0xc3, 0xa9}, []byte{0xc3, 0xa1},
		[]byte{0xc4, 0x9b}, []byte{0xc3, 0xba}, []byte{0xc5, 0xa1},
	}
	teletextNationalSubsetEnglish = &teletextNationalSubset{
		[]byte{0xc2, 0xa3}, []byte{0x24}, []byte{0x40}, []byte{0xc2, 0xab}, []byte{0xc2, 0xbd}, []byte{0xc2, 0xbb},
		[]byte{0x5e}, []byte{0x23}, []byte{0x2d}, []byte{0xc2, 0xbc}, []byte{0xc2, 0xa6}, []byte{0xc2, 0xbe},
		[]byte{0xc3, 0xb7},
	}
	teletextNationalSubsetEstonian = &teletextNationalSubset{
		[]byte{0x23}, []byte{0xc3, 0xb5}, []byte{0xc5, 0xa0}, []byte{0xc3, 0x84}, []byte{0xc3, 0x96},
		[]byte{0xc5, 0xbe}, []byte{0xc3, 0x9c}, []byte{0xc3, 0x95}, []byte{0xc5, 0xa1}, []byte{0xc3, 0xa4},
		[]byte{0xc3, 0xb6}, []byte{0xc5, 0xbe}, []byte{0xc3, 0xbc},
	}
	teletextNationalSubsetFrench = &teletextNationalSubset{
		[]byte{0xc3, 0xa9}, []byte{0xc3, 0xaf}, []byte{0xc3, 0xa0}, []byte{0xc3, 0xab}, []byte{0xc3, 0xaa},
		[]byte{0xc3, 0xb9}, []byte{0xc3, 0xae}, []byte{0x23}, []byte{0xc3, 0xa8}, []byte{0xc3, 0xa2},
		[]byte{0xc3, 0xb4}, []byte{0xc3, 0xbb}, []byte{0xc3, 0xa7},
	}
	teletextNationalSubsetGerman = &teletextNationalSubset{
		[]byte{0x23}, []byte{0x24}, []byte{0xc2, 0xa7}, []byte{0xc3, 0x84}, []byte{0xc3, 0x96}, []byte{0xc3, 0x9c},
		[]byte{0x5e}, []byte{0x5f}, []byte{0xc2, 0xb0}, []byte{0xc3, 0xa4}, []byte{0xc3, 0xb6}, []byte{0xc3, 0xbc},
		[]byte{0xc3, 0x9f},
	}
	teletextNationalSubsetItalian = &teletextNationalSubset{
		[]byte{0xc2, 0xa3}, []byte{0x24}, []byte{0xc3, 0xa9}, []byte{0xc2, 0xb0}, []byte{0xc3, 0xa7},
		[]byte{0xc2, 0xbb}, []byte{0x5e}, []byte{0x23}, []byte{0xc3, 0xb9}, []byte{0xc3, 0xa0}, []byte{0xc3, 0xb2},
		[]byte{0xc3, 0xa8}, []byte{0xc3, 0xac},
	}
	teletextNationalSubsetLettishLithuanian = &teletextNationalSubset{
		[]byte{0x23}, []byte{0x24}, []byte{0xc5, 0xa0}, []byte{0xc4, 0x97}, []byte{0xc4, 0x99}, []byte{0xc5, 0xbd},
		[]byte{0xc4, 0x8d}, []byte{0xc5, 0xab}, []byte{0xc5, 0xa1}, []byte{0xc4, 0x85}, []byte{0xc5, 0xb3},
		[]byte{0xc5, 0xbe}, []byte{0xc4, 0xaf},
	}
	teletextNationalSubsetPolish = &teletextNationalSubset{
		[]byte{0x23}, []byte{0xc5, 0x84}, []byte{0xc4, 0x85}, []byte{0xc5, 0xbb}, []byte{0xc5, 0x9a},
		[]byte{0xc5, 0x81}, []byte{0xc4, 0x87}, []byte{0xc3, 0xb3}, []byte{0xc4, 0x99}, []byte{0xc5, 0xbc},
		[]byte{0xc5, 0x9b}, []byte{0xc5, 0x82}, []byte{0xc5, 0xba},
	}
	teletextNationalSubsetPortugueseSpanish = &teletextNationalSubset{
		[]byte{0xc3, 0xa7}, []byte{0x24}, []byte{0xc2, 0xa1}, []byte{0xc3, 0xa1}, []byte{0xc3, 0xa9},
		[]byte{0xc3, 0xad}, []byte{0xc3, 0xb3}, []byte{0xc3, 0xba}, []byte{0xc2, 0xbf}, []byte{0xc3, 0xbc},
		[]byte{0xc3, 0xb1}, []byte{0xc3, 0xa8}, []byte{0xc3, 0xa0},
	}
	teletextNationalSubsetRomanian = &teletextNationalSubset{
		[]byte{0x23}, []byte{0xc2, 0xa4}, []byte{0xc5, 0xa2}, []byte{0xc3, 0x82}, []byte{0xc5, 0x9e},
		[]byte{0xc4, 0x82}, []byte{0xc3, 0x8e}, []byte{0xc4, 0xb1}, []byte{0xc5, 0xa3}, []byte{0xc3, 0xa2},
		[]byte{0xc5, 0x9f}, []byte{0xc4, 0x83}, []byte{0xc3, 0xae},
	}
	teletextNationalSubsetSerbianCroatianSlovenian = &teletextNationalSubset{
		[]byte{0x23}, []byte{0xc3, 0x8b}, []byte{0xc4, 0x8c}, []byte{0xc4, 0x86}, []byte{0xc5, 0xbd},
		[]byte{0xc4, 0x90}, []byte{0xc5, 0xa0}, []byte{0xc3, 0xab}, []byte{0xc4, 0x8d}, []byte{0xc4, 0x87},
		[]byte{0xc5, 0xbe}, []byte{0xc4, 0x91}, []byte{0xc5, 0xa1},
	}
	teletextNationalSubsetSwedishFinnishHungarian = &teletextNationalSubset{
		[]byte{0x23}, []byte{0xc2, 0xa4}, []byte{0xc3, 0x89}, []byte{0xc3, 0x84}, []byte{0xc3, 0x96},
		[]byte{0xc3, 0x85}, []byte{0xc3, 0x9c}, []byte{0x5f}, []byte{0xc3, 0xa9}, []byte{0xc3, 0xa4},
		[]byte{0xc3, 0xb6}, []byte{0xc3, 0xa5}, []byte{0xc3, 0xbc},
	}
	teletextNationalSubsetTurkish = &teletextNationalSubset{
		[]byte{0x54}, []byte{0xc4, 0x9f}, []byte{0xc4, 0xb0}, []byte{0xc5, 0x9e}, []byte{0xc3, 0x96},
		[]byte{0xc3, 0x87}, []byte{0xc3, 0x9c}, []byte{0xc4, 0x9e}, []byte{0xc4, 0xb1}, []byte{0xc5, 0x9f},
		[]byte{0xc3, 0xb6}, []byte{0xc3, 0xa7}, []byte{0xc3, 0xbc},
	}
)
// Teletext PES data types as signalled by the PES data identifier byte.
const (
	teletextPESDataTypeEBU     = "EBU"
	teletextPESDataTypeUnknown = "unknown"
)

// teletextPESDataType maps a PES data identifier byte to a data type.
// Identifiers in the 0x10-0x1f range signal EBU data.
func teletextPESDataType(dataIdentifier uint8) string {
	if dataIdentifier >= 0x10 && dataIdentifier <= 0x1f {
		return teletextPESDataTypeEBU
	}
	return teletextPESDataTypeUnknown
}
// Teletext PES data unit ids (types of EBU data units inside a PES payload).
const (
	teletextPESDataUnitIDEBUNonSubtitleData = 0x2
	teletextPESDataUnitIDEBUSubtitleData    = 0x3
	teletextPESDataUnitIDStuffing           = 0xff
)
// TeletextOptions represents teletext options
type TeletextOptions struct {
	// Page is the teletext page to extract (e.g. 888). If 0, the first page
	// flagged as a subtitle page in a header packet is used.
	Page int
	// PID is the MPEG-TS packet identifier of the teletext stream. If 0, the
	// first PID with a (VBI) teletext descriptor found in the PMT is used.
	PID int
}
// ReadFromTeletext parses a teletext content
// http://www.etsi.org/deliver/etsi_en/300400_300499/300472/01.03.01_60/en_300472v010301p.pdf
// http://www.etsi.org/deliver/etsi_i_ets/300700_300799/300706/01_60/ets_300706e01p.pdf
//
// It demuxes the MPEG-TS stream from r, locates the teletext PID (from the
// options or by scanning the PMT), accumulates teletext pages addressed to
// the requested page number, and converts them to Subtitles. Item timings are
// expressed relative to the earliest PTS/PCR observed in the stream.
// TODO Update README
// TODO Add tests
func ReadFromTeletext(r io.Reader, o TeletextOptions) (s *Subtitles, err error) {
	// Init
	s = &Subtitles{}
	var dmx = astits.NewDemuxer(context.Background(), r)
	// Get the teletext PID
	var pid uint16
	if pid, err = teletextPID(dmx, o); err != nil {
		if err != ErrNoValidTeletextPID {
			err = fmt.Errorf("astisub: getting teletext PID failed: %w", err)
		}
		return
	}
	// Create character decoder
	cd := newTeletextCharacterDecoder()
	// Create page buffer
	b := newTeletextPageBuffer(o.Page, cd)
	// Loop in data
	var firstTime, lastTime time.Time
	var d *astits.DemuxerData
	var ps []*teletextPage
	for {
		// Fetch next data
		if d, err = dmx.NextData(); err != nil {
			if err == astits.ErrNoMorePackets {
				// End of stream is not an error
				err = nil
				break
			}
			err = fmt.Errorf("astisub: fetching next data failed: %w", err)
			return
		}
		// We only parse PES data
		if d.PES == nil {
			continue
		}
		// This data is not of interest to us
		if d.PID != pid || d.PES.Header.StreamID != astits.StreamIDPrivateStream1 {
			continue
		}
		// Get time (PTS, or PCR as a fallback)
		t := teletextDataTime(d)
		if t.IsZero() {
			continue
		}
		// Track the first and last timestamps seen: firstTime becomes the
		// zero reference for item offsets, lastTime closes the final page
		if firstTime.IsZero() || firstTime.After(t) {
			firstTime = t
		}
		if lastTime.IsZero() || lastTime.Before(t) {
			lastTime = t
		}
		// Append pages completed by this PES packet
		ps = append(ps, b.process(d.PES, t)...)
	}
	// Dump buffer: flush the page still in progress, closing it at lastTime
	ps = append(ps, b.dump(lastTime)...)
	// Parse pages into subtitle items
	for _, p := range ps {
		p.parse(s, cd, firstTime)
	}
	return
}
// teletextDataTime returns the timestamp to associate with demuxed teletext
// data: the PES PTS when present, otherwise the PCR of the first TS packet,
// otherwise the zero time.
// TODO Add tests
func teletextDataTime(d *astits.DemuxerData) time.Time {
	if h := d.PES.Header; h != nil && h.OptionalHeader != nil && h.OptionalHeader.PTS != nil {
		return h.OptionalHeader.PTS.Time()
	}
	if p := d.FirstPacket; p != nil && p.AdaptationField != nil && p.AdaptationField.PCR != nil {
		return p.AdaptationField.PCR.Time()
	}
	return time.Time{}
}
// teletextPID returns the PID carrying the teletext stream.
// If the PID teletext option is not indicated, it will walk through the ts data until it reaches a PMT packet to
// detect the first valid teletext PID, then rewinds the demuxer so the caller
// can re-read the stream from the start.
// TODO Add tests
func teletextPID(dmx *astits.Demuxer, o TeletextOptions) (pid uint16, err error) {
	// PID is in the options
	if o.PID > 0 {
		pid = uint16(o.PID)
		return
	}
	// Loop in data
	var d *astits.DemuxerData
	for {
		// Fetch next data
		if d, err = dmx.NextData(); err != nil {
			if err == astits.ErrNoMorePackets {
				// Stream exhausted without finding a PMT
				err = ErrNoValidTeletextPID
				return
			}
			err = fmt.Errorf("astisub: fetching next data failed: %w", err)
			return
		}
		// PMT data
		if d.PMT != nil {
			// Retrieve valid teletext PIDs: elementary streams carrying a
			// (VBI) teletext descriptor
			var pids []uint16
			for _, s := range d.PMT.ElementaryStreams {
				for _, dsc := range s.ElementaryStreamDescriptors {
					if dsc.Tag == astits.DescriptorTagTeletext || dsc.Tag == astits.DescriptorTagVBITeletext {
						pids = append(pids, s.ElementaryPID)
					}
				}
			}
			// No valid teletext PIDs
			if len(pids) == 0 {
				err = ErrNoValidTeletextPID
				return
			}
			// Set pid: use the first teletext stream found
			pid = pids[0]
			log.Printf("astisub: no teletext pid specified, using pid %d", pid)
			// Rewind so the caller starts demuxing from the beginning
			if _, err = dmx.Rewind(); err != nil {
				err = fmt.Errorf("astisub: rewinding failed: %w", err)
				return
			}
			return
		}
	}
}
// teletextPageBuffer accumulates teletext packets until pages addressed to
// the configured magazine/page number are complete.
type teletextPageBuffer struct {
	cd             *teletextCharacterDecoder
	currentPage    *teletextPage   // page currently being received
	donePages      []*teletextPage // completed pages awaiting collection by process()
	magazineNumber uint8           // target magazine (hundreds digit of the page number)
	pageNumber     int             // target page within the magazine (0-99)
	receiving      bool            // true while packets of the target page are being received
}
// newTeletextPageBuffer creates a page buffer targeting the provided page
// number (e.g. 888 becomes magazine 8, page 88).
func newTeletextPageBuffer(page int, cd *teletextCharacterDecoder) *teletextPageBuffer {
	return &teletextPageBuffer{
		cd:             cd,
		magazineNumber: uint8(page / 100),
		pageNumber:     page % 100,
	}
}
// dump flushes the page still being received, closing it at lastTime.
// It is meant to be called once the input stream is exhausted.
// TODO Add tests
func (b *teletextPageBuffer) dump(lastTime time.Time) (ps []*teletextPage) {
	if b.currentPage != nil {
		b.currentPage.end = lastTime
		ps = []*teletextPage{b.currentPage}
	}
	return
}
// process extracts teletext pages from a PES packet payload received at time t.
//
// The payload layout is one data identifier byte followed by data units, each
// data unit being [id (1 byte)][length (1 byte)][payload (length bytes)].
// Data units are handed to parseDataUnit; pages completed as a result are
// returned and the internal done-pages buffer is reset.
// TODO Add tests
func (b *teletextPageBuffer) process(d *astits.PESData, t time.Time) (ps []*teletextPage) {
	// Empty payload: nothing to parse (and d.Data[0] below would panic)
	if len(d.Data) == 0 {
		return
	}
	// Data identifier
	var offset int
	dataIdentifier := uint8(d.Data[offset])
	offset += 1
	// Check data type
	if teletextPESDataType(dataIdentifier) != teletextPESDataTypeEBU {
		return
	}
	// Loop through data units
	for offset < len(d.Data) {
		// ID
		id := uint8(d.Data[offset])
		offset += 1
		// Truncated data unit header: no length byte left
		if offset >= len(d.Data) {
			break
		}
		// Length
		length := uint8(d.Data[offset])
		offset += 1
		// Offset end: stop on truncated payloads
		offsetEnd := offset + int(length)
		if offsetEnd > len(d.Data) {
			break
		}
		// Parse data unit
		b.parseDataUnit(d.Data[offset:offsetEnd], id, t)
		// Seek to end of data unit
		offset = offsetEnd
	}
	// Dump completed pages and reset the buffer
	ps = b.donePages
	b.donePages = nil
	return ps
}
// parseDataUnit parses one EBU data unit payload (i excludes the id/length
// header), validates its framing, and forwards the teletext packet to
// parsePacket. Non-subtitle data units are ignored.
// TODO Add tests
func (b *teletextPageBuffer) parseDataUnit(i []byte, id uint8, t time.Time) {
	// Check id
	if id != teletextPESDataUnitIDEBUSubtitleData {
		return
	}
	// Field parity: i[0]&0x20 > 0
	// Line offset: uint8(i[0] & 0x1f)
	// Framing code
	framingCode := uint8(i[1])
	// Check framing code: 0xe4 is the fixed teletext framing code
	if framingCode != 0xe4 {
		return
	}
	// Magazine number and packet number are Hamming 8/4 protected; drop the
	// packet on uncorrectable errors
	h1, ok := astikit.ByteHamming84Decode(i[2])
	if !ok {
		return
	}
	h2, ok := astikit.ByteHamming84Decode(i[3])
	if !ok {
		return
	}
	h := h2<<4 | h1
	magazineNumber := h & 0x7
	// Magazine 0 is transmitted as magazine 8
	if magazineNumber == 0 {
		magazineNumber = 8
	}
	packetNumber := h >> 3
	// Parse packet (payload starts after the 2 address bytes)
	b.parsePacket(i[4:], magazineNumber, packetNumber, t)
}
// parsePacket dispatches a teletext packet by its packet number:
// 0 is a page header, 1-25 are page rows, 26/28/29/30 carry enhancement and
// service data. Row packets are only consumed while receiving the target page.
// TODO Add tests
func (b *teletextPageBuffer) parsePacket(i []byte, magazineNumber, packetNumber uint8, t time.Time) {
	if packetNumber == 0 {
		b.parsePacketHeader(i, magazineNumber, t)
	} else if b.receiving && magazineNumber == b.magazineNumber && (packetNumber >= 1 && packetNumber <= 25) {
		b.parsePacketData(i, packetNumber)
	} else {
		// Designation code (Hamming 8/4 protected first byte)
		designationCode, ok := astikit.ByteHamming84Decode(i[0])
		if !ok {
			return
		}
		// Parse packet
		if b.receiving && magazineNumber == b.magazineNumber && packetNumber == 26 {
			// TODO Implement
		} else if b.receiving && magazineNumber == b.magazineNumber && packetNumber == 28 {
			b.parsePacket28And29(i[1:], packetNumber, designationCode)
		} else if magazineNumber == b.magazineNumber && packetNumber == 29 {
			b.parsePacket28And29(i[1:], packetNumber, designationCode)
		} else if magazineNumber == 8 && packetNumber == 30 {
			b.parsePacket30(i, designationCode)
		}
	}
}
// parsePacketHeader parses a page header packet (packet 0). It auto-selects
// the first subtitle page when no page was configured, detects the end of the
// page currently being received, and starts buffering a new page when the
// header addresses the target magazine/page.
//
// NOTE(review): the named return transmissionDone is never assigned true in
// this body — presumably vestigial; confirm before relying on it.
// TODO Add tests
func (b *teletextPageBuffer) parsePacketHeader(i []byte, magazineNumber uint8, t time.Time) (transmissionDone bool) {
	// Page number units
	pageNumberUnits, ok := astikit.ByteHamming84Decode(i[0])
	if !ok {
		return
	}
	// Page number tens
	pageNumberTens, ok := astikit.ByteHamming84Decode(i[1])
	if !ok {
		return
	}
	pageNumber := int(pageNumberTens)*10 + int(pageNumberUnits)
	// 0xff is a reserved page number value
	if pageNumberTens == 0xf && pageNumberUnits == 0xf {
		return
	}
	// Update magazine and page number when none was configured
	if b.magazineNumber == 0 && b.pageNumber == 0 {
		// C6 (subtitle control bit)
		controlBits, ok := astikit.ByteHamming84Decode(i[5])
		if !ok {
			return
		}
		subtitleFlag := controlBits&0x8 > 0
		// This is a subtitle page
		if subtitleFlag {
			b.magazineNumber = magazineNumber
			b.pageNumber = pageNumber
			log.Printf("astisub: no teletext page specified, using page %d%.2d", b.magazineNumber, b.pageNumber)
		}
	}
	// C11 --> C14 (magazine serial + character set code)
	controlBits, ok := astikit.ByteHamming84Decode(i[7])
	if !ok {
		return
	}
	magazineSerial := controlBits&0x1 > 0
	charsetCode := controlBits >> 1
	// Page transmission is done: a header for a different page means the
	// target page's rows have all been transmitted
	if b.receiving && ((magazineSerial && pageNumber != b.pageNumber) ||
		(!magazineSerial && pageNumber != b.pageNumber && magazineNumber == b.magazineNumber)) {
		b.receiving = false
		return
	}
	// Invalid magazine or page number
	if pageNumber != b.pageNumber || magazineNumber != b.magazineNumber {
		return
	}
	// Now that we know when the previous page ends we can add it to the done slice
	if b.currentPage != nil {
		b.currentPage.end = t
		b.donePages = append(b.donePages, b.currentPage)
	}
	// Reset: start buffering the new page
	b.receiving = true
	b.currentPage = newTeletextPage(charsetCode, t)
	return
}
// parsePacketData stores the 40 payload bytes of a page row (packets 1-25).
// Each byte is odd-parity protected with the LSB transmitted first, hence the
// bit reversal before the parity check; bytes failing parity are zeroed.
// The caller gates on b.receiving, which is only set after currentPage is
// created, so currentPage is non-nil here.
// TODO Add tests
func (b *teletextPageBuffer) parsePacketData(i []byte, packetNumber uint8) {
	// Make sure the map is initialized
	if _, ok := b.currentPage.data[packetNumber]; !ok {
		b.currentPage.data[packetNumber] = make([]byte, 40)
	}
	// Loop through input
	b.currentPage.rows = append(b.currentPage.rows, int(packetNumber))
	for idx := uint8(0); idx < 40; idx++ {
		v, ok := astikit.ByteParity(bits.Reverse8(i[idx]))
		if !ok {
			// Parity error: blank the character
			v = 0
		}
		b.currentPage.data[packetNumber][idx] = v
	}
}
// parsePacket28And29 parses packets X/28 (page-specific) and M/29
// (magazine-wide), whose first triplet carries character set information that
// is forwarded to the character decoder.
// TODO Add tests
func (b *teletextPageBuffer) parsePacket28And29(i []byte, packetNumber, designationCode uint8) {
	// Invalid designation code: only formats 0 and 4 carry charset data
	if designationCode != 0 && designationCode != 4 {
		return
	}
	// Triplet 1 (little-endian 24-bit value)
	// TODO triplet1 should be the results of hamming 24/18 decoding
	triplet1 := uint32(i[2])<<16 | uint32(i[1])<<8 | uint32(i[0])
	// We only process x/28 format 1 (triplet's low nibble == 0)
	if packetNumber == 28 && triplet1&0xf > 0 {
		return
	}
	// Update character decoder
	if packetNumber == 28 {
		b.cd.setTripletX28(triplet1)
	} else {
		b.cd.setTripletM29(triplet1)
	}
}
// parsePacket30 parses a packet 8/30 (broadcast service data), dispatching on
// the designation code to the format-specific parser.
// TODO Add tests
func (b *teletextPageBuffer) parsePacket30(i []byte, designationCode uint8) {
	// Switch on designation code to determine format
	switch designationCode {
	case 0, 1:
		b.parsePacket30Format1(i)
	case 2, 3:
		b.parsePacket30Format2(i)
	}
}
// parsePacket30Format1 would parse 8/30 format 1 data; currently a no-op.
func (b *teletextPageBuffer) parsePacket30Format1(i []byte) {
	// TODO Implement
}
// parsePacket30Format2 would parse 8/30 format 2 data; currently a no-op.
func (b *teletextPageBuffer) parsePacket30Format2(i []byte) {
	// TODO Implement
}
// teletextCharacterDecoder decodes teletext character bytes to UTF-8 using
// the charset designated by the page header and any X/28 or M/29 triplets.
type teletextCharacterDecoder struct {
	c                   teletextCharset // currently active G0 charset (with national options applied)
	lastPageCharsetCode *uint8          // charset code of the last page processed
	tripletM29          *uint32         // magazine-wide charset triplet, if received
	tripletX28          *uint32         // page-specific charset triplet, if received (takes precedence)
}
// newTeletextCharacterDecoder creates a character decoder with no charset
// selected yet.
func newTeletextCharacterDecoder() *teletextCharacterDecoder {
	return &teletextCharacterDecoder{}
}
// setTripletM29 stores the magazine-wide M/29 triplet and refreshes the
// charset when the value changed.
// TODO Add tests
func (d *teletextCharacterDecoder) setTripletM29(i uint32) {
	// Guard the nil initial state: dereferencing d.tripletM29 before any
	// triplet was stored would panic; the first triplet always counts as a
	// change.
	if d.tripletM29 == nil || *d.tripletM29 != i {
		d.tripletM29 = astikit.UInt32Ptr(i)
		d.updateCharset(d.lastPageCharsetCode, true)
	}
}
// setTripletX28 stores the page-specific X/28 triplet and refreshes the
// charset when the value changed.
// TODO Add tests
func (d *teletextCharacterDecoder) setTripletX28(i uint32) {
	// Guard the nil initial state: dereferencing d.tripletX28 before any
	// triplet was stored would panic; the first triplet always counts as a
	// change.
	if d.tripletX28 == nil || *d.tripletX28 != i {
		d.tripletX28 = astikit.UInt32Ptr(i)
		d.updateCharset(d.lastPageCharsetCode, true)
	}
}
// decode converts one teletext character byte to its UTF-8 bytes using the
// active charset. Control bytes (below 0x20) yield no output.
// TODO Add tests
func (d *teletextCharacterDecoder) decode(i byte) []byte {
	const firstPrintable = 0x20
	if i >= firstPrintable {
		return d.c[i-firstPrintable]
	}
	return []byte{}
}
// updateCharset recomputes the active G0 charset from the page charset code
// and the X/28 (preferred) or M/29 triplet, then overlays the national option
// subset characters. When force is false and the page charset code is
// unchanged, the current charset is kept.
//
// NOTE(review): pageCharsetCode is dereferenced below without a nil check; a
// nil value looks possible if a triplet update arrives before any page header
// has been processed — confirm against callers.
// TODO Add tests
func (d *teletextCharacterDecoder) updateCharset(pageCharsetCode *uint8, force bool) {
	// Charset is up to date
	if d.lastPageCharsetCode != nil && *pageCharsetCode == *d.lastPageCharsetCode && !force {
		return
	}
	d.lastPageCharsetCode = pageCharsetCode
	// Get triplet: page-specific X/28 wins over magazine-wide M/29
	var triplet uint32
	if d.tripletX28 != nil {
		triplet = *d.tripletX28
	} else if d.tripletM29 != nil {
		triplet = *d.tripletM29
	}
	// Get charsets: default to the Latin G0 set
	d.c = *teletextCharsetG0Latin
	var nationalOptionSubset *teletextNationalSubset
	if v1, ok := teletextCharsets[uint8((triplet&0x3f80)>>10)]; ok {
		if v2, ok := v1[*pageCharsetCode]; ok {
			d.c = *v2.g0
			nationalOptionSubset = v2.national
		}
	}
	// Update g0 with national option subset
	if nationalOptionSubset != nil {
		for k, v := range nationalOptionSubset {
			d.c[teletextNationalSubsetCharactersPositionInG0[k]] = v
		}
	}
}
// teletextPage holds the raw rows of one received teletext page together
// with its display interval.
type teletextPage struct {
	charsetCode uint8            // character set code from the page header (C12-C14)
	data        map[uint8][]byte // row payloads keyed by packet (row) number
	end         time.Time
	rows        []int // row numbers in reception order; sorted before parsing
	start       time.Time
}
// newTeletextPage creates a page starting at the provided time.
func newTeletextPage(charsetCode uint8, start time.Time) *teletextPage {
	return &teletextPage{
		charsetCode: charsetCode,
		data:        make(map[uint8][]byte),
		start:       start,
	}
}
// parse converts the page's raw rows into one subtitle Item appended to s.
// Item timings are made relative to firstTime (the stream's earliest
// timestamp). Empty pages are skipped.
func (p *teletextPage) parse(s *Subtitles, d *teletextCharacterDecoder, firstTime time.Time) {
	// Update charset for this page's charset code
	d.updateCharset(astikit.UInt8Ptr(p.charsetCode), false)
	// No data
	if len(p.data) == 0 {
		return
	}
	// Order rows top to bottom
	sort.Ints(p.rows)
	// Create item
	i := &Item{
		EndAt:   p.end.Sub(firstTime),
		StartAt: p.start.Sub(firstTime),
	}
	// Loop through rows
	for _, idxRow := range p.rows {
		parseTeletextRow(i, d, nil, p.data[uint8(idxRow)])
	}
	// Append item
	s.Items = append(s.Items, i)
}
// decoder abstracts character decoding so parseTeletextRow can be reused with
// different charset implementations.
type decoder interface {
	decode(i byte) []byte
}
// styler abstracts format-specific spacing-attribute handling applied while
// parsing a teletext row.
type styler interface {
	hasBeenSet() bool
	hasChanged(s *StyleAttributes) bool
	parseSpacingAttribute(i byte)
	propagateStyleAttributes(s *StyleAttributes)
	update(sa *StyleAttributes)
}
// parseTeletextRow converts one 40-byte teletext row into a Line appended to
// item i. Bytes below 0x20 are spacing attributes (colors, box start/end,
// size changes) that split the row into LineItems with distinct inline
// styles; other bytes are decoded as text. fs, when non-nil, supplies a
// format-specific styler for additional spacing attributes.
func parseTeletextRow(i *Item, d decoder, fs func() styler, row []byte) {
	// Loop through columns
	var l = Line{}
	var li = LineItem{InlineStyle: &StyleAttributes{}}
	var started bool
	var s styler
	for _, v := range row {
		// Create specific styler
		if fs != nil {
			s = fs()
		}
		// Get spacing attributes
		var color *Color
		var doubleHeight, doubleSize, doubleWidth *bool
		switch v {
		case 0x0:
			color = ColorBlack
		case 0x1:
			color = ColorRed
		case 0x2:
			color = ColorGreen
		case 0x3:
			color = ColorYellow
		case 0x4:
			color = ColorBlue
		case 0x5:
			color = ColorMagenta
		case 0x6:
			color = ColorCyan
		case 0x7:
			color = ColorWhite
		case 0xa:
			// End box: stop collecting text
			started = false
		case 0xb:
			// Start box: begin collecting text
			started = true
		case 0xc:
			// Normal size
			doubleHeight = astikit.BoolPtr(false)
			doubleSize = astikit.BoolPtr(false)
			doubleWidth = astikit.BoolPtr(false)
		case 0xd:
			doubleHeight = astikit.BoolPtr(true)
		case 0xe:
			doubleWidth = astikit.BoolPtr(true)
		case 0xf:
			doubleSize = astikit.BoolPtr(true)
		default:
			if s != nil {
				s.parseSpacingAttribute(v)
			}
		}
		// Style has been set
		if color != nil || doubleHeight != nil || doubleSize != nil || doubleWidth != nil || (s != nil && s.hasBeenSet()) {
			// Style has changed
			if color != li.InlineStyle.TeletextColor || doubleHeight != li.InlineStyle.TeletextDoubleHeight ||
				doubleSize != li.InlineStyle.TeletextDoubleSize || doubleWidth != li.InlineStyle.TeletextDoubleWidth ||
				(s != nil && s.hasChanged(li.InlineStyle)) {
				// Line has started: flush the current line item and start a
				// new one inheriting the current style
				if started {
					// Append line item
					appendTeletextLineItem(&l, li, s)
					// Create new line item
					sa := &StyleAttributes{}
					*sa = *li.InlineStyle
					li = LineItem{InlineStyle: sa}
				}
				// Update style attributes
				if color != nil && color != li.InlineStyle.TeletextColor {
					li.InlineStyle.TeletextColor = color
				}
				if doubleHeight != nil && doubleHeight != li.InlineStyle.TeletextDoubleHeight {
					li.InlineStyle.TeletextDoubleHeight = doubleHeight
				}
				if doubleSize != nil && doubleSize != li.InlineStyle.TeletextDoubleSize {
					li.InlineStyle.TeletextDoubleSize = doubleSize
				}
				if doubleWidth != nil && doubleWidth != li.InlineStyle.TeletextDoubleWidth {
					li.InlineStyle.TeletextDoubleWidth = doubleWidth
				}
				if s != nil {
					s.update(li.InlineStyle)
				}
			}
		} else if started {
			// Append text
			li.Text += string(d.decode(v))
		}
	}
	// Append line item
	appendTeletextLineItem(&l, li, s)
	// Append line
	if len(l.Items) > 0 {
		i.Lines = append(i.Lines, l)
	}
}
// appendTeletextLineItem appends li to line l if it holds non-blank text.
// Leading/trailing space counts are recorded in the inline style before the
// text is trimmed, and style attributes are propagated.
func appendTeletextLineItem(l *Line, li LineItem, s styler) {
	// There's some text
	if len(strings.TrimSpace(li.Text)) > 0 {
		// Make sure inline style exists
		if li.InlineStyle == nil {
			li.InlineStyle = &StyleAttributes{}
		}
		// Get number of spaces before
		li.InlineStyle.TeletextSpacesBefore = astikit.IntPtr(0)
		for _, c := range li.Text {
			if c == ' ' {
				*li.InlineStyle.TeletextSpacesBefore++
			} else {
				break
			}
		}
		// Get number of spaces after
		li.InlineStyle.TeletextSpacesAfter = astikit.IntPtr(0)
		for idx := len(li.Text) - 1; idx >= 0; idx-- {
			if li.Text[idx] == ' ' {
				*li.InlineStyle.TeletextSpacesAfter++
			} else {
				break
			}
		}
		// Propagate style attributes
		li.InlineStyle.propagateTeletextAttributes()
		if s != nil {
			s.propagateStyleAttributes(li.InlineStyle)
		}
		// Append line item with trimmed text
		li.Text = strings.TrimSpace(li.Text)
		l.Items = append(l.Items, li)
	}
}

686
vendor/github.com/asticode/go-astisub/ttml.go generated vendored Normal file
View file

@ -0,0 +1,686 @@
package astisub
import (
"encoding/xml"
"fmt"
"io"
"regexp"
"sort"
"strconv"
"strings"
"time"
"github.com/asticode/go-astikit"
)
// https://www.w3.org/TR/ttaf1-dfxp/
// http://www.skynav.com:8080/ttv/check
// https://www.speechpad.com/captions/ttml
// TTML languages
const (
	ttmlLanguageChinese   = "zh"
	ttmlLanguageEnglish   = "en"
	ttmlLanguageJapanese  = "ja"
	ttmlLanguageFrench    = "fr"
	ttmlLanguageNorwegian = "no"
)
// TTML language mapping: bidirectional map between TTML language codes and
// this package's Language constants.
var ttmlLanguageMapping = astikit.NewBiMap().
	Set(ttmlLanguageChinese, LanguageChinese).
	Set(ttmlLanguageEnglish, LanguageEnglish).
	Set(ttmlLanguageFrench, LanguageFrench).
	Set(ttmlLanguageJapanese, LanguageJapanese).
	Set(ttmlLanguageNorwegian, LanguageNorwegian)
// TTML Clock Time Frames and Offset Time
var (
	// Trailing ":ff" frames component of a clock-time expression
	ttmlRegexpClockTimeFrames = regexp.MustCompile(`\:[\d]+$`)
	// Offset-time expression, e.g. "1.5s", "30f", "1000t"
	ttmlRegexpOffsetTime = regexp.MustCompile(`^(\d+(\.\d+)?)(h|m|s|ms|f|t)$`)
)
// TTMLIn represents an input TTML that must be unmarshaled
// We split it from the output TTML as we can't add strict namespace without breaking retrocompatibility
type TTMLIn struct {
	Framerate int              `xml:"frameRate,attr"` // frames per second, used to resolve frame-based durations
	Lang      string           `xml:"lang,attr"`
	Metadata  TTMLInMetadata   `xml:"head>metadata"`
	Regions   []TTMLInRegion   `xml:"head>layout>region"`
	Styles    []TTMLInStyle    `xml:"head>styling>style"`
	Subtitles []TTMLInSubtitle `xml:"body>div>p"`
	Tickrate  int              `xml:"tickRate,attr"` // ticks per second, used to resolve tick-based durations
	XMLName   xml.Name         `xml:"tt"`
}
// metadata returns the Metadata of the TTML, mapping the TTML language code
// (truncated/padded to 2 characters) to this package's language constants.
func (t TTMLIn) metadata() (m *Metadata) {
	m = &Metadata{
		Framerate:     t.Framerate,
		Title:         t.Metadata.Title,
		TTMLCopyright: t.Metadata.Copyright,
	}
	if v, ok := ttmlLanguageMapping.Get(astikit.StrPad(t.Lang, ' ', 2, astikit.PadCut)); ok {
		m.Language = v.(string)
	}
	return
}
// TTMLInMetadata represents an input TTML Metadata
type TTMLInMetadata struct {
	Copyright string `xml:"copyright"`
	Title     string `xml:"title"`
}
// TTMLInStyleAttributes represents input TTML style attributes.
// All fields are optional (pointers) so absence can be distinguished from an
// empty value; they mirror the tts: styling attributes of the TTML spec.
type TTMLInStyleAttributes struct {
	BackgroundColor *string `xml:"backgroundColor,attr,omitempty"`
	Color           *string `xml:"color,attr,omitempty"`
	Direction       *string `xml:"direction,attr,omitempty"`
	Display         *string `xml:"display,attr,omitempty"`
	DisplayAlign    *string `xml:"displayAlign,attr,omitempty"`
	Extent          *string `xml:"extent,attr,omitempty"`
	FontFamily      *string `xml:"fontFamily,attr,omitempty"`
	FontSize        *string `xml:"fontSize,attr,omitempty"`
	FontStyle       *string `xml:"fontStyle,attr,omitempty"`
	FontWeight      *string `xml:"fontWeight,attr,omitempty"`
	LineHeight      *string `xml:"lineHeight,attr,omitempty"`
	Opacity         *string `xml:"opacity,attr,omitempty"`
	Origin          *string `xml:"origin,attr,omitempty"`
	Overflow        *string `xml:"overflow,attr,omitempty"`
	Padding         *string `xml:"padding,attr,omitempty"`
	ShowBackground  *string `xml:"showBackground,attr,omitempty"`
	TextAlign       *string `xml:"textAlign,attr,omitempty"`
	TextDecoration  *string `xml:"textDecoration,attr,omitempty"`
	TextOutline     *string `xml:"textOutline,attr,omitempty"`
	UnicodeBidi     *string `xml:"unicodeBidi,attr,omitempty"`
	Visibility      *string `xml:"visibility,attr,omitempty"`
	WrapOption      *string `xml:"wrapOption,attr,omitempty"`
	WritingMode     *string `xml:"writingMode,attr,omitempty"`
	ZIndex          *int    `xml:"zIndex,attr,omitempty"`
}
// StyleAttributes converts TTMLInStyleAttributes into a StyleAttributes,
// copying each TTML attribute verbatim and then propagating them to the
// format-independent fields.
func (s TTMLInStyleAttributes) styleAttributes() (o *StyleAttributes) {
	o = &StyleAttributes{
		TTMLBackgroundColor: s.BackgroundColor,
		TTMLColor:           s.Color,
		TTMLDirection:       s.Direction,
		TTMLDisplay:         s.Display,
		TTMLDisplayAlign:    s.DisplayAlign,
		TTMLExtent:          s.Extent,
		TTMLFontFamily:      s.FontFamily,
		TTMLFontSize:        s.FontSize,
		TTMLFontStyle:       s.FontStyle,
		TTMLFontWeight:      s.FontWeight,
		TTMLLineHeight:      s.LineHeight,
		TTMLOpacity:         s.Opacity,
		TTMLOrigin:          s.Origin,
		TTMLOverflow:        s.Overflow,
		TTMLPadding:         s.Padding,
		TTMLShowBackground:  s.ShowBackground,
		TTMLTextAlign:       s.TextAlign,
		TTMLTextDecoration:  s.TextDecoration,
		TTMLTextOutline:     s.TextOutline,
		TTMLUnicodeBidi:     s.UnicodeBidi,
		TTMLVisibility:      s.Visibility,
		TTMLWrapOption:      s.WrapOption,
		TTMLWritingMode:     s.WritingMode,
		TTMLZIndex:          s.ZIndex,
	}
	o.propagateTTMLAttributes()
	return
}
// TTMLInHeader represents an input TTML header (common fields of region and
// style declarations).
type TTMLInHeader struct {
	ID    string `xml:"id,attr,omitempty"`
	Style string `xml:"style,attr,omitempty"` // ID of a parent style
	TTMLInStyleAttributes
}
// TTMLInRegion represents an input TTML region
type TTMLInRegion struct {
	TTMLInHeader
	XMLName xml.Name `xml:"region"`
}
// TTMLInStyle represents an input TTML style
type TTMLInStyle struct {
	TTMLInHeader
	XMLName xml.Name `xml:"style"`
}
// TTMLInSubtitle represents an input TTML subtitle.
// Begin and End are optional per the TTML schema, hence the pointers.
type TTMLInSubtitle struct {
	Begin *TTMLInDuration `xml:"begin,attr,omitempty"`
	End   *TTMLInDuration `xml:"end,attr,omitempty"`
	ID    string          `xml:"id,attr,omitempty"`
	Items string          `xml:",innerxml"` // We must store inner XML here since there's no tag to describe both any tag and chardata
	Region string         `xml:"region,attr,omitempty"`
	Style  string         `xml:"style,attr,omitempty"`
	TTMLInStyleAttributes
}
// TTMLInItems represents input TTML items
type TTMLInItems []TTMLInItem
// UnmarshalXML implements the XML unmarshaler interface.
// It walks the token stream, decoding each child element into a TTMLInItem
// and turning non-blank character data into text-only items.
func (i *TTMLInItems) UnmarshalXML(d *xml.Decoder, start xml.StartElement) (err error) {
	// Get next tokens
	var t xml.Token
	for {
		// Get next token
		if t, err = d.Token(); err != nil {
			if err == io.EOF {
				// End of the stream is the normal termination
				break
			}
			err = fmt.Errorf("astisub: getting next token failed: %w", err)
			return
		}
		// Start element
		if se, ok := t.(xml.StartElement); ok {
			var e = TTMLInItem{}
			if err = d.DecodeElement(&e, &se); err != nil {
				err = fmt.Errorf("astisub: decoding xml.StartElement failed: %w", err)
				return
			}
			*i = append(*i, e)
		} else if b, ok := t.(xml.CharData); ok {
			// Bare character data between elements becomes a text-only item
			var str = strings.TrimSpace(string(b))
			if len(str) > 0 {
				*i = append(*i, TTMLInItem{Text: str})
			}
		}
	}
	return nil
}
// TTMLInItem represents an input TTML item
type TTMLInItem struct {
	Style string `xml:"style,attr,omitempty"`
	Text  string `xml:",chardata"`
	TTMLInStyleAttributes
	XMLName xml.Name
}
// TTMLInDuration represents an input TTML duration.
// Frame- and tick-based components are kept separate because the frame rate
// and tick rate are only known at document level and injected after parsing.
type TTMLInDuration struct {
	d                 time.Duration
	frames, framerate int // Framerate is in frame/s
	ticks, tickrate   int // Tickrate is in ticks/s
}
// UnmarshalText implements the TextUnmarshaler interface
// Possible formats are:
// - hh:mm:ss.mmm
// - hh:mm:ss:fff (fff being frames)
// - [ticks]t ([ticks] being the tick amount)
// Offset-time forms with h/m/s/ms metrics are resolved immediately; frame
// and tick counts are stored and resolved later by duration(), once the
// document-level rates are known.
func (d *TTMLInDuration) UnmarshalText(i []byte) (err error) {
	// Reset duration
	d.d = time.Duration(0)
	d.frames = 0
	d.ticks = 0
	// Check offset time
	text := string(i)
	if matches := ttmlRegexpOffsetTime.FindStringSubmatch(text); matches != nil {
		// Parse value
		var value float64
		if value, err = strconv.ParseFloat(matches[1], 64); err != nil {
			err = fmt.Errorf("astisub: failed to parse value %s", matches[1])
			return
		}
		// Parse metric
		metric := matches[3]
		// Update duration
		if metric == "t" {
			d.ticks = int(value)
		} else if metric == "f" {
			d.frames = int(value)
		} else {
			// Get timebase
			var timebase time.Duration
			switch metric {
			case "h":
				timebase = time.Hour
			case "m":
				timebase = time.Minute
			case "s":
				timebase = time.Second
			case "ms":
				timebase = time.Millisecond
			default:
				err = fmt.Errorf("astisub: invalid metric %s", metric)
				return
			}
			// Update duration
			d.d = time.Duration(value * float64(timebase.Nanoseconds()))
		}
		return
	}
	// Extract clock time frames: the trailing ":fff" component is stored as a
	// frame count and replaced by ".000" so the rest parses as a clock time
	if indexes := ttmlRegexpClockTimeFrames.FindStringIndex(text); indexes != nil {
		// Parse frames
		var s = text[indexes[0]+1 : indexes[1]]
		if d.frames, err = strconv.Atoi(s); err != nil {
			err = fmt.Errorf("astisub: atoi %s failed: %w", s, err)
			return
		}
		// Update text
		text = text[:indexes[0]] + ".000"
	}
	d.d, err = parseDuration(text, ".", 3)
	return
}
// duration returns the input TTML Duration's time.Duration.
// Tick-based values take precedence; otherwise any frame component is
// converted using the frame rate and added to the base duration.
func (d TTMLInDuration) duration() time.Duration {
	if d.ticks > 0 && d.tickrate > 0 {
		return time.Duration(float64(d.ticks) * 1e9 / float64(d.tickrate))
	}
	total := d.d
	if d.frames > 0 && d.framerate > 0 {
		seconds := float64(d.frames) / float64(d.framerate)
		total += time.Duration(seconds * float64(time.Second.Nanoseconds()))
	}
	return total
}
// ReadFromTTML parses a .ttml content.
// It decodes the XML, resolves styles (including parent styles) and regions,
// then converts each <p> subtitle into an Item, splitting lines on <br> tags
// and embedded line breaks.
func ReadFromTTML(i io.Reader) (o *Subtitles, err error) {
	// Init
	o = NewSubtitles()
	// Unmarshal XML
	var ttml TTMLIn
	if err = xml.NewDecoder(i).Decode(&ttml); err != nil {
		err = fmt.Errorf("astisub: xml decoding failed: %w", err)
		return
	}
	// Add metadata
	o.Metadata = ttml.metadata()
	// Loop through styles, indexing them by ID and remembering parent links
	var parentStyles = make(map[string]*Style)
	for _, ts := range ttml.Styles {
		var s = &Style{
			ID:          ts.ID,
			InlineStyle: ts.TTMLInStyleAttributes.styleAttributes(),
		}
		o.Styles[s.ID] = s
		if len(ts.Style) > 0 {
			parentStyles[ts.Style] = s
		}
	}
	// Take care of parent styles
	for id, s := range parentStyles {
		if _, ok := o.Styles[id]; !ok {
			err = fmt.Errorf("astisub: Style %s requested by style %s doesn't exist", id, s.ID)
			return
		}
		s.Style = o.Styles[id]
	}
	// Loop through regions
	for _, tr := range ttml.Regions {
		var r = &Region{
			ID:          tr.ID,
			InlineStyle: tr.TTMLInStyleAttributes.styleAttributes(),
		}
		if len(tr.Style) > 0 {
			if _, ok := o.Styles[tr.Style]; !ok {
				err = fmt.Errorf("astisub: Style %s requested by region %s doesn't exist", tr.Style, r.ID)
				return
			}
			r.Style = o.Styles[tr.Style]
		}
		o.Regions[r.ID] = r
	}
	// Loop through subtitles
	for _, ts := range ttml.Subtitles {
		// Propagate frame rate and tick rate so frame/tick based durations
		// can be resolved. Begin/End are optional attributes: guard against
		// nil to avoid a panic on subtitles that omit them (such subtitles
		// then default to a zero duration).
		if ts.Begin != nil {
			ts.Begin.framerate = ttml.Framerate
			ts.Begin.tickrate = ttml.Tickrate
		}
		if ts.End != nil {
			ts.End.framerate = ttml.Framerate
			ts.End.tickrate = ttml.Tickrate
		}
		// Init item
		var s = &Item{
			InlineStyle: ts.TTMLInStyleAttributes.styleAttributes(),
		}
		if ts.Begin != nil {
			s.StartAt = ts.Begin.duration()
		}
		if ts.End != nil {
			s.EndAt = ts.End.duration()
		}
		// Add region
		if len(ts.Region) > 0 {
			if _, ok := o.Regions[ts.Region]; !ok {
				err = fmt.Errorf("astisub: Region %s requested by subtitle between %s and %s doesn't exist", ts.Region, s.StartAt, s.EndAt)
				return
			}
			s.Region = o.Regions[ts.Region]
		}
		// Add style
		if len(ts.Style) > 0 {
			if _, ok := o.Styles[ts.Style]; !ok {
				err = fmt.Errorf("astisub: Style %s requested by subtitle between %s and %s doesn't exist", ts.Style, s.StartAt, s.EndAt)
				return
			}
			s.Style = o.Styles[ts.Style]
		}
		// Unmarshal items: wrap the raw inner XML so mixed chardata and tags
		// can be decoded through TTMLInItems.UnmarshalXML
		var items = TTMLInItems{}
		if err = xml.Unmarshal([]byte("<span>"+ts.Items+"</span>"), &items); err != nil {
			err = fmt.Errorf("astisub: unmarshaling items failed: %w", err)
			return
		}
		// Loop through texts
		var l = &Line{}
		for _, tt := range items {
			// New line specified with the "br" tag
			if strings.ToLower(tt.XMLName.Local) == "br" {
				s.Lines = append(s.Lines, *l)
				l = &Line{}
				continue
			}
			// New line decoded as a line break. This can happen if there's a "br" tag within the text since
			// since the go xml unmarshaler will unmarshal a "br" tag as a line break if the field has the
			// chardata xml tag.
			for idx, li := range strings.Split(tt.Text, "\n") {
				// New line
				if idx > 0 {
					s.Lines = append(s.Lines, *l)
					l = &Line{}
				}
				// Init line item
				var t = LineItem{
					InlineStyle: tt.TTMLInStyleAttributes.styleAttributes(),
					Text:        strings.TrimSpace(li),
				}
				// Add style
				if len(tt.Style) > 0 {
					if _, ok := o.Styles[tt.Style]; !ok {
						err = fmt.Errorf("astisub: Style %s requested by item with text %s doesn't exist", tt.Style, tt.Text)
						return
					}
					t.Style = o.Styles[tt.Style]
				}
				// Append items
				l.Items = append(l.Items, t)
			}
		}
		s.Lines = append(s.Lines, *l)
		// Append subtitle
		o.Items = append(o.Items, s)
	}
	return
}
// TTMLOut represents an output TTML that must be marshaled
// We split it from the input TTML as this time we'll add strict namespaces
type TTMLOut struct {
	Lang            string            `xml:"xml:lang,attr,omitempty"`
	Metadata        *TTMLOutMetadata  `xml:"head>metadata,omitempty"`
	Styles          []TTMLOutStyle    `xml:"head>styling>style,omitempty"` //!\\ Order is important! Keep Styling above Layout
	Regions         []TTMLOutRegion   `xml:"head>layout>region,omitempty"`
	Subtitles       []TTMLOutSubtitle `xml:"body>div>p,omitempty"`
	XMLName         xml.Name          `xml:"http://www.w3.org/ns/ttml tt"`
	XMLNamespaceTTM string            `xml:"xmlns:ttm,attr"`
	XMLNamespaceTTS string            `xml:"xmlns:tts,attr"`
}
// TTMLOutMetadata represents an output TTML Metadata
type TTMLOutMetadata struct {
	Copyright string `xml:"ttm:copyright,omitempty"`
	Title     string `xml:"ttm:title,omitempty"`
}
// TTMLOutStyleAttributes represents output TTML style attributes, marshaled
// under the tts: namespace. All fields are optional pointers so unset
// attributes are omitted from the output.
type TTMLOutStyleAttributes struct {
	BackgroundColor *string `xml:"tts:backgroundColor,attr,omitempty"`
	Color           *string `xml:"tts:color,attr,omitempty"`
	Direction       *string `xml:"tts:direction,attr,omitempty"`
	Display         *string `xml:"tts:display,attr,omitempty"`
	DisplayAlign    *string `xml:"tts:displayAlign,attr,omitempty"`
	Extent          *string `xml:"tts:extent,attr,omitempty"`
	FontFamily      *string `xml:"tts:fontFamily,attr,omitempty"`
	FontSize        *string `xml:"tts:fontSize,attr,omitempty"`
	FontStyle       *string `xml:"tts:fontStyle,attr,omitempty"`
	FontWeight      *string `xml:"tts:fontWeight,attr,omitempty"`
	LineHeight      *string `xml:"tts:lineHeight,attr,omitempty"`
	Opacity         *string `xml:"tts:opacity,attr,omitempty"`
	Origin          *string `xml:"tts:origin,attr,omitempty"`
	Overflow        *string `xml:"tts:overflow,attr,omitempty"`
	Padding         *string `xml:"tts:padding,attr,omitempty"`
	ShowBackground  *string `xml:"tts:showBackground,attr,omitempty"`
	TextAlign       *string `xml:"tts:textAlign,attr,omitempty"`
	TextDecoration  *string `xml:"tts:textDecoration,attr,omitempty"`
	TextOutline     *string `xml:"tts:textOutline,attr,omitempty"`
	UnicodeBidi     *string `xml:"tts:unicodeBidi,attr,omitempty"`
	Visibility      *string `xml:"tts:visibility,attr,omitempty"`
	WrapOption      *string `xml:"tts:wrapOption,attr,omitempty"`
	WritingMode     *string `xml:"tts:writingMode,attr,omitempty"`
	ZIndex          *int    `xml:"tts:zIndex,attr,omitempty"`
}
// ttmlOutStyleAttributesFromStyleAttributes converts StyleAttributes into a
// TTMLOutStyleAttributes. A nil input yields an empty (all-omitted) set.
func ttmlOutStyleAttributesFromStyleAttributes(s *StyleAttributes) TTMLOutStyleAttributes {
	if s == nil {
		return TTMLOutStyleAttributes{}
	}
	return TTMLOutStyleAttributes{
		BackgroundColor: s.TTMLBackgroundColor,
		Color:           s.TTMLColor,
		Direction:       s.TTMLDirection,
		Display:         s.TTMLDisplay,
		DisplayAlign:    s.TTMLDisplayAlign,
		Extent:          s.TTMLExtent,
		FontFamily:      s.TTMLFontFamily,
		FontSize:        s.TTMLFontSize,
		FontStyle:       s.TTMLFontStyle,
		FontWeight:      s.TTMLFontWeight,
		LineHeight:      s.TTMLLineHeight,
		Opacity:         s.TTMLOpacity,
		Origin:          s.TTMLOrigin,
		Overflow:        s.TTMLOverflow,
		Padding:         s.TTMLPadding,
		ShowBackground:  s.TTMLShowBackground,
		TextAlign:       s.TTMLTextAlign,
		TextDecoration:  s.TTMLTextDecoration,
		TextOutline:     s.TTMLTextOutline,
		UnicodeBidi:     s.TTMLUnicodeBidi,
		Visibility:      s.TTMLVisibility,
		WrapOption:      s.TTMLWrapOption,
		WritingMode:     s.TTMLWritingMode,
		ZIndex:          s.TTMLZIndex,
	}
}
// TTMLOutHeader represents an output TTML header (common fields of region and
// style declarations).
type TTMLOutHeader struct {
	ID    string `xml:"xml:id,attr,omitempty"`
	Style string `xml:"style,attr,omitempty"` // ID of a parent style
	TTMLOutStyleAttributes
}
// TTMLOutRegion represents an output TTML region
type TTMLOutRegion struct {
	TTMLOutHeader
	XMLName xml.Name `xml:"region"`
}
// TTMLOutStyle represents an output TTML style
type TTMLOutStyle struct {
	TTMLOutHeader
	XMLName xml.Name `xml:"style"`
}
// TTMLOutSubtitle represents an output TTML subtitle
type TTMLOutSubtitle struct {
	Begin TTMLOutDuration `xml:"begin,attr"`
	End   TTMLOutDuration `xml:"end,attr"`
	ID    string          `xml:"id,attr,omitempty"`
	Items []TTMLOutItem
	Region string `xml:"region,attr,omitempty"`
	Style  string `xml:"style,attr,omitempty"`
	TTMLOutStyleAttributes
}
// TTMLOutItem represents an output TTML Item (a span or br element)
type TTMLOutItem struct {
	Style string `xml:"style,attr,omitempty"`
	Text  string `xml:",chardata"`
	TTMLOutStyleAttributes
	XMLName xml.Name
}
// TTMLOutDuration represents an output TTML duration
type TTMLOutDuration time.Duration
// MarshalText implements the TextMarshaler interface, formatting the duration
// as hh:mm:ss.mmm (millisecond precision, "." separator).
func (t TTMLOutDuration) MarshalText() ([]byte, error) {
	return []byte(formatDuration(time.Duration(t), ".", 3)), nil
}
// WriteToTTML writes subtitles in .ttml format.
// It emits regions and styles sorted by ID (for deterministic output),
// then one subtitle per item, and finally XML-encodes the whole document.
// Returns ErrNoSubtitlesToWrite when there is nothing to write.
func (s Subtitles) WriteToTTML(o io.Writer) (err error) {
	// Do not write anything if no subtitles
	if len(s.Items) == 0 {
		return ErrNoSubtitlesToWrite
	}
	// Init TTML
	var ttml = TTMLOut{
		XMLNamespaceTTM: "http://www.w3.org/ns/ttml#metadata",
		XMLNamespaceTTS: "http://www.w3.org/ns/ttml#styling",
	}
	// Add metadata
	if s.Metadata != nil {
		// Map the metadata language back to its TTML language code
		if v, ok := ttmlLanguageMapping.GetInverse(s.Metadata.Language); ok {
			ttml.Lang = v.(string)
		}
		if len(s.Metadata.TTMLCopyright) > 0 || len(s.Metadata.Title) > 0 {
			ttml.Metadata = &TTMLOutMetadata{
				Copyright: s.Metadata.TTMLCopyright,
				Title:     s.Metadata.Title,
			}
		}
	}
	// Add regions (sorted by ID so output is deterministic)
	var k []string
	for _, region := range s.Regions {
		k = append(k, region.ID)
	}
	sort.Strings(k)
	for _, id := range k {
		var ttmlRegion = TTMLOutRegion{TTMLOutHeader: TTMLOutHeader{
			ID:                     s.Regions[id].ID,
			TTMLOutStyleAttributes: ttmlOutStyleAttributesFromStyleAttributes(s.Regions[id].InlineStyle),
		}}
		if s.Regions[id].Style != nil {
			ttmlRegion.Style = s.Regions[id].Style.ID
		}
		ttml.Regions = append(ttml.Regions, ttmlRegion)
	}
	// Add styles (sorted by ID so output is deterministic)
	k = []string{}
	for _, style := range s.Styles {
		k = append(k, style.ID)
	}
	sort.Strings(k)
	for _, id := range k {
		var ttmlStyle = TTMLOutStyle{TTMLOutHeader: TTMLOutHeader{
			ID:                     s.Styles[id].ID,
			TTMLOutStyleAttributes: ttmlOutStyleAttributesFromStyleAttributes(s.Styles[id].InlineStyle),
		}}
		if s.Styles[id].Style != nil {
			ttmlStyle.Style = s.Styles[id].Style.ID
		}
		ttml.Styles = append(ttml.Styles, ttmlStyle)
	}
	// Add items
	for _, item := range s.Items {
		// Init subtitle
		var ttmlSubtitle = TTMLOutSubtitle{
			Begin:                  TTMLOutDuration(item.StartAt),
			End:                    TTMLOutDuration(item.EndAt),
			TTMLOutStyleAttributes: ttmlOutStyleAttributesFromStyleAttributes(item.InlineStyle),
		}
		// Add region
		if item.Region != nil {
			ttmlSubtitle.Region = item.Region.ID
		}
		// Add style
		if item.Style != nil {
			ttmlSubtitle.Style = item.Style.ID
		}
		// Add lines: each line becomes a run of <span> items followed by a <br>
		for _, line := range item.Lines {
			// Loop through line items
			for idx, lineItem := range line.Items {
				// Init ttml item
				var ttmlItem = TTMLOutItem{
					Text:                   lineItem.Text,
					TTMLOutStyleAttributes: ttmlOutStyleAttributesFromStyleAttributes(lineItem.InlineStyle),
					XMLName:                xml.Name{Local: "span"},
				}
				// condition to avoid adding space as the last character.
				if idx < len(line.Items)-1 {
					ttmlItem.Text = ttmlItem.Text + " "
				}
				// Add style
				if lineItem.Style != nil {
					ttmlItem.Style = lineItem.Style.ID
				}
				// Add ttml item
				ttmlSubtitle.Items = append(ttmlSubtitle.Items, ttmlItem)
			}
			// Add line break
			ttmlSubtitle.Items = append(ttmlSubtitle.Items, TTMLOutItem{XMLName: xml.Name{Local: "br"}})
		}
		// Remove last line break (the loop above always appends a trailing <br>)
		if len(ttmlSubtitle.Items) > 0 {
			ttmlSubtitle.Items = ttmlSubtitle.Items[:len(ttmlSubtitle.Items)-1]
		}
		// Append subtitle
		ttml.Subtitles = append(ttml.Subtitles, ttmlSubtitle)
	}
	// Marshal XML
	var e = xml.NewEncoder(o)
	e.Indent("", " ")
	if err = e.Encode(ttml); err != nil {
		err = fmt.Errorf("astisub: xml encoding failed: %w", err)
		return
	}
	return
}

537
vendor/github.com/asticode/go-astisub/webvtt.go generated vendored Normal file
View file

@ -0,0 +1,537 @@
package astisub
import (
"bufio"
"bytes"
"errors"
"fmt"
"io"
"regexp"
"sort"
"strconv"
"strings"
"time"
"golang.org/x/net/html"
)
// https://www.w3.org/TR/webvtt1/

// Constants
const (
	// Block names tracked while scanning a .vtt file
	webvttBlockNameComment = "comment"
	webvttBlockNameRegion  = "region"
	webvttBlockNameStyle   = "style"
	webvttBlockNameText    = "text"
	// Separator between a cue's start and end timestamps
	webvttTimeBoundariesSeparator = " --> "
	// HLS timestamp-map header (https://tools.ietf.org/html/rfc8216#section-3.5)
	webvttTimestampMap = "X-TIMESTAMP-MAP"
)
// Vars
var (
	bytesWebVTTItalicEndTag            = []byte("</i>")
	bytesWebVTTItalicStartTag          = []byte("<i>")
	bytesWebVTTTimeBoundariesSeparator = []byte(webvttTimeBoundariesSeparator)
	// Matches voice spans such as "<v Bob>"; capture group 3 holds the voice name.
	webVTTRegexpStartTag = regexp.MustCompile(`(<v([\.\w]*)([\s\w]+)+>)`)
)
// parseDurationWebVTT parses a .vtt duration
// (delegates to parseDuration with a "." millisecond separator and 3 decimals).
func parseDurationWebVTT(i string) (time.Duration, error) {
	return parseDuration(i, ".", 3)
}
// parseTimestampMapWebVTT parses an HLS X-TIMESTAMP-MAP line and returns the
// offset to apply to all cue timestamps (MPEGTS ticks at 90 kHz minus LOCAL).
// https://tools.ietf.org/html/rfc8216#section-3.5
// Eg., `X-TIMESTAMP-MAP=LOCAL:00:00:00.000,MPEGTS:900000` => 10s
// `X-TIMESTAMP-MAP=LOCAL:00:00:00.000,MPEGTS:180000` => 2s
func parseTimestampMapWebVTT(line string) (timeOffset time.Duration, err error) {
	splits := strings.Split(line, "=")
	if len(splits) <= 1 {
		err = fmt.Errorf("astisub: invalid X-TIMESTAMP-MAP, no '=' found")
		return
	}
	right := splits[1]
	var local time.Duration
	var mpegts int64
	// The value is a comma-separated list of KEY:VALUE pairs (LOCAL, MPEGTS)
	for _, split := range strings.Split(right, ",") {
		splits := strings.SplitN(split, ":", 2)
		if len(splits) <= 1 {
			err = fmt.Errorf("astisub: invalid X-TIMESTAMP-MAP, part %q didn't contain ':'", right)
			return
		}
		switch strings.ToLower(strings.TrimSpace(splits[0])) {
		case "local":
			local, err = parseDurationWebVTT(splits[1])
			if err != nil {
				err = fmt.Errorf("astisub: parsing webvtt duration failed: %w", err)
				return
			}
		case "mpegts":
			mpegts, err = strconv.ParseInt(splits[1], 10, 0)
			if err != nil {
				err = fmt.Errorf("astisub: parsing int %s failed: %w", splits[1], err)
				return
			}
		}
	}
	// MPEGTS ticks run at 90 kHz
	timeOffset = time.Duration(mpegts)*time.Second/90000 - local
	return
}
// ReadFromWebVTT parses a .vtt content into Subtitles: it skips the WEBVTT
// header, then scans blocks (comments, regions, styles, cues) line by line,
// and finally shifts all items by any X-TIMESTAMP-MAP offset found.
// TODO Tags (u, i, b)
// TODO Class
func ReadFromWebVTT(i io.Reader) (o *Subtitles, err error) {
	// Init
	o = NewSubtitles()
	var scanner = bufio.NewScanner(i)
	var line string
	var lineNum int
	// Skip the header
	for scanner.Scan() {
		lineNum++
		line = scanner.Text()
		line = strings.TrimPrefix(line, string(BytesBOM))
		if fs := strings.Fields(line); len(fs) > 0 && fs[0] == "WEBVTT" {
			break
		}
	}
	// Scan
	var item = &Item{}
	var blockName string
	var comments []string
	var index int
	var timeOffset time.Duration
	for scanner.Scan() {
		// Fetch line
		line = strings.TrimSpace(scanner.Text())
		lineNum++
		switch {
		// Comment
		case strings.HasPrefix(line, "NOTE "):
			blockName = webvttBlockNameComment
			comments = append(comments, strings.TrimPrefix(line, "NOTE "))
		// Empty line
		case len(line) == 0:
			// Reset block name
			blockName = ""
		// Region
		case strings.HasPrefix(line, "Region: "):
			// Add region styles
			var r = &Region{InlineStyle: &StyleAttributes{}}
			for _, part := range strings.Split(strings.TrimPrefix(line, "Region: "), " ") {
				// Split on "="
				var split = strings.Split(part, "=")
				if len(split) <= 1 {
					err = fmt.Errorf("astisub: line %d: Invalid region style %s", lineNum, part)
					return
				}
				// Switch on key
				switch split[0] {
				case "id":
					r.ID = split[1]
				case "lines":
					if r.InlineStyle.WebVTTLines, err = strconv.Atoi(split[1]); err != nil {
						err = fmt.Errorf("atoi of %s failed: %w", split[1], err)
						return
					}
				case "regionanchor":
					r.InlineStyle.WebVTTRegionAnchor = split[1]
				case "scroll":
					r.InlineStyle.WebVTTScroll = split[1]
				case "viewportanchor":
					r.InlineStyle.WebVTTViewportAnchor = split[1]
				case "width":
					r.InlineStyle.WebVTTWidth = split[1]
				}
			}
			r.InlineStyle.propagateWebVTTAttributes()
			// Add region
			o.Regions[r.ID] = r
		// Style
		case strings.HasPrefix(line, "STYLE"):
			blockName = webvttBlockNameStyle
		// Time boundaries: start of a new cue
		case strings.Contains(line, webvttTimeBoundariesSeparator):
			// Set block name
			blockName = webvttBlockNameText
			// Init new item
			item = &Item{
				Comments:    comments,
				Index:       index,
				InlineStyle: &StyleAttributes{},
			}
			// Reset index
			index = 0
			// Split line on time boundaries
			var left = strings.Split(line, webvttTimeBoundariesSeparator)
			// Split line on space to get remaining of time data
			var right = strings.Split(left[1], " ")
			// Parse time boundaries
			if item.StartAt, err = parseDurationWebVTT(left[0]); err != nil {
				err = fmt.Errorf("astisub: line %d: parsing webvtt duration %s failed: %w", lineNum, left[0], err)
				return
			}
			if item.EndAt, err = parseDurationWebVTT(right[0]); err != nil {
				err = fmt.Errorf("astisub: line %d: parsing webvtt duration %s failed: %w", lineNum, right[0], err)
				return
			}
			// Parse style (cue settings after the end timestamp)
			if len(right) > 1 {
				// Add styles
				for index := 1; index < len(right); index++ {
					// Empty
					if right[index] == "" {
						continue
					}
					// Split line on ":"
					var split = strings.Split(right[index], ":")
					if len(split) <= 1 {
						err = fmt.Errorf("astisub: line %d: Invalid inline style '%s'", lineNum, right[index])
						return
					}
					// Switch on key
					switch split[0] {
					case "align":
						item.InlineStyle.WebVTTAlign = split[1]
					case "line":
						item.InlineStyle.WebVTTLine = split[1]
					case "position":
						item.InlineStyle.WebVTTPosition = split[1]
					case "region":
						if _, ok := o.Regions[split[1]]; !ok {
							err = fmt.Errorf("astisub: line %d: Unknown region %s", lineNum, split[1])
							return
						}
						item.Region = o.Regions[split[1]]
					case "size":
						item.InlineStyle.WebVTTSize = split[1]
					case "vertical":
						item.InlineStyle.WebVTTVertical = split[1]
					}
				}
			}
			item.InlineStyle.propagateWebVTTAttributes()
			// Reset comments
			comments = []string{}
			// Append item
			o.Items = append(o.Items, item)
		// HLS timestamp map (must appear before any cue text)
		case strings.HasPrefix(line, webvttTimestampMap):
			if len(item.Lines) > 0 {
				err = errors.New("astisub: found timestamp map after processing subtitle items")
				return
			}
			timeOffset, err = parseTimestampMapWebVTT(line)
			if err != nil {
				err = fmt.Errorf("astisub: parsing webvtt timestamp map failed: %w", err)
				return
			}
		// Text
		default:
			// Switch on block name
			switch blockName {
			case webvttBlockNameComment:
				comments = append(comments, line)
			case webvttBlockNameStyle:
				// TODO Do something with the style
			case webvttBlockNameText:
				// Parse line
				if l := parseTextWebVTT(line); len(l.Items) > 0 {
					item.Lines = append(item.Lines, l)
				}
			default:
				// This is the ID
				index, _ = strconv.Atoi(line)
			}
		}
	}
	// Shift all items by the X-TIMESTAMP-MAP offset.
	// NOTE(review): negative offsets are ignored here — confirm intended.
	if timeOffset > 0 {
		o.Add(timeOffset)
	}
	return
}
// parseTextWebVTT parses the input line to fill the Line, tokenizing the cue
// text as HTML to handle <v NAME> voice spans and <i> italic tags; plain text
// runs become LineItems.
func parseTextWebVTT(i string) (o Line) {
	// Create tokenizer
	tr := html.NewTokenizer(strings.NewReader(i))
	// Loop
	italic := false
	for {
		// Get next tag
		t := tr.Next()
		// Process error (io.EOF ends the loop)
		if err := tr.Err(); err != nil {
			break
		}
		switch t {
		case html.EndTagToken:
			// Parse italic
			if bytes.Equal(tr.Raw(), bytesWebVTTItalicEndTag) {
				italic = false
				continue
			}
		case html.StartTagToken:
			// Parse voice name (capture group 3 of the <v ...> pattern)
			if matches := webVTTRegexpStartTag.FindStringSubmatch(string(tr.Raw())); len(matches) > 3 {
				if s := strings.TrimSpace(matches[3]); s != "" {
					o.VoiceName = s
				}
				continue
			}
			// Parse italic
			if bytes.Equal(tr.Raw(), bytesWebVTTItalicStartTag) {
				italic = true
				continue
			}
		case html.TextToken:
			if s := strings.TrimSpace(string(tr.Raw())); s != "" {
				// Get style attribute (only set when inside an <i> span)
				var sa *StyleAttributes
				if italic {
					sa = &StyleAttributes{
						WebVTTItalics: italic,
					}
					sa.propagateWebVTTAttributes()
				}
				// Append item
				o.Items = append(o.Items, LineItem{
					InlineStyle: sa,
					Text:        s,
				})
			}
		}
	}
	return
}
// formatDurationWebVTT formats a .vtt duration
// (delegates to formatDuration with a "." separator and 3 decimals).
func formatDurationWebVTT(i time.Duration) string {
	return formatDuration(i, ".", 3)
}
// WriteToWebVTT writes subtitles in .vtt format: header, region definitions
// (sorted by ID), then one numbered cue per item with its settings and lines.
// Returns ErrNoSubtitlesToWrite when there is nothing to write.
func (s Subtitles) WriteToWebVTT(o io.Writer) (err error) {
	// Do not write anything if no subtitles
	if len(s.Items) == 0 {
		err = ErrNoSubtitlesToWrite
		return
	}
	// Add header
	var c []byte
	c = append(c, []byte("WEBVTT\n\n")...)
	// Add regions (sorted by ID so output is deterministic).
	// NOTE(review): assumes every region has a non-nil InlineStyle — a nil
	// InlineStyle would panic below; confirm against how Regions is populated.
	var k []string
	for _, region := range s.Regions {
		k = append(k, region.ID)
	}
	sort.Strings(k)
	for _, id := range k {
		c = append(c, []byte("Region: id="+s.Regions[id].ID)...)
		// For each setting, prefer the region's own inline style, falling
		// back to its referenced style's inline style.
		if s.Regions[id].InlineStyle.WebVTTLines != 0 {
			c = append(c, bytesSpace...)
			c = append(c, []byte("lines="+strconv.Itoa(s.Regions[id].InlineStyle.WebVTTLines))...)
		} else if s.Regions[id].Style != nil && s.Regions[id].Style.InlineStyle != nil && s.Regions[id].Style.InlineStyle.WebVTTLines != 0 {
			c = append(c, bytesSpace...)
			c = append(c, []byte("lines="+strconv.Itoa(s.Regions[id].Style.InlineStyle.WebVTTLines))...)
		}
		if s.Regions[id].InlineStyle.WebVTTRegionAnchor != "" {
			c = append(c, bytesSpace...)
			c = append(c, []byte("regionanchor="+s.Regions[id].InlineStyle.WebVTTRegionAnchor)...)
		} else if s.Regions[id].Style != nil && s.Regions[id].Style.InlineStyle != nil && s.Regions[id].Style.InlineStyle.WebVTTRegionAnchor != "" {
			c = append(c, bytesSpace...)
			c = append(c, []byte("regionanchor="+s.Regions[id].Style.InlineStyle.WebVTTRegionAnchor)...)
		}
		if s.Regions[id].InlineStyle.WebVTTScroll != "" {
			c = append(c, bytesSpace...)
			c = append(c, []byte("scroll="+s.Regions[id].InlineStyle.WebVTTScroll)...)
		} else if s.Regions[id].Style != nil && s.Regions[id].Style.InlineStyle != nil && s.Regions[id].Style.InlineStyle.WebVTTScroll != "" {
			c = append(c, bytesSpace...)
			c = append(c, []byte("scroll="+s.Regions[id].Style.InlineStyle.WebVTTScroll)...)
		}
		if s.Regions[id].InlineStyle.WebVTTViewportAnchor != "" {
			c = append(c, bytesSpace...)
			c = append(c, []byte("viewportanchor="+s.Regions[id].InlineStyle.WebVTTViewportAnchor)...)
		} else if s.Regions[id].Style != nil && s.Regions[id].Style.InlineStyle != nil && s.Regions[id].Style.InlineStyle.WebVTTViewportAnchor != "" {
			c = append(c, bytesSpace...)
			c = append(c, []byte("viewportanchor="+s.Regions[id].Style.InlineStyle.WebVTTViewportAnchor)...)
		}
		if s.Regions[id].InlineStyle.WebVTTWidth != "" {
			c = append(c, bytesSpace...)
			c = append(c, []byte("width="+s.Regions[id].InlineStyle.WebVTTWidth)...)
		} else if s.Regions[id].Style != nil && s.Regions[id].Style.InlineStyle != nil && s.Regions[id].Style.InlineStyle.WebVTTWidth != "" {
			c = append(c, bytesSpace...)
			c = append(c, []byte("width="+s.Regions[id].Style.InlineStyle.WebVTTWidth)...)
		}
		c = append(c, bytesLineSeparator...)
	}
	if len(s.Regions) > 0 {
		c = append(c, bytesLineSeparator...)
	}
	// Loop through subtitles
	for index, item := range s.Items {
		// Add comments
		if len(item.Comments) > 0 {
			c = append(c, []byte("NOTE ")...)
			for _, comment := range item.Comments {
				c = append(c, []byte(comment)...)
				c = append(c, bytesLineSeparator...)
			}
			c = append(c, bytesLineSeparator...)
		}
		// Add time boundaries (cues are numbered starting at 1)
		c = append(c, []byte(strconv.Itoa(index+1))...)
		c = append(c, bytesLineSeparator...)
		c = append(c, []byte(formatDurationWebVTT(item.StartAt))...)
		c = append(c, bytesWebVTTTimeBoundariesSeparator...)
		c = append(c, []byte(formatDurationWebVTT(item.EndAt))...)
		// Add styles (cue settings; inline style wins over referenced style)
		if item.InlineStyle != nil {
			if item.InlineStyle.WebVTTAlign != "" {
				c = append(c, bytesSpace...)
				c = append(c, []byte("align:"+item.InlineStyle.WebVTTAlign)...)
			} else if item.Style != nil && item.Style.InlineStyle != nil && item.Style.InlineStyle.WebVTTAlign != "" {
				c = append(c, bytesSpace...)
				c = append(c, []byte("align:"+item.Style.InlineStyle.WebVTTAlign)...)
			}
			if item.InlineStyle.WebVTTLine != "" {
				c = append(c, bytesSpace...)
				c = append(c, []byte("line:"+item.InlineStyle.WebVTTLine)...)
			} else if item.Style != nil && item.Style.InlineStyle != nil && item.Style.InlineStyle.WebVTTLine != "" {
				c = append(c, bytesSpace...)
				c = append(c, []byte("line:"+item.Style.InlineStyle.WebVTTLine)...)
			}
			if item.InlineStyle.WebVTTPosition != "" {
				c = append(c, bytesSpace...)
				c = append(c, []byte("position:"+item.InlineStyle.WebVTTPosition)...)
			} else if item.Style != nil && item.Style.InlineStyle != nil && item.Style.InlineStyle.WebVTTPosition != "" {
				c = append(c, bytesSpace...)
				c = append(c, []byte("position:"+item.Style.InlineStyle.WebVTTPosition)...)
			}
			if item.Region != nil {
				c = append(c, bytesSpace...)
				c = append(c, []byte("region:"+item.Region.ID)...)
			}
			if item.InlineStyle.WebVTTSize != "" {
				c = append(c, bytesSpace...)
				c = append(c, []byte("size:"+item.InlineStyle.WebVTTSize)...)
			} else if item.Style != nil && item.Style.InlineStyle != nil && item.Style.InlineStyle.WebVTTSize != "" {
				c = append(c, bytesSpace...)
				c = append(c, []byte("size:"+item.Style.InlineStyle.WebVTTSize)...)
			}
			if item.InlineStyle.WebVTTVertical != "" {
				c = append(c, bytesSpace...)
				c = append(c, []byte("vertical:"+item.InlineStyle.WebVTTVertical)...)
			} else if item.Style != nil && item.Style.InlineStyle != nil && item.Style.InlineStyle.WebVTTVertical != "" {
				c = append(c, bytesSpace...)
				c = append(c, []byte("vertical:"+item.Style.InlineStyle.WebVTTVertical)...)
			}
		}
		// Add new line
		c = append(c, bytesLineSeparator...)
		// Loop through lines
		for _, l := range item.Lines {
			c = append(c, l.webVTTBytes()...)
		}
		// Add new line
		c = append(c, bytesLineSeparator...)
	}
	// Remove last new line
	c = c[:len(c)-1]
	// Write
	if _, err = o.Write(c); err != nil {
		err = fmt.Errorf("astisub: writing failed: %w", err)
		return
	}
	return
}
// webVTTBytes serializes the line to WebVTT cue text: an optional voice tag
// prefix, the items separated by single spaces, and a trailing line separator.
func (l Line) webVTTBytes() []byte {
	var out []byte
	if name := l.VoiceName; name != "" {
		out = append(out, "<v "+name+">"...)
	}
	for idx, li := range l.Items {
		// Separate items with a space, never trailing after the last one.
		if idx > 0 {
			out = append(out, ' ')
		}
		out = append(out, li.webVTTBytes()...)
	}
	return append(out, bytesLineSeparator...)
}
// webVTTBytes serializes a single line item, wrapping its text in a
// <c.COLOR> class tag and/or <i> tag depending on its inline style.
func (li LineItem) webVTTBytes() []byte {
	// Resolve the CSS color class from the TTML color, if any
	var color string
	if li.InlineStyle != nil && li.InlineStyle.TTMLColor != nil {
		color = cssColor(*li.InlineStyle.TTMLColor)
	}
	// Resolve italics
	italic := li.InlineStyle != nil && li.InlineStyle.WebVTTItalics
	// Build: opening tags, text, closing tags (reverse order)
	var b []byte
	if color != "" {
		b = append(b, "<c."+color+">"...)
	}
	if italic {
		b = append(b, "<i>"...)
	}
	b = append(b, li.Text...)
	if italic {
		b = append(b, "</i>"...)
	}
	if color != "" {
		b = append(b, "</c>"...)
	}
	return b
}
// cssColor maps an RGB hex string (e.g. "#ff0000", case-insensitive) to the
// WebVTT CSS color class name used in <c.COLOR> tags. Unknown colors map to "".
func cssColor(rgb string) string {
	switch strings.ToLower(rgb) {
	case "#00ffff":
		return "cyan" // narrator, thought
	case "#ffff00":
		return "yellow" // out of vision
	case "#ff0000":
		return "red" // noises
	case "#ff00ff":
		return "magenta" // song
	case "#00ff00":
		return "lime" // foreign speak
	default:
		return "" // returning the empty string is ok
	}
}

5
vendor/github.com/asticode/go-astits/.gitignore generated vendored Normal file
View file

@ -0,0 +1,5 @@
.DS_Store
Thumbs.db
.idea/
cover*
test

14
vendor/github.com/asticode/go-astits/.travis.yml generated vendored Normal file
View file

@ -0,0 +1,14 @@
language: go
go:
- 1.x
- tip
install:
- go get -t ./...
- go get golang.org/x/tools/cmd/cover
- go get github.com/mattn/goveralls
matrix:
allow_failures:
- go: tip
script:
- go test -race -v -coverprofile=coverage.out
- $HOME/gopath/bin/goveralls -coverprofile=coverage.out -service=travis-ci

21
vendor/github.com/asticode/go-astits/LICENSE generated vendored Normal file
View file

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017 Quentin Renard
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

193
vendor/github.com/asticode/go-astits/README.md generated vendored Normal file
View file

@ -0,0 +1,193 @@
[![GoReportCard](http://goreportcard.com/badge/github.com/asticode/go-astits)](http://goreportcard.com/report/github.com/asticode/go-astits)
[![GoDoc](https://godoc.org/github.com/asticode/go-astits?status.svg)](https://godoc.org/github.com/asticode/go-astits)
[![Travis](https://travis-ci.org/asticode/go-astits.svg?branch=master)](https://travis-ci.org/asticode/go-astits#)
[![Coveralls](https://coveralls.io/repos/github/asticode/go-astits/badge.svg?branch=master)](https://coveralls.io/github/asticode/go-astits)
This is a Golang library to natively demux and mux MPEG Transport Streams (ts) in Go.
WARNING: this library is not yet production ready. Use at your own risk!
# Installation
To install the library use the following:
go get -u github.com/asticode/go-astits/...
# Before looking at the code...
The transport stream is made of packets.<br>
Each packet has a header, an optional adaptation field and a payload.<br>
Several payloads can be appended and parsed as a data.
```
TRANSPORT STREAM
+--------------------------------------------------------------------------------------------------+
| |
PACKET PACKET
+----------------------------------------------+----------------------------------------------+----
| | |
+--------+---------------------------+---------+--------+---------------------------+---------+
| HEADER | OPTIONAL ADAPTATION FIELD | PAYLOAD | HEADER | OPTIONAL ADAPTATION FIELD | PAYLOAD | ...
+--------+---------------------------+---------+--------+---------------------------+---------+
| | | |
+---------+ +---------+
| |
+----------------------------------------------+
DATA
```
# Using the library in your code
WARNING: the code below doesn't handle errors for readability purposes. However you SHOULD!
## Demux
```go
// Create a cancellable context in case you want to stop reading packets/data any time you want
ctx, cancel := context.WithCancel(context.Background())
// Handle SIGTERM signal
ch := make(chan os.Signal, 1)
signal.Notify(ch, syscall.SIGTERM)
go func() {
<-ch
cancel()
}()
// Open your file or initialize any kind of io.Reader
// Buffering using bufio.Reader is recommended for performance
f, _ := os.Open("/path/to/file.ts")
defer f.Close()
// Create the demuxer
dmx := astits.NewDemuxer(ctx, f)
for {
// Get the next data
d, _ := dmx.NextData()
// Data is a PMT data
if d.PMT != nil {
// Loop through elementary streams
for _, es := range d.PMT.ElementaryStreams {
fmt.Printf("Stream detected: %d\n", es.ElementaryPID)
}
return
}
}
```
## Mux
```go
// Create a cancellable context in case you want to stop writing packets/data any time you want
ctx, cancel := context.WithCancel(context.Background())
// Handle SIGTERM signal
ch := make(chan os.Signal, 1)
signal.Notify(ch, syscall.SIGTERM)
go func() {
<-ch
cancel()
}()
// Create your file or initialize any kind of io.Writer
// Buffering using bufio.Writer is recommended for performance
f, _ := os.Create("/path/to/file.ts")
defer f.Close()
// Create the muxer
mx := astits.NewMuxer(ctx, f)
// Add an elementary stream
mx.AddElementaryStream(astits.PMTElementaryStream{
ElementaryPID: 1,
StreamType: astits.StreamTypeMetadata,
})
// Write tables
// Using that function is not mandatory, WriteData will retransmit tables from time to time
mx.WriteTables()
// Write data
mx.WriteData(&astits.MuxerData{
PES: &astits.PESData{
Data: []byte("test"),
},
PID: 1,
})
```
## Options
In order to pass options to the demuxer or the muxer, look for the methods prefixed with `DemuxerOpt` or `MuxerOpt` and add them upon calling `NewDemuxer` or `NewMuxer` :
```go
// This is your custom packets parser
p := func(ps []*astits.Packet) (ds []*astits.Data, skip bool, err error) {
// This is your logic
skip = true
return
}
// Now you can create a demuxer with the proper options
dmx := NewDemuxer(ctx, f, DemuxerOptPacketSize(192), DemuxerOptPacketsParser(p))
```
# CLI
This library provides 2 CLIs that will automatically get installed in `GOPATH/bin` on `go get` execution.
## astits-probe
### List streams
$ astits-probe -i <path to your file> -f <format: text|json (default: text)>
### List packets
$ astits-probe packets -i <path to your file>
### List data
$ astits-probe data -i <path to your file> -d <data type: eit|nit|... (repeatable argument | if empty, all data types are shown)>
## astits-es-split
### Split streams into separate .ts files
$ astits-es-split <path to your file> -o <path to output dir>
# Features and roadmap
- [x] Add demuxer
- [x] Add muxer
- [x] Demux PES packets
- [x] Mux PES packets
- [x] Demux PAT packets
- [x] Mux PAT packets
- [x] Demux PMT packets
- [x] Mux PMT packets
- [x] Demux EIT packets
- [ ] Mux EIT packets
- [x] Demux NIT packets
- [ ] Mux NIT packets
- [x] Demux SDT packets
- [ ] Mux SDT packets
- [x] Demux TOT packets
- [ ] Mux TOT packets
- [ ] Demux BAT packets
- [ ] Mux BAT packets
- [ ] Demux DIT packets
- [ ] Mux DIT packets
- [ ] Demux RST packets
- [ ] Mux RST packets
- [ ] Demux SIT packets
- [ ] Mux SIT packets
- [ ] Mux ST packets
- [ ] Demux TDT packets
- [ ] Mux TDT packets
- [ ] Demux TSDT packets
- [ ] Mux TSDT packets

View file

@ -0,0 +1,29 @@
package astits
import (
"time"
)
// ClockReference represents a clock reference
// Base is based on a 90 kHz clock and extension is based on a 27 MHz clock
type ClockReference struct {
Base, Extension int64
}
// newClockReference builds a new clock reference
func newClockReference(base, extension int64) *ClockReference {
return &ClockReference{
Base: base,
Extension: extension,
}
}
// Duration converts the clock reference into duration
func (p ClockReference) Duration() time.Duration {
return time.Duration(p.Base*1e9/90000) + time.Duration(p.Extension*1e9/27000000)
}
// Time converts the clock reference into time
func (p ClockReference) Time() time.Time {
return time.Unix(0, p.Duration().Nanoseconds())
}

25
vendor/github.com/asticode/go-astits/crc32.go generated vendored Normal file
View file

@ -0,0 +1,25 @@
package astits
const (
	// crc32Polynomial is the initial CRC register value (all ones) used by
	// the MPEG-2 variant of CRC32.
	crc32Polynomial = uint32(0xffffffff)
)

// computeCRC32 computes the CRC32/MPEG-2 checksum of bs.
// https://stackoverflow.com/questions/35034042/how-to-calculate-crc32-in-psi-si-packet
func computeCRC32(bs []byte) uint32 {
	return updateCRC32(crc32Polynomial, bs)
}

// updateCRC32 folds bs into a running CRC32/MPEG-2 value bit by bit using
// the generator polynomial 0x04C11DB7 (no reflection, no final XOR).
func updateCRC32(crc32 uint32, bs []byte) uint32 {
	for _, b := range bs {
		for i := 0; i < 8; i++ {
			// XOR in the polynomial whenever the CRC's and the data's MSBs differ
			msbDiffers := (crc32&0x80000000 != 0) != (b&0x80 != 0)
			crc32 <<= 1
			if msbDiffers {
				crc32 ^= 0x04C11DB7
			}
			b <<= 1
		}
	}
	return crc32
}

117
vendor/github.com/asticode/go-astits/data.go generated vendored Normal file
View file

@ -0,0 +1,117 @@
package astits
import (
"fmt"
"github.com/asticode/go-astikit"
)
// PIDs: well-known MPEG-TS packet identifiers.
const (
	PIDPAT  uint16 = 0x0    // Program Association Table (PAT) contains a directory listing of all Program Map Tables.
	PIDCAT  uint16 = 0x1    // Conditional Access Table (CAT) contains a directory listing of all ITU-T Rec. H.222 entitlement management message streams used by Program Map Tables.
	PIDTSDT uint16 = 0x2    // Transport Stream Description Table (TSDT) contains descriptors related to the overall transport stream
	PIDNull uint16 = 0x1fff // Null Packet (used for fixed bandwidth padding)
)
// DemuxerData represents a data parsed by Demuxer.
// Presumably only the field(s) matching the parsed payload type are set
// (parseData sets PES; PSI payloads fill the table fields) — verify at callers.
type DemuxerData struct {
	EIT         *EITData
	FirstPacket *Packet // first packet of the payload this data was parsed from
	NIT         *NITData
	PAT         *PATData
	PES         *PESData
	PID         uint16 // PID the payload was read from
	PMT         *PMTData
	SDT         *SDTData
	TOT         *TOTData
}
// MuxerData represents a data to be written by Muxer.
type MuxerData struct {
	PID             uint16 // destination PID
	AdaptationField *PacketAdaptationField
	PES             *PESData
}
// parseData parses a payload spanning over multiple packets and returns a set of data.
// A custom PacketsParser, when provided, gets first shot and may skip default parsing.
// NOTE(review): assumes ps is non-empty — ps[0] is dereferenced unconditionally; confirm at callers.
func parseData(ps []*Packet, prs PacketsParser, pm programMap) (ds []*DemuxerData, err error) {
	// Use custom parser first
	if prs != nil {
		var skip bool
		if ds, skip, err = prs(ps); err != nil {
			err = fmt.Errorf("astits: custom packets parsing failed: %w", err)
			return
		} else if skip {
			return
		}
	}
	// Get payload length
	var l int
	for _, p := range ps {
		l += len(p.Payload)
	}
	// Append payload (concatenate all packet payloads into one buffer)
	var payload = make([]byte, l)
	var c int
	for _, p := range ps {
		c += copy(payload[c:], p.Payload)
	}
	// Create reader
	i := astikit.NewBytesIterator(payload)
	// Parse PID
	pid := ps[0].Header.PID
	// Parse payload
	if pid == PIDCAT {
		// Information in a CAT payload is private and dependent on the CA system. Use the PacketsParser
		// to parse this type of payload
	} else if isPSIPayload(pid, pm) {
		// Parse PSI data
		var psiData *PSIData
		if psiData, err = parsePSIData(i); err != nil {
			err = fmt.Errorf("astits: parsing PSI data failed: %w", err)
			return
		}
		// Append data
		ds = psiData.toData(ps[0], pid)
	} else if isPESPayload(payload) {
		// Parse PES data
		var pesData *PESData
		if pesData, err = parsePESData(i); err != nil {
			err = fmt.Errorf("astits: parsing PES data failed: %w", err)
			return
		}
		// Append data
		ds = append(ds, &DemuxerData{
			FirstPacket: ps[0],
			PES:         pesData,
			PID:         pid,
		})
	}
	return
}
// isPSIPayload checks whether the payload is a PSI one: the PAT PID, a PMT
// PID known to the program map, or one of the DVB-reserved PID ranges.
func isPSIPayload(pid uint16, pm programMap) bool {
	if pid == PIDPAT || pm.exists(pid) {
		return true
	}
	// DVB PIDs (0x10-0x14 and 0x1e-0x1f)
	return (pid >= 0x10 && pid <= 0x14) || (pid >= 0x1e && pid <= 0x1f)
}
// isPESPayload checks whether the payload is a PES one, i.e. whether it
// begins with the 3-byte PES packet start code prefix 0x000001.
func isPESPayload(i []byte) bool {
	// Packet is not big enough to hold the prefix
	if len(i) < 3 {
		return false
	}
	// Check prefix
	return i[0] == 0x00 && i[1] == 0x00 && i[2] == 0x01
}

124
vendor/github.com/asticode/go-astits/data_eit.go generated vendored Normal file
View file

@ -0,0 +1,124 @@
package astits
import (
"fmt"
"time"
"github.com/asticode/go-astikit"
)
// EITData represents an EIT (Event Information Table) data.
// Page: 36 | Chapter: 5.2.4 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
// (barbashov) the link above may be broken, alternative: https://dvb.org/wp-content/uploads/2019/12/a038_tm1217r37_en300468v1_17_1_-_rev-134_-_si_specification.pdf
type EITData struct {
	Events                   []*EITDataEvent
	LastTableID              uint8
	OriginalNetworkID        uint16
	SegmentLastSectionNumber uint8
	ServiceID                uint16 // taken from the section's table ID extension
	TransportStreamID        uint16
}
// EITDataEvent represents an EIT data event.
type EITDataEvent struct {
	Descriptors    []*Descriptor
	Duration       time.Duration
	EventID        uint16
	HasFreeCSAMode bool // When true indicates that access to one or more streams may be controlled by a CA system.
	RunningStatus  uint8
	StartTime      time.Time
}
// parseEITSection parses an EIT section: the fixed header fields (transport
// stream ID, original network ID, segment last section number, last table ID)
// followed by the event loop until offsetSectionsEnd is reached.
// tableIDExtension carries the service ID for this section.
func parseEITSection(i *astikit.BytesIterator, offsetSectionsEnd int, tableIDExtension uint16) (d *EITData, err error) {
	// Create data
	d = &EITData{ServiceID: tableIDExtension}
	// Get next 2 bytes
	var bs []byte
	if bs, err = i.NextBytesNoCopy(2); err != nil {
		err = fmt.Errorf("astits: fetching next bytes failed: %w", err)
		return
	}
	// Transport stream ID
	d.TransportStreamID = uint16(bs[0])<<8 | uint16(bs[1])
	// Get next 2 bytes
	if bs, err = i.NextBytesNoCopy(2); err != nil {
		err = fmt.Errorf("astits: fetching next bytes failed: %w", err)
		return
	}
	// Original network ID
	d.OriginalNetworkID = uint16(bs[0])<<8 | uint16(bs[1])
	// Get next byte
	var b byte
	if b, err = i.NextByte(); err != nil {
		err = fmt.Errorf("astits: fetching next byte failed: %w", err)
		return
	}
	// Segment last section number
	d.SegmentLastSectionNumber = uint8(b)
	// Get next byte
	if b, err = i.NextByte(); err != nil {
		err = fmt.Errorf("astits: fetching next byte failed: %w", err)
		return
	}
	// Last table ID
	d.LastTableID = uint8(b)
	// Loop until end of section data is reached
	for i.Offset() < offsetSectionsEnd {
		// Get next 2 bytes
		if bs, err = i.NextBytesNoCopy(2); err != nil {
			err = fmt.Errorf("astits: fetching next bytes failed: %w", err)
			return
		}
		// Event ID
		var e = &EITDataEvent{}
		e.EventID = uint16(bs[0])<<8 | uint16(bs[1])
		// Start time
		if e.StartTime, err = parseDVBTime(i); err != nil {
			// Wrap the underlying error; previously this message dropped it
			// ("astits: parsing DVB time" with no %w), which was inconsistent
			// with every other error site in this function.
			err = fmt.Errorf("astits: parsing DVB time failed: %w", err)
			return
		}
		// Duration
		if e.Duration, err = parseDVBDurationSeconds(i); err != nil {
			err = fmt.Errorf("astits: parsing DVB duration seconds failed: %w", err)
			return
		}
		// Get next byte
		if b, err = i.NextByte(); err != nil {
			err = fmt.Errorf("astits: fetching next byte failed: %w", err)
			return
		}
		// Running status (top 3 bits)
		e.RunningStatus = uint8(b) >> 5
		// Free CA mode (bit 4)
		e.HasFreeCSAMode = uint8(b&0x10) > 0
		// We need to rewind since the current byte is used by the descriptor as well
		i.Skip(-1)
		// Descriptors
		if e.Descriptors, err = parseDescriptors(i); err != nil {
			err = fmt.Errorf("astits: parsing descriptors failed: %w", err)
			return
		}
		// Add event
		d.Events = append(d.Events, e)
	}
	return
}

80
vendor/github.com/asticode/go-astits/data_nit.go generated vendored Normal file
View file

@ -0,0 +1,80 @@
package astits
import (
"fmt"
"github.com/asticode/go-astikit"
)
// NITData represents a NIT (Network Information Table) data.
// Page: 29 | Chapter: 5.2.1 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
// (barbashov) the link above may be broken, alternative: https://dvb.org/wp-content/uploads/2019/12/a038_tm1217r37_en300468v1_17_1_-_rev-134_-_si_specification.pdf
type NITData struct {
	NetworkDescriptors []*Descriptor
	NetworkID          uint16 // taken from the section's table ID extension
	TransportStreams   []*NITDataTransportStream
}
// NITDataTransportStream represents a NIT data transport stream.
type NITDataTransportStream struct {
	OriginalNetworkID    uint16
	TransportDescriptors []*Descriptor
	TransportStreamID    uint16
}
// parseNITSection parses a NIT section: network descriptors first, then the
// transport stream loop whose byte length is read from the 12-bit loop-length
// field. tableIDExtension carries the network ID for this section.
func parseNITSection(i *astikit.BytesIterator, tableIDExtension uint16) (d *NITData, err error) {
	// Create data
	d = &NITData{NetworkID: tableIDExtension}
	// Network descriptors
	if d.NetworkDescriptors, err = parseDescriptors(i); err != nil {
		err = fmt.Errorf("astits: parsing descriptors failed: %w", err)
		return
	}
	// Get next bytes
	var bs []byte
	if bs, err = i.NextBytesNoCopy(2); err != nil {
		err = fmt.Errorf("astits: fetching next bytes failed: %w", err)
		return
	}
	// Transport stream loop length (12 bits: low nibble of bs[0] + bs[1])
	transportStreamLoopLength := int(uint16(bs[0]&0xf)<<8 | uint16(bs[1]))
	// Transport stream loop
	offsetEnd := i.Offset() + transportStreamLoopLength
	for i.Offset() < offsetEnd {
		// Create transport stream
		ts := &NITDataTransportStream{}
		// Get next bytes
		if bs, err = i.NextBytesNoCopy(2); err != nil {
			err = fmt.Errorf("astits: fetching next bytes failed: %w", err)
			return
		}
		// Transport stream ID
		ts.TransportStreamID = uint16(bs[0])<<8 | uint16(bs[1])
		// Get next bytes
		if bs, err = i.NextBytesNoCopy(2); err != nil {
			err = fmt.Errorf("astits: fetching next bytes failed: %w", err)
			return
		}
		// Original network ID
		ts.OriginalNetworkID = uint16(bs[0])<<8 | uint16(bs[1])
		// Transport descriptors
		if ts.TransportDescriptors, err = parseDescriptors(i); err != nil {
			err = fmt.Errorf("astits: parsing descriptors failed: %w", err)
			return
		}
		// Append transport stream
		d.TransportStreams = append(d.TransportStreams, ts)
	}
	return
}

63
vendor/github.com/asticode/go-astits/data_pat.go generated vendored Normal file
View file

@ -0,0 +1,63 @@
package astits
import (
"fmt"
"github.com/asticode/go-astikit"
)
const (
    patSectionEntryBytesSize = 4 // 16 bits + 3 reserved + 13 bits = 32 bits
)

// PATData represents a PAT data
// https://en.wikipedia.org/wiki/Program-specific_information
type PATData struct {
    Programs          []*PATProgram // One entry per program in the transport stream
    TransportStreamID uint16        // Taken from the section's table ID extension
}

// PATProgram represents a PAT program
type PATProgram struct {
    ProgramMapID  uint16 // The packet identifier that contains the associated PMT
    ProgramNumber uint16 // Relates to the Table ID extension in the associated PMT. A value of 0 is reserved for a NIT packet identifier.
}
// parsePATSection parses a PAT section
func parsePATSection(i *astikit.BytesIterator, offsetSectionsEnd int, tableIDExtension uint16) (d *PATData, err error) {
    // The table ID extension carries the transport stream ID
    d = &PATData{TransportStreamID: tableIDExtension}

    // Each program entry is 4 bytes; consume entries until the section data ends
    for i.Offset() < offsetSectionsEnd {
        var entry []byte
        if entry, err = i.NextBytesNoCopy(4); err != nil {
            err = fmt.Errorf("astits: fetching next bytes failed: %w", err)
            return
        }

        // program_number (16 bits), 3 reserved bits, program_map_PID (13 bits)
        d.Programs = append(d.Programs, &PATProgram{
            ProgramMapID:  uint16(entry[2]&0x1f)<<8 | uint16(entry[3]),
            ProgramNumber: uint16(entry[0])<<8 | uint16(entry[1]),
        })
    }
    return
}
// calcPATSectionLength returns the number of bytes writePATSection will emit:
// one fixed-size entry per program. Uses patSectionEntryBytesSize instead of
// a hard-coded 4 so it stays in sync with writePATSection.
func calcPATSectionLength(d *PATData) uint16 {
    return uint16(patSectionEntryBytesSize * len(d.Programs))
}
// writePATSection serializes the PAT program entries, returning the number of
// bytes written (patSectionEntryBytesSize per program).
func writePATSection(w *astikit.BitsWriter, d *PATData) (int, error) {
    b := astikit.NewBitsWriterBatch(w)

    for _, program := range d.Programs {
        b.Write(program.ProgramNumber)     // program_number (16 bits)
        b.WriteN(uint8(0xff), 3)           // reserved bits
        b.WriteN(program.ProgramMapID, 13) // program_map_PID
    }

    return patSectionEntryBytesSize * len(d.Programs), b.Err()
}

747
vendor/github.com/asticode/go-astits/data_pes.go generated vendored Normal file
View file

@ -0,0 +1,747 @@
package astits
import (
"fmt"
"github.com/asticode/go-astikit"
)
// P-STD buffer scales
const (
    PSTDBufferScale128Bytes  = 0 // buffer size is expressed in units of 128 bytes
    PSTDBufferScale1024Bytes = 1 // buffer size is expressed in units of 1024 bytes
)

// PTS DTS indicator
const (
    PTSDTSIndicatorBothPresent = 3
    PTSDTSIndicatorIsForbidden = 1
    PTSDTSIndicatorNoPTSOrDTS  = 0
    PTSDTSIndicatorOnlyPTS     = 2
)

// Stream IDs
const (
    StreamIDPrivateStream1 = 189
    StreamIDPaddingStream  = 190
    StreamIDPrivateStream2 = 191
)

// Trick mode controls
const (
    TrickModeControlFastForward = 0
    TrickModeControlFastReverse = 3
    TrickModeControlFreezeFrame = 2
    TrickModeControlSlowMotion  = 1
    TrickModeControlSlowReverse = 4
)

// Fixed serialized sizes (in bytes) used by the length/write helpers below
const (
    pesHeaderLength    = 6
    ptsOrDTSByteLength = 5
    escrLength         = 6
    dsmTrickModeLength = 1
)
// PESData represents a PES data
// https://en.wikipedia.org/wiki/Packetized_elementary_stream
// http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
// http://happy.emu.id.au/lab/tut/dttb/dtbtut4b.htm
type PESData struct {
    Data   []byte     // Elementary stream payload
    Header *PESHeader // Parsed PES packet header
}

// PESHeader represents a packet PES header
type PESHeader struct {
    OptionalHeader *PESOptionalHeader // nil for stream IDs that carry no optional header (see hasPESOptionalHeader)
    PacketLength   uint16             // Specifies the number of bytes remaining in the packet after this field. Can be zero. If the PES packet length is set to zero, the PES packet can be of any length. A value of zero for the PES packet length can be used only when the PES packet payload is a video elementary stream.
    StreamID       uint8              // Examples: Audio streams (0xC0-0xDF), Video streams (0xE0-0xEF)
}

// PESOptionalHeader represents a PES optional header
type PESOptionalHeader struct {
    AdditionalCopyInfo              uint8
    CRC                             uint16 // CRC of the previous PES packet
    DataAlignmentIndicator          bool   // True indicates that the PES packet header is immediately followed by the video start code or audio syncword
    DSMTrickMode                    *DSMTrickMode
    DTS                             *ClockReference // Decoding timestamp
    ESCR                            *ClockReference // Elementary stream clock reference
    ESRate                          uint32
    Extension2Data                  []byte
    Extension2Length                uint8
    HasAdditionalCopyInfo           bool
    HasCRC                          bool
    HasDSMTrickMode                 bool
    HasESCR                         bool
    HasESRate                       bool
    HasExtension                    bool
    HasExtension2                   bool
    HasOptionalFields               bool
    HasPackHeaderField              bool
    HasPrivateData                  bool
    HasProgramPacketSequenceCounter bool
    HasPSTDBuffer                   bool
    HeaderLength                    uint8 // PES_header_data_length: bytes of optional fields following this field
    IsCopyrighted                   bool
    IsOriginal                      bool
    MarkerBits                      uint8 // Written as 0b10 when serializing
    MPEG1OrMPEG2ID                  uint8
    OriginalStuffingLength          uint8
    PacketSequenceCounter           uint8
    PackField                       uint8
    Priority                        bool
    PrivateData                     []byte // Always 16 bytes when present
    PSTDBufferScale                 uint8
    PSTDBufferSize                  uint16
    PTS                             *ClockReference // Presentation timestamp
    PTSDTSIndicator                 uint8
    ScramblingControl               uint8
}

// DSMTrickMode represents a DSM trick mode
// https://books.google.fr/books?id=vwUrAwAAQBAJ&pg=PT501&lpg=PT501&dq=dsm+trick+mode+control&source=bl&ots=fI-9IHXMRL&sig=PWnhxrsoMWNQcl1rMCPmJGNO9Ds&hl=fr&sa=X&ved=0ahUKEwjogafD8bjXAhVQ3KQKHeHKD5oQ6AEINDAB#v=onepage&q=dsm%20trick%20mode%20control&f=false
type DSMTrickMode struct {
    FieldID             uint8 // Valid for fast forward/reverse and freeze frame
    FrequencyTruncation uint8 // Valid for fast forward/reverse
    IntraSliceRefresh   uint8 // Valid for fast forward/reverse
    RepeatControl       uint8 // Valid for slow motion/reverse
    TrickModeControl    uint8
}
// IsVideoStream reports whether the header carries one of the stream IDs this
// muxer emits for video (0xe0, or 0xfd for extended streams such as DIRAC).
func (h *PESHeader) IsVideoStream() bool {
    switch h.StreamID {
    case 0xe0, 0xfd:
        return true
    }
    return false
}
// parsePESData parses a PES data
// https://en.wikipedia.org/wiki/Packetized_elementary_stream
func parsePESData(i *astikit.BytesIterator) (d *PESData, err error) {
    // Create data
    d = &PESData{}

    // Skip first 3 bytes that are there to identify the PES payload
    // (the 0x000001 packet_start_code_prefix)
    i.Seek(3)

    // Parse header; it reports the absolute offsets of the payload
    var dataStart, dataEnd int
    if d.Header, dataStart, dataEnd, err = parsePESHeader(i); err != nil {
        err = fmt.Errorf("astits: parsing PES header failed: %w", err)
        return
    }

    // Seek to data
    i.Seek(dataStart)

    // Extract data (NextBytes, the copying variant, so the payload does not
    // alias the iterator's buffer)
    if d.Data, err = i.NextBytes(dataEnd - dataStart); err != nil {
        err = fmt.Errorf("astits: fetching next bytes failed: %w", err)
        return
    }
    return
}
// hasPESOptionalHeader checks whether the data has a PES optional header:
// every stream ID carries one except padding and private stream 2.
func hasPESOptionalHeader(streamID uint8) bool {
    switch streamID {
    case StreamIDPaddingStream, StreamIDPrivateStream2:
        return false
    }
    return true
}
// parsePESHeader parses a PES header (the fixed 6-byte part plus, when the
// stream ID carries one, the optional header). It returns the absolute
// offsets at which the payload starts and ends.
func parsePESHeader(i *astikit.BytesIterator) (h *PESHeader, dataStart, dataEnd int, err error) {
    // Create header
    h = &PESHeader{}

    // Get next byte
    var b byte
    if b, err = i.NextByte(); err != nil {
        err = fmt.Errorf("astits: fetching next byte failed: %w", err)
        return
    }

    // Stream ID
    h.StreamID = uint8(b)

    // Get next bytes
    var bs []byte
    if bs, err = i.NextBytesNoCopy(2); err != nil {
        err = fmt.Errorf("astits: fetching next bytes failed: %w", err)
        return
    }

    // Length
    h.PacketLength = uint16(bs[0])<<8 | uint16(bs[1])

    // Update data end: a zero packet length (allowed for video) means the
    // packet extends to the end of the available data
    if h.PacketLength > 0 {
        dataEnd = i.Offset() + int(h.PacketLength)
    } else {
        dataEnd = i.Len()
    }

    // Optional header (absent for padding and private stream 2)
    if hasPESOptionalHeader(h.StreamID) {
        if h.OptionalHeader, dataStart, err = parsePESOptionalHeader(i); err != nil {
            err = fmt.Errorf("astits: parsing PES optional header failed: %w", err)
            return
        }
    } else {
        dataStart = i.Offset()
    }
    return
}
// parsePESOptionalHeader parses a PES optional header.
// It returns the header and the absolute offset at which the PES payload
// starts (offset after the length byte + PES_header_data_length).
func parsePESOptionalHeader(i *astikit.BytesIterator) (h *PESOptionalHeader, dataStart int, err error) {
    // Create header
    h = &PESOptionalHeader{}

    // First byte: '10' marker, scrambling control and boolean flags
    var b byte
    if b, err = i.NextByte(); err != nil {
        err = fmt.Errorf("astits: fetching next byte failed: %w", err)
        return
    }
    h.MarkerBits = uint8(b) >> 6
    h.ScramblingControl = uint8(b) >> 4 & 0x3
    h.Priority = uint8(b)&0x8 > 0
    h.DataAlignmentIndicator = uint8(b)&0x4 > 0
    h.IsCopyrighted = uint8(b)&0x2 > 0 // was uint(b): same truth value, now consistent with the other casts
    h.IsOriginal = uint8(b)&0x1 > 0

    // Second byte: PTS/DTS indicator and presence flags for the optional fields
    if b, err = i.NextByte(); err != nil {
        err = fmt.Errorf("astits: fetching next byte failed: %w", err)
        return
    }
    h.PTSDTSIndicator = uint8(b) >> 6 & 0x3
    h.HasESCR = uint8(b)&0x20 > 0
    h.HasESRate = uint8(b)&0x10 > 0
    h.HasDSMTrickMode = uint8(b)&0x8 > 0
    h.HasAdditionalCopyInfo = uint8(b)&0x4 > 0
    h.HasCRC = uint8(b)&0x2 > 0
    h.HasExtension = uint8(b)&0x1 > 0

    // Third byte: PES_header_data_length
    if b, err = i.NextByte(); err != nil {
        err = fmt.Errorf("astits: fetching next byte failed: %w", err)
        return
    }
    h.HeaderLength = uint8(b)

    // The payload starts right after the optional fields
    dataStart = i.Offset() + int(h.HeaderLength)

    // PTS/DTS
    if h.PTSDTSIndicator == PTSDTSIndicatorOnlyPTS {
        if h.PTS, err = parsePTSOrDTS(i); err != nil {
            err = fmt.Errorf("astits: parsing PTS failed: %w", err)
            return
        }
    } else if h.PTSDTSIndicator == PTSDTSIndicatorBothPresent {
        if h.PTS, err = parsePTSOrDTS(i); err != nil {
            err = fmt.Errorf("astits: parsing PTS failed: %w", err)
            return
        }
        if h.DTS, err = parsePTSOrDTS(i); err != nil {
            // Fixed: this branch previously reported "parsing PTS failed"
            err = fmt.Errorf("astits: parsing DTS failed: %w", err)
            return
        }
    }

    // ESCR
    if h.HasESCR {
        if h.ESCR, err = parseESCR(i); err != nil {
            err = fmt.Errorf("astits: parsing ESCR failed: %w", err)
            return
        }
    }

    // ES rate: 22 bits between two marker bits
    if h.HasESRate {
        var bs []byte
        if bs, err = i.NextBytesNoCopy(3); err != nil {
            err = fmt.Errorf("astits: fetching next bytes failed: %w", err)
            return
        }
        h.ESRate = uint32(bs[0])&0x7f<<15 | uint32(bs[1])<<7 | uint32(bs[2])>>1
    }

    // Trick mode
    if h.HasDSMTrickMode {
        if b, err = i.NextByte(); err != nil {
            err = fmt.Errorf("astits: fetching next byte failed: %w", err)
            return
        }
        h.DSMTrickMode = parseDSMTrickMode(b)
    }

    // Additional copy info: 7 bits after a marker bit
    if h.HasAdditionalCopyInfo {
        if b, err = i.NextByte(); err != nil {
            err = fmt.Errorf("astits: fetching next byte failed: %w", err)
            return
        }
        h.AdditionalCopyInfo = b & 0x7f
    }

    // CRC of the previous PES packet (16 bits, big-endian)
    if h.HasCRC {
        var bs []byte
        if bs, err = i.NextBytesNoCopy(2); err != nil {
            err = fmt.Errorf("astits: fetching next bytes failed: %w", err)
            return
        }
        // Fixed: the high byte was shifted right (always yielding 0); it must
        // be shifted left to reconstruct the 16-bit value
        h.CRC = uint16(bs[0])<<8 | uint16(bs[1])
    }

    // Extension
    if h.HasExtension {
        // Extension flags byte
        if b, err = i.NextByte(); err != nil {
            err = fmt.Errorf("astits: fetching next byte failed: %w", err)
            return
        }
        h.HasPrivateData = b&0x80 > 0
        h.HasPackHeaderField = b&0x40 > 0
        h.HasProgramPacketSequenceCounter = b&0x20 > 0
        h.HasPSTDBuffer = b&0x10 > 0
        h.HasExtension2 = b&0x1 > 0

        // Private data (always 16 bytes when present)
        if h.HasPrivateData {
            if h.PrivateData, err = i.NextBytes(16); err != nil {
                err = fmt.Errorf("astits: fetching next bytes failed: %w", err)
                return
            }
        }

        // Pack field length
        if h.HasPackHeaderField {
            if b, err = i.NextByte(); err != nil {
                err = fmt.Errorf("astits: fetching next byte failed: %w", err)
                return
            }
            // TODO it's only a length of pack_header, should read it all. now it's wrong
            h.PackField = uint8(b)
        }

        // Program packet sequence counter
        if h.HasProgramPacketSequenceCounter {
            var bs []byte
            if bs, err = i.NextBytesNoCopy(2); err != nil {
                err = fmt.Errorf("astits: fetching next bytes failed: %w", err)
                return
            }
            h.PacketSequenceCounter = uint8(bs[0]) & 0x7f
            h.MPEG1OrMPEG2ID = uint8(bs[1]) >> 6 & 0x1
            h.OriginalStuffingLength = uint8(bs[1]) & 0x3f
        }

        // P-STD buffer
        if h.HasPSTDBuffer {
            var bs []byte
            if bs, err = i.NextBytesNoCopy(2); err != nil {
                err = fmt.Errorf("astits: fetching next bytes failed: %w", err)
                return
            }
            h.PSTDBufferScale = bs[0] >> 5 & 0x1
            h.PSTDBufferSize = uint16(bs[0])&0x1f<<8 | uint16(bs[1])
        }

        // Extension 2
        if h.HasExtension2 {
            // Length: 7 bits after a marker bit
            if b, err = i.NextByte(); err != nil {
                // Fixed: message previously said "bytes" for a single-byte read
                err = fmt.Errorf("astits: fetching next byte failed: %w", err)
                return
            }
            h.Extension2Length = uint8(b) & 0x7f

            // Data
            if h.Extension2Data, err = i.NextBytes(int(h.Extension2Length)); err != nil {
                err = fmt.Errorf("astits: fetching next bytes failed: %w", err)
                return
            }
        }
    }
    return
}
// parseDSMTrickMode parses a DSM trick mode byte. Which sub-fields are
// meaningful depends on the 3-bit trick mode control in the top bits.
func parseDSMTrickMode(i byte) (m *DSMTrickMode) {
    m = &DSMTrickMode{TrickModeControl: i >> 5}
    switch m.TrickModeControl {
    case TrickModeControlFastForward, TrickModeControlFastReverse:
        m.FieldID = i >> 3 & 0x3
        m.IntraSliceRefresh = i >> 2 & 0x1
        m.FrequencyTruncation = i & 0x3
    case TrickModeControlFreezeFrame:
        m.FieldID = i >> 3 & 0x3
    case TrickModeControlSlowMotion, TrickModeControlSlowReverse:
        m.RepeatControl = i & 0x1f
    }
    return
}
// parsePTSOrDTS parses a PTS or a DTS: a 33-bit value spread over 5 bytes as
// 3 + 15 + 15 bits, each group followed by a marker bit that is skipped here.
func parsePTSOrDTS(i *astikit.BytesIterator) (cr *ClockReference, err error) {
    var bs []byte
    if bs, err = i.NextBytesNoCopy(5); err != nil {
        err = fmt.Errorf("astits: fetching next bytes failed: %w", err)
        return
    }
    // Reassemble bits 32..30, 29..15 and 14..0 into the clock reference base
    cr = newClockReference(int64(uint64(bs[0])>>1&0x7<<30|uint64(bs[1])<<22|uint64(bs[2])>>1&0x7f<<15|uint64(bs[3])<<7|uint64(bs[4])>>1&0x7f), 0)
    return
}
// parseESCR parses an ESCR: 6 bytes holding a 33-bit base and a 9-bit
// extension, interleaved with marker bits that are skipped here.
func parseESCR(i *astikit.BytesIterator) (cr *ClockReference, err error) {
    var bs []byte
    if bs, err = i.NextBytesNoCopy(6); err != nil {
        err = fmt.Errorf("astits: fetching next bytes failed: %w", err)
        return
    }
    // Collect base and extension into one 42-bit value, then split:
    // top 33 bits are the base, bottom 9 bits are the extension
    escr := uint64(bs[0])>>3&0x7<<39 | uint64(bs[0])&0x3<<37 | uint64(bs[1])<<29 | uint64(bs[2])>>3<<24 | uint64(bs[2])&0x3<<22 | uint64(bs[3])<<14 | uint64(bs[4])>>3<<9 | uint64(bs[4])&0x3<<7 | uint64(bs[5])>>1
    cr = newClockReference(int64(escr>>9), int64(escr&0x1ff))
    return
}
// calcPESDataLength counts how many total bytes and payload bytes will be
// written when writePESData is called with the same arguments.
// It should be used by the caller of writePESData to determine the AF
// stuffing size needed, since video PES packets often have length zero and
// can't simply be stuffed with 0xff-s at the end.
func calcPESDataLength(h *PESHeader, payloadLeft []byte, isPayloadStart bool, bytesAvailable int) (totalBytes, payloadBytes int) {
    // The fixed header is always written; the optional header only on the
    // first packet of the PES
    totalBytes = pesHeaderLength
    if isPayloadStart {
        totalBytes += int(calcPESOptionalHeaderLength(h.OptionalHeader))
    }

    // Payload fills whatever room remains, capped by what's left to send
    room := bytesAvailable - totalBytes
    payloadBytes = len(payloadLeft)
    if payloadBytes >= room {
        payloadBytes = room
    }
    return
}
// writePESData writes one TS packet's worth of a PES.
// first packet will contain PES header with optional PES header and payload, if possible
// all consequential packets will contain just payload
// for the last packet caller must add AF with stuffing, see calcPESDataLength
func writePESData(w *astikit.BitsWriter, h *PESHeader, payloadLeft []byte, isPayloadStart bool, bytesAvailable int) (totalBytesWritten, payloadBytesWritten int, err error) {
    // Only the first packet of the PES carries the header
    if isPayloadStart {
        var n int
        n, err = writePESHeader(w, h, len(payloadLeft))
        if err != nil {
            return
        }
        totalBytesWritten += n
    }

    // Fill the remaining space with payload, capped by what's left to send
    payloadBytesWritten = bytesAvailable - totalBytesWritten
    if payloadBytesWritten > len(payloadLeft) {
        payloadBytesWritten = len(payloadLeft)
    }

    err = w.Write(payloadLeft[:payloadBytesWritten])
    if err != nil {
        return
    }

    totalBytesWritten += payloadBytesWritten
    return
}
// writePESHeader writes the fixed 6-byte PES header (start code prefix,
// stream ID, packet length), followed by the optional header when the stream
// ID carries one. It returns the number of bytes written.
func writePESHeader(w *astikit.BitsWriter, h *PESHeader, payloadSize int) (int, error) {
    b := astikit.NewBitsWriterBatch(w)

    b.WriteN(uint32(0x000001), 24) // packet_start_code_prefix
    b.Write(h.StreamID)

    pesPacketLength := 0

    // Video streams may use a zero (unbounded) PES_packet_length; for other
    // streams compute it, falling back to 0 when it would overflow 16 bits
    if !h.IsVideoStream() {
        pesPacketLength = payloadSize
        if hasPESOptionalHeader(h.StreamID) {
            pesPacketLength += int(calcPESOptionalHeaderLength(h.OptionalHeader))
        }
        if pesPacketLength > 0xffff {
            pesPacketLength = 0
        }
    }

    b.Write(uint16(pesPacketLength))

    bytesWritten := pesHeaderLength

    if hasPESOptionalHeader(h.StreamID) {
        n, err := writePESOptionalHeader(w, h.OptionalHeader)
        if err != nil {
            return 0, err
        }
        bytesWritten += n
    }

    return bytesWritten, b.Err()
}
// calcPESOptionalHeaderLength returns the serialized size of the optional
// header in bytes: 3 fixed bytes plus the variable data, or 0 when absent.
func calcPESOptionalHeaderLength(h *PESOptionalHeader) uint8 {
    if h != nil {
        return 3 + calcPESOptionalHeaderDataLength(h)
    }
    return 0
}
// calcPESOptionalHeaderDataLength computes PES_header_data_length: the size
// in bytes of the optional fields that writePESOptionalHeader will emit
// after the 3 fixed header bytes.
func calcPESOptionalHeaderDataLength(h *PESOptionalHeader) (length uint8) {
    switch h.PTSDTSIndicator {
    case PTSDTSIndicatorOnlyPTS:
        length += ptsOrDTSByteLength
    case PTSDTSIndicatorBothPresent:
        length += 2 * ptsOrDTSByteLength
    }
    if h.HasESCR {
        length += escrLength
    }
    if h.HasESRate {
        length += 3
    }
    if h.HasDSMTrickMode {
        length += dsmTrickModeLength
    }
    if h.HasAdditionalCopyInfo {
        length++
    }
    // h.HasCRC intentionally adds nothing: CRC serialization is not
    // implemented yet (see writePESOptionalHeader)
    if h.HasExtension {
        length++ // extension flags byte
        if h.HasPrivateData {
            length += 16
        }
        // h.HasPackHeaderField adds nothing: not implemented yet
        if h.HasProgramPacketSequenceCounter {
            length += 2
        }
        if h.HasPSTDBuffer {
            length += 2
        }
        if h.HasExtension2 {
            length += 1 + uint8(len(h.Extension2Data))
        }
    }
    return
}
// writePESOptionalHeader serializes the optional PES header, returning the
// number of bytes written. The field order must mirror parsePESOptionalHeader.
func writePESOptionalHeader(w *astikit.BitsWriter, h *PESOptionalHeader) (int, error) {
    // A nil optional header writes nothing
    if h == nil {
        return 0, nil
    }

    b := astikit.NewBitsWriterBatch(w)

    // First byte: marker, scrambling control and boolean flags
    b.WriteN(uint8(0b10), 2) // marker bits
    b.WriteN(h.ScramblingControl, 2)
    b.Write(h.Priority)
    b.Write(h.DataAlignmentIndicator)
    b.Write(h.IsCopyrighted)
    b.Write(h.IsOriginal)

    // Second byte: PTS/DTS indicator and presence flags
    b.WriteN(h.PTSDTSIndicator, 2)
    b.Write(h.HasESCR)
    b.Write(h.HasESRate)
    b.Write(h.HasDSMTrickMode)
    b.Write(h.HasAdditionalCopyInfo)
    b.Write(false) // CRC of previous PES packet. not supported yet
    //b.Write(h.HasCRC)
    b.Write(h.HasExtension)

    // Third byte: PES_header_data_length, the size of everything that follows
    pesOptionalHeaderDataLength := calcPESOptionalHeaderDataLength(h)
    b.Write(pesOptionalHeaderDataLength)

    bytesWritten := 3

    if h.PTSDTSIndicator == PTSDTSIndicatorOnlyPTS {
        n, err := writePTSOrDTS(w, 0b0010, h.PTS)
        if err != nil {
            return 0, err
        }
        bytesWritten += n
    }

    if h.PTSDTSIndicator == PTSDTSIndicatorBothPresent {
        n, err := writePTSOrDTS(w, 0b0011, h.PTS)
        if err != nil {
            return 0, err
        }
        bytesWritten += n

        n, err = writePTSOrDTS(w, 0b0001, h.DTS)
        if err != nil {
            return 0, err
        }
        bytesWritten += n
    }

    if h.HasESCR {
        n, err := writeESCR(w, h.ESCR)
        if err != nil {
            return 0, err
        }
        bytesWritten += n
    }

    // ES rate: 22 bits between two marker bits
    if h.HasESRate {
        b.Write(true)
        b.WriteN(h.ESRate, 22)
        b.Write(true)
        bytesWritten += 3
    }

    if h.HasDSMTrickMode {
        n, err := writeDSMTrickMode(w, h.DSMTrickMode)
        if err != nil {
            return 0, err
        }
        bytesWritten += n
    }

    // Additional copy info: 7 bits after a marker bit
    if h.HasAdditionalCopyInfo {
        b.Write(true) // marker_bit
        b.WriteN(h.AdditionalCopyInfo, 7)
        bytesWritten++
    }

    if h.HasCRC {
        // TODO, not supported
    }

    if h.HasExtension {
        // Extension flags byte
        // exp 10110001
        // act 10111111
        b.Write(h.HasPrivateData)
        b.Write(false) // TODO pack_header_field_flag, not implemented
        //b.Write(h.HasPackHeaderField)
        b.Write(h.HasProgramPacketSequenceCounter)
        b.Write(h.HasPSTDBuffer)
        b.WriteN(uint8(0xff), 3) // reserved
        b.Write(h.HasExtension2)
        bytesWritten++

        // Private data is padded/truncated to exactly 16 bytes
        if h.HasPrivateData {
            b.WriteBytesN(h.PrivateData, 16, 0)
            bytesWritten += 16
        }

        if h.HasPackHeaderField {
            // TODO (see parsePESOptionalHeader)
        }

        if h.HasProgramPacketSequenceCounter {
            b.Write(true) // marker_bit
            b.WriteN(h.PacketSequenceCounter, 7)
            b.Write(true) // marker_bit
            b.WriteN(h.MPEG1OrMPEG2ID, 1)
            b.WriteN(h.OriginalStuffingLength, 6)
            bytesWritten += 2
        }

        if h.HasPSTDBuffer {
            b.WriteN(uint8(0b01), 2)
            b.WriteN(h.PSTDBufferScale, 1)
            b.WriteN(h.PSTDBufferSize, 13)
            bytesWritten += 2
        }

        // Extension 2: marker bit, 7-bit length, then the data itself
        if h.HasExtension2 {
            b.Write(true) // marker_bit
            b.WriteN(uint8(len(h.Extension2Data)), 7)
            b.Write(h.Extension2Data)
            bytesWritten += 1 + len(h.Extension2Data)
        }
    }
    return bytesWritten, b.Err()
}
// writeDSMTrickMode serializes a DSM trick mode byte; the layout of the low
// 5 bits depends on the trick mode control. Mirrors parseDSMTrickMode.
func writeDSMTrickMode(w *astikit.BitsWriter, m *DSMTrickMode) (int, error) {
    b := astikit.NewBitsWriterBatch(w)
    b.WriteN(m.TrickModeControl, 3)
    switch m.TrickModeControl {
    case TrickModeControlFastForward, TrickModeControlFastReverse:
        b.WriteN(m.FieldID, 2)
        b.Write(m.IntraSliceRefresh == 1) // it should be boolean
        b.WriteN(m.FrequencyTruncation, 2)
    case TrickModeControlFreezeFrame:
        b.WriteN(m.FieldID, 2)
        b.WriteN(uint8(0xff), 3) // reserved
    case TrickModeControlSlowMotion, TrickModeControlSlowReverse:
        b.WriteN(m.RepeatControl, 5)
    default:
        b.WriteN(uint8(0xff), 5) // reserved
    }
    return dsmTrickModeLength, b.Err()
}
// writeESCR serializes an ESCR (escrLength bytes): the 33-bit base and 9-bit
// extension interleaved with marker bits. Mirrors parseESCR.
func writeESCR(w *astikit.BitsWriter, cr *ClockReference) (int, error) {
    b := astikit.NewBitsWriterBatch(w)
    b.WriteN(uint8(0xff), 2)          // reserved
    b.WriteN(uint64(cr.Base>>30), 3)  // base bits 32..30
    b.Write(true)                     // marker_bit
    b.WriteN(uint64(cr.Base>>15), 15) // base bits 29..15
    b.Write(true)                     // marker_bit
    b.WriteN(uint64(cr.Base), 15)     // base bits 14..0
    b.Write(true)                     // marker_bit
    b.WriteN(uint64(cr.Extension), 9) // 9-bit extension
    b.Write(true)                     // marker_bit
    return escrLength, b.Err()
}
// writePTSOrDTS serializes a 33-bit PTS or DTS over ptsOrDTSByteLength bytes:
// a 4-bit flag, then 3 + 15 + 15 bits of the base, each group followed by a
// marker bit. Mirrors parsePTSOrDTS.
func writePTSOrDTS(w *astikit.BitsWriter, flag uint8, cr *ClockReference) (bytesWritten int, retErr error) {
    b := astikit.NewBitsWriterBatch(w)
    b.WriteN(flag, 4)
    b.WriteN(uint64(cr.Base>>30), 3)  // base bits 32..30
    b.Write(true)                     // marker_bit
    b.WriteN(uint64(cr.Base>>15), 15) // base bits 29..15
    b.Write(true)                     // marker_bit
    b.WriteN(uint64(cr.Base), 15)     // base bits 14..0
    b.Write(true)                     // marker_bit
    return ptsOrDTSByteLength, b.Err()
}

256
vendor/github.com/asticode/go-astits/data_pmt.go generated vendored Normal file
View file

@ -0,0 +1,256 @@
package astits
import (
"fmt"
"github.com/asticode/go-astikit"
)
// StreamType is the 8-bit stream_type carried in the PMT elementary stream loop
type StreamType uint8

// Stream types
// Note: some wire values intentionally have two names (0x04, 0x06, 0x0f,
// 0x24); the second constant is an alias for the same value.
const (
    StreamTypeMPEG1Video                 StreamType = 0x01
    StreamTypeMPEG2Video                 StreamType = 0x02
    StreamTypeMPEG1Audio                 StreamType = 0x03 // ISO/IEC 11172-3
    StreamTypeMPEG2HalvedSampleRateAudio StreamType = 0x04 // ISO/IEC 13818-3
    StreamTypeMPEG2Audio                 StreamType = 0x04
    StreamTypePrivateSection             StreamType = 0x05
    StreamTypePrivateData                StreamType = 0x06
    StreamTypeMPEG2PacketizedData        StreamType = 0x06 // Rec. ITU-T H.222 | ISO/IEC 13818-1 i.e., DVB subtitles/VBI and AC-3
    StreamTypeADTS                       StreamType = 0x0F // ISO/IEC 13818-7 Audio with ADTS transport syntax
    StreamTypeAACAudio                   StreamType = 0x0f
    StreamTypeMPEG4Video                 StreamType = 0x10
    StreamTypeAACLATMAudio               StreamType = 0x11
    StreamTypeMetadata                   StreamType = 0x15
    StreamTypeH264Video                  StreamType = 0x1B // Rec. ITU-T H.264 | ISO/IEC 14496-10
    StreamTypeH265Video                  StreamType = 0x24 // Rec. ITU-T H.265 | ISO/IEC 23008-2
    StreamTypeHEVCVideo                  StreamType = 0x24
    StreamTypeCAVSVideo                  StreamType = 0x42
    StreamTypeVC1Video                   StreamType = 0xea
    StreamTypeDIRACVideo                 StreamType = 0xd1
    StreamTypeAC3Audio                   StreamType = 0x81
    StreamTypeDTSAudio                   StreamType = 0x82
    StreamTypeTRUEHDAudio                StreamType = 0x83
    StreamTypeEAC3Audio                  StreamType = 0x87
)
// PMTData represents a PMT data
// https://en.wikipedia.org/wiki/Program-specific_information
type PMTData struct {
    ElementaryStreams []*PMTElementaryStream
    PCRPID            uint16        // The packet identifier that contains the program clock reference used to improve the random access accuracy of the stream's timing that is derived from the program timestamp. If this is unused. then it is set to 0x1FFF (all bits on).
    ProgramDescriptors []*Descriptor // Program descriptors
    ProgramNumber      uint16        // Taken from the section's table ID extension
}

// PMTElementaryStream represents a PMT elementary stream
type PMTElementaryStream struct {
    ElementaryPID               uint16        // The packet identifier that contains the stream type data.
    ElementaryStreamDescriptors []*Descriptor // Elementary stream descriptors
    StreamType                  StreamType    // This defines the structure of the data contained within the elementary packet identifier.
}
// parsePMTSection parses a PMT section
func parsePMTSection(i *astikit.BytesIterator, offsetSectionsEnd int, tableIDExtension uint16) (d *PMTData, err error) {
    // Create data; the table ID extension carries the program number
    d = &PMTData{ProgramNumber: tableIDExtension}

    // Get next bytes: 3 reserved bits + 13-bit PCR PID
    var bs []byte
    if bs, err = i.NextBytesNoCopy(2); err != nil {
        err = fmt.Errorf("astits: fetching next bytes failed: %w", err)
        return
    }

    // PCR PID
    d.PCRPID = uint16(bs[0]&0x1f)<<8 | uint16(bs[1])

    // Program descriptors
    if d.ProgramDescriptors, err = parseDescriptors(i); err != nil {
        err = fmt.Errorf("astits: parsing descriptors failed: %w", err)
        return
    }

    // Loop until end of section data is reached
    for i.Offset() < offsetSectionsEnd {
        // Create stream
        e := &PMTElementaryStream{}

        // Get next byte
        var b byte
        if b, err = i.NextByte(); err != nil {
            err = fmt.Errorf("astits: fetching next byte failed: %w", err)
            return
        }

        // Stream type
        e.StreamType = StreamType(b)

        // Get next bytes: 3 reserved bits + 13-bit elementary PID
        if bs, err = i.NextBytesNoCopy(2); err != nil {
            err = fmt.Errorf("astits: fetching next bytes failed: %w", err)
            return
        }

        // Elementary PID
        e.ElementaryPID = uint16(bs[0]&0x1f)<<8 | uint16(bs[1])

        // Elementary descriptors
        if e.ElementaryStreamDescriptors, err = parseDescriptors(i); err != nil {
            err = fmt.Errorf("astits: parsing descriptors failed: %w", err)
            return
        }

        // Add elementary stream
        d.ElementaryStreams = append(d.ElementaryStreams, e)
    }
    return
}
// calcPMTProgramInfoLength computes a byte count used when sizing PMT output.
// NOTE(review): despite the name, this covers more than program_info_length —
// it also adds 5 bytes plus descriptors per elementary stream. Its caller is
// not visible here; confirm the intended semantics before relying on the name.
func calcPMTProgramInfoLength(d *PMTData) uint16 {
    ret := uint16(2) // program_info_length
    ret += calcDescriptorsLength(d.ProgramDescriptors)

    for _, es := range d.ElementaryStreams {
        ret += 5 // stream_type, elementary_pid, es_info_length
        ret += calcDescriptorsLength(es.ElementaryStreamDescriptors)
    }

    return ret
}
// calcPMTSectionLength returns the number of bytes writePMTSection will emit:
// 4 fixed bytes, the program descriptors, then 5 bytes plus descriptors per
// elementary stream.
func calcPMTSectionLength(d *PMTData) uint16 {
    ret := uint16(4)
    ret += calcDescriptorsLength(d.ProgramDescriptors)

    for _, es := range d.ElementaryStreams {
        ret += 5
        ret += calcDescriptorsLength(es.ElementaryStreamDescriptors)
    }

    return ret
}
// writePMTSection serializes the PMT section data, returning the number of
// bytes written. Mirrors parsePMTSection.
func writePMTSection(w *astikit.BitsWriter, d *PMTData) (int, error) {
    b := astikit.NewBitsWriterBatch(w)

    // TODO split into sections

    // 3 reserved bits + 13-bit PCR PID
    b.WriteN(uint8(0xff), 3)
    b.WriteN(d.PCRPID, 13)
    bytesWritten := 2

    // Program descriptors (preceded by their length field)
    n, err := writeDescriptorsWithLength(w, d.ProgramDescriptors)
    if err != nil {
        return 0, err
    }
    bytesWritten += n

    for _, es := range d.ElementaryStreams {
        // stream_type, 3 reserved bits, 13-bit elementary PID
        b.Write(uint8(es.StreamType))
        b.WriteN(uint8(0xff), 3)
        b.WriteN(es.ElementaryPID, 13)
        bytesWritten += 3

        // Elementary stream descriptors (preceded by their length field)
        n, err = writeDescriptorsWithLength(w, es.ElementaryStreamDescriptors)
        if err != nil {
            return 0, err
        }
        bytesWritten += n
    }

    return bytesWritten, b.Err()
}
// IsVideo reports whether the stream type is one of the known video codecs.
func (t StreamType) IsVideo() bool {
    return t == StreamTypeMPEG1Video ||
        t == StreamTypeMPEG2Video ||
        t == StreamTypeMPEG4Video ||
        t == StreamTypeH264Video ||
        t == StreamTypeH265Video ||
        t == StreamTypeCAVSVideo ||
        t == StreamTypeVC1Video ||
        t == StreamTypeDIRACVideo
}
// IsAudio reports whether the stream type is one of the known audio codecs.
func (t StreamType) IsAudio() bool {
    return t == StreamTypeMPEG1Audio ||
        t == StreamTypeMPEG2Audio ||
        t == StreamTypeAACAudio ||
        t == StreamTypeAACLATMAudio ||
        t == StreamTypeAC3Audio ||
        t == StreamTypeDTSAudio ||
        t == StreamTypeTRUEHDAudio ||
        t == StreamTypeEAC3Audio
}
// String implements fmt.Stringer, returning a human-readable name for the
// stream type ("Unknown" for unrecognized values).
func (t StreamType) String() string {
    switch t {
    case StreamTypeMPEG1Video:
        return "MPEG1 Video"
    case StreamTypeMPEG2Video:
        return "MPEG2 Video"
    case StreamTypeMPEG1Audio:
        return "MPEG1 Audio"
    case StreamTypeMPEG2Audio:
        return "MPEG2 Audio"
    case StreamTypePrivateSection:
        return "Private Section"
    case StreamTypePrivateData:
        return "Private Data"
    case StreamTypeAACAudio:
        return "AAC Audio"
    case StreamTypeMPEG4Video:
        return "MPEG4 Video"
    case StreamTypeAACLATMAudio:
        return "AAC LATM Audio"
    case StreamTypeMetadata:
        return "Metadata"
    case StreamTypeH264Video:
        return "H264 Video"
    case StreamTypeH265Video:
        return "H265 Video"
    case StreamTypeCAVSVideo:
        return "CAVS Video"
    case StreamTypeVC1Video:
        return "VC1 Video"
    case StreamTypeDIRACVideo:
        return "DIRAC Video"
    case StreamTypeAC3Audio:
        return "AC3 Audio"
    case StreamTypeDTSAudio:
        return "DTS Audio"
    case StreamTypeTRUEHDAudio:
        return "TRUEHD Audio"
    case StreamTypeEAC3Audio:
        return "EAC3 Audio"
    }
    return "Unknown"
}
// ToPESStreamID maps the PMT stream type to the PES stream ID used when
// muxing: 0xe0 for most video, 0xfd for extended streams (DIRAC, AC3/EAC3),
// 0xc0 for audio, 0xfc for metadata/private sections, and 0xbd (private
// stream 1) for anything else.
func (t StreamType) ToPESStreamID() uint8 {
    switch t {
    case StreamTypeMPEG1Video, StreamTypeMPEG2Video, StreamTypeMPEG4Video, StreamTypeH264Video,
        StreamTypeH265Video, StreamTypeCAVSVideo, StreamTypeVC1Video:
        return 0xe0
    case StreamTypeDIRACVideo:
        return 0xfd
    case StreamTypeMPEG2Audio, StreamTypeAACAudio, StreamTypeAACLATMAudio:
        return 0xc0
    case StreamTypeAC3Audio, StreamTypeEAC3Audio: // m2ts_mode???
        return 0xfd
    case StreamTypePrivateSection, StreamTypePrivateData, StreamTypeMetadata:
        return 0xfc
    default:
        return 0xbd
    }
}

608
vendor/github.com/asticode/go-astits/data_psi.go generated vendored Normal file
View file

@ -0,0 +1,608 @@
package astits
import (
"fmt"
"github.com/asticode/go-astikit"
)
// PSI table IDs
const (
    PSITableTypeBAT     = "BAT"     // Bouquet Association Table
    PSITableTypeDIT     = "DIT"     // Discontinuity Information Table
    PSITableTypeEIT     = "EIT"     // Event Information Table
    PSITableTypeNIT     = "NIT"     // Network Information Table
    PSITableTypeNull    = "Null"    // Null (stuffing) sections
    PSITableTypePAT     = "PAT"     // Program Association Table
    PSITableTypePMT     = "PMT"     // Program Map Table
    PSITableTypeRST     = "RST"     // Running Status Table
    PSITableTypeSDT     = "SDT"     // Service Description Table
    PSITableTypeSIT     = "SIT"     // Selection Information Table
    PSITableTypeST      = "ST"      // Stuffing Table
    PSITableTypeTDT     = "TDT"     // Time and Date Table
    PSITableTypeTOT     = "TOT"     // Time Offset Table
    PSITableTypeUnknown = "Unknown" // Unrecognized table ID
)

// PSITableID identifies the type of a PSI section (the value fits in the
// section's first byte; stored as uint16)
type PSITableID uint16

const (
    PSITableIDPAT  PSITableID = 0x00
    PSITableIDPMT  PSITableID = 0x02
    PSITableIDBAT  PSITableID = 0x4a
    PSITableIDDIT  PSITableID = 0x7e
    PSITableIDRST  PSITableID = 0x71
    PSITableIDSIT  PSITableID = 0x7f
    PSITableIDST   PSITableID = 0x72
    PSITableIDTDT  PSITableID = 0x70
    PSITableIDTOT  PSITableID = 0x73
    PSITableIDNull PSITableID = 0xff

    // Some tables occupy a range (EIT) or several discrete values (SDT, NIT)
    // rather than a single table ID
    PSITableIDEITStart    PSITableID = 0x4e
    PSITableIDEITEnd      PSITableID = 0x6f
    PSITableIDSDTVariant1 PSITableID = 0x42
    PSITableIDSDTVariant2 PSITableID = 0x46
    PSITableIDNITVariant1 PSITableID = 0x40
    PSITableIDNITVariant2 PSITableID = 0x41
)
// PSIData represents a PSI data
// https://en.wikipedia.org/wiki/Program-specific_information
type PSIData struct {
    PointerField int // Present at the start of the TS packet payload signaled by the payload_unit_start_indicator bit in the TS header. Used to set packet alignment bytes or content before the start of tabled payload data.
    Sections     []*PSISection
}

// PSISection represents a PSI section
type PSISection struct {
    CRC32  uint32 // A checksum of the entire table excluding the pointer field, pointer filler bytes and the trailing CRC32.
    Header *PSISectionHeader
    Syntax *PSISectionSyntax // nil when the section has no syntax section
}

// PSISectionHeader represents a PSI section header
type PSISectionHeader struct {
    PrivateBit             bool       // The PAT, PMT, and CAT all set this to 0. Other tables set this to 1.
    SectionLength          uint16     // The number of bytes that follow for the syntax section (with CRC value) and/or table data. These bytes must not exceed a value of 1021.
    SectionSyntaxIndicator bool       // A flag that indicates if the syntax section follows the section length. The PAT, PMT, and CAT all set this to 1.
    TableID                PSITableID // Table Identifier, that defines the structure of the syntax section and other contained data. As an exception, if this is the byte that immediately follow previous table section and is set to 0xFF, then it indicates that the repeat of table section end here and the rest of TS data payload shall be stuffed with 0xFF. Consequently the value 0xFF shall not be used for the Table Identifier.
    TableType              string     // Human-readable name derived from TableID
}

// PSISectionSyntax represents a PSI section syntax
type PSISectionSyntax struct {
    Data   *PSISectionSyntaxData
    Header *PSISectionSyntaxHeader
}

// PSISectionSyntaxHeader represents a PSI section syntax header
type PSISectionSyntaxHeader struct {
    CurrentNextIndicator bool   // Indicates if data is current in effect or is for future use. If the bit is flagged on, then the data is to be used at the present moment.
    LastSectionNumber    uint8  // This indicates which table is the last table in the sequence of tables.
    SectionNumber        uint8  // This is an index indicating which table this is in a related sequence of tables. The first table starts from 0.
    TableIDExtension     uint16 // Informational only identifier. The PAT uses this for the transport stream identifier and the PMT uses this for the Program number.
    VersionNumber        uint8  // Syntax version number. Incremented when data is changed and wrapped around on overflow for values greater than 32.
}

// PSISectionSyntaxData represents a PSI section syntax data;
// only the field matching the section's table type is populated
type PSISectionSyntaxData struct {
    EIT *EITData
    NIT *NITData
    PAT *PATData
    PMT *PMTData
    SDT *SDTData
    TOT *TOTData
}
// parsePSIData parses a PSI data: the pointer field followed by one or more
// sections, stopping early at null/unknown table IDs.
func parsePSIData(i *astikit.BytesIterator) (d *PSIData, err error) {
    // Init data
    d = &PSIData{}

    // Get next byte
    var b byte
    if b, err = i.NextByte(); err != nil {
        err = fmt.Errorf("astits: fetching next byte failed: %w", err)
        return
    }

    // Pointer field
    d.PointerField = int(b)

    // Pointer filler bytes
    i.Skip(d.PointerField)

    // Parse sections until the data is exhausted or a section requests a stop
    var s *PSISection
    var stop bool
    for i.HasBytesLeft() && !stop {
        if s, stop, err = parsePSISection(i); err != nil {
            err = fmt.Errorf("astits: parsing PSI table failed: %w", err)
            return
        }
        d.Sections = append(d.Sections, s)
    }
    return
}
// parsePSISection parses a PSI section. stop is true when a null/unknown
// table ID indicates that the remaining payload is stuffing.
func parsePSISection(i *astikit.BytesIterator) (s *PSISection, stop bool, err error) {
    // Init section
    s = &PSISection{}

    // Parse header
    var offsetStart, offsetSectionsEnd, offsetEnd int
    if s.Header, offsetStart, _, offsetSectionsEnd, offsetEnd, err = parsePSISectionHeader(i); err != nil {
        err = fmt.Errorf("astits: parsing PSI section header failed: %w", err)
        return
    }

    // Check whether we need to stop the parsing
    if shouldStopPSIParsing(s.Header.TableID) {
        stop = true
        return
    }

    // Check whether there's a syntax section
    if s.Header.SectionLength > 0 {
        // Parse syntax
        if s.Syntax, err = parsePSISectionSyntax(i, s.Header, offsetSectionsEnd); err != nil {
            err = fmt.Errorf("astits: parsing PSI section syntax failed: %w", err)
            return
        }

        // Process CRC32 for table types that carry one
        if s.Header.TableID.hasCRC32() {
            // Seek to the end of the sections
            i.Seek(offsetSectionsEnd)

            // Parse CRC32
            if s.CRC32, err = parseCRC32(i); err != nil {
                err = fmt.Errorf("astits: parsing CRC32 failed: %w", err)
                return
            }

            // Get CRC32 data: everything from the section start up to the CRC
            i.Seek(offsetStart)
            var crc32Data []byte
            if crc32Data, err = i.NextBytesNoCopy(offsetSectionsEnd - offsetStart); err != nil {
                err = fmt.Errorf("astits: fetching next bytes failed: %w", err)
                return
            }

            // Compute CRC32
            crc32 := computeCRC32(crc32Data)

            // Check CRC32 against the value carried in the stream
            if crc32 != s.CRC32 {
                err = fmt.Errorf("astits: Table CRC32 %x != computed CRC32 %x", s.CRC32, crc32)
                return
            }
        }
    }

    // Seek to the end of the section
    i.Seek(offsetEnd)
    return
}
// parseCRC32 reads the next 4 bytes as a big-endian 32-bit CRC.
func parseCRC32(i *astikit.BytesIterator) (c uint32, err error) {
	var bs []byte
	if bs, err = i.NextBytesNoCopy(4); err != nil {
		err = fmt.Errorf("astits: fetching next bytes failed: %w", err)
		return
	}
	// Fold the 4 bytes most-significant first
	for _, x := range bs[:4] {
		c = c<<8 | uint32(x)
	}
	return
}
// shouldStopPSIParsing checks whether the PSI parsing should be stopped:
// null tables and unknown table IDs carry nothing worth parsing.
func shouldStopPSIParsing(tableID PSITableID) bool {
	if tableID == PSITableIDNull {
		return true
	}
	return tableID.isUnknown()
}
// parsePSISectionHeader parses a PSI section header and returns the offsets
// delimiting the section:
//   - offsetStart: first byte of the header (start of the CRC32-covered area)
//   - offsetSectionsStart: first byte after the 3-byte header
//   - offsetSectionsEnd: end of the payload, excluding the 4-byte CRC32 when the table has one
//   - offsetEnd: end of the whole section
func parsePSISectionHeader(i *astikit.BytesIterator) (h *PSISectionHeader, offsetStart, offsetSectionsStart, offsetSectionsEnd, offsetEnd int, err error) {
	// Init
	h = &PSISectionHeader{}
	offsetStart = i.Offset()
	// Get next byte
	var b byte
	if b, err = i.NextByte(); err != nil {
		err = fmt.Errorf("astits: fetching next byte failed: %w", err)
		return
	}
	// Table ID
	h.TableID = PSITableID(b)
	// Table type
	h.TableType = h.TableID.Type()
	// Null and unknown tables have no further header to parse
	if shouldStopPSIParsing(h.TableID) {
		return
	}
	// Get next bytes
	var bs []byte
	if bs, err = i.NextBytesNoCopy(2); err != nil {
		err = fmt.Errorf("astits: fetching next bytes failed: %w", err)
		return
	}
	// Section syntax indicator (bit 7 of the first byte)
	h.SectionSyntaxIndicator = bs[0]&0x80 > 0
	// Private bit (bit 6)
	h.PrivateBit = bs[0]&0x40 > 0
	// Section length: 12 bits, the low 4 bits of bs[0] followed by bs[1];
	// it counts the bytes following this field
	h.SectionLength = uint16(bs[0]&0xf)<<8 | uint16(bs[1])
	// Offsets
	offsetSectionsStart = i.Offset()
	offsetEnd = offsetSectionsStart + int(h.SectionLength)
	offsetSectionsEnd = offsetEnd
	// The CRC32, when present, occupies the last 4 bytes of the section
	if h.TableID.hasCRC32() {
		offsetSectionsEnd -= 4
	}
	return
}
// Type returns the PSI table type based on the table id.
// Page: 28 | https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf
// (barbashov) the link above can be broken, alternative: https://dvb.org/wp-content/uploads/2019/12/a038_tm1217r37_en300468v1_17_1_-_rev-134_-_si_specification.pdf
func (t PSITableID) Type() string {
	// Guard-clause chain, checked in the same order as the original switch
	if t == PSITableIDBAT {
		return PSITableTypeBAT
	}
	if t >= PSITableIDEITStart && t <= PSITableIDEITEnd {
		return PSITableTypeEIT
	}
	if t == PSITableIDDIT {
		return PSITableTypeDIT
	}
	if t == PSITableIDNITVariant1 || t == PSITableIDNITVariant2 {
		return PSITableTypeNIT
	}
	if t == PSITableIDNull {
		return PSITableTypeNull
	}
	if t == PSITableIDPAT {
		return PSITableTypePAT
	}
	if t == PSITableIDPMT {
		return PSITableTypePMT
	}
	if t == PSITableIDRST {
		return PSITableTypeRST
	}
	if t == PSITableIDSDTVariant1 || t == PSITableIDSDTVariant2 {
		return PSITableTypeSDT
	}
	if t == PSITableIDSIT {
		return PSITableTypeSIT
	}
	if t == PSITableIDST {
		return PSITableTypeST
	}
	if t == PSITableIDTDT {
		return PSITableTypeTDT
	}
	if t == PSITableIDTOT {
		return PSITableTypeTOT
	}
	return PSITableTypeUnknown
}
// hasPSISyntaxHeader checks whether the section has a syntax header
// (PAT, PMT, NIT, SDT and the EIT table-ID range).
func (t PSITableID) hasPSISyntaxHeader() bool {
	switch t {
	case PSITableIDPAT,
		PSITableIDPMT,
		PSITableIDNITVariant1, PSITableIDNITVariant2,
		PSITableIDSDTVariant1, PSITableIDSDTVariant2:
		return true
	}
	// EIT tables span a range of IDs rather than a single value
	return t >= PSITableIDEITStart && t <= PSITableIDEITEnd
}
// hasCRC32 checks whether the table carries a trailing CRC32
// (PAT, PMT, TOT, NIT, SDT and the EIT table-ID range).
func (t PSITableID) hasCRC32() bool {
	switch t {
	case PSITableIDPAT,
		PSITableIDPMT,
		PSITableIDTOT,
		PSITableIDNITVariant1, PSITableIDNITVariant2,
		PSITableIDSDTVariant1, PSITableIDSDTVariant2:
		return true
	}
	// EIT tables span a range of IDs rather than a single value
	return t >= PSITableIDEITStart && t <= PSITableIDEITEnd
}
// isUnknown reports whether the table ID is outside the set of known
// PSI tables (including the EIT table-ID range and the null table).
func (t PSITableID) isUnknown() bool {
	// Known contiguous range: EIT table IDs
	if t >= PSITableIDEITStart && t <= PSITableIDEITEnd {
		return false
	}
	// Known individual table IDs
	switch t {
	case PSITableIDBAT,
		PSITableIDDIT,
		PSITableIDNITVariant1, PSITableIDNITVariant2,
		PSITableIDNull,
		PSITableIDPAT,
		PSITableIDPMT,
		PSITableIDRST,
		PSITableIDSDTVariant1, PSITableIDSDTVariant2,
		PSITableIDSIT,
		PSITableIDST,
		PSITableIDTDT,
		PSITableIDTOT:
		return false
	default:
		return true
	}
}
// parsePSISectionSyntax parses a PSI section syntax: an optional syntax
// header (depending on the table ID) followed by the table-specific data.
func parsePSISectionSyntax(i *astikit.BytesIterator, h *PSISectionHeader, offsetSectionsEnd int) (s *PSISectionSyntax, err error) {
	s = &PSISectionSyntax{}

	// Only some tables carry a syntax header
	if h.TableID.hasPSISyntaxHeader() {
		var sh *PSISectionSyntaxHeader
		if sh, err = parsePSISectionSyntaxHeader(i); err != nil {
			err = fmt.Errorf("astits: parsing PSI section syntax header failed: %w", err)
			return
		}
		s.Header = sh
	}

	// Table-specific payload
	var data *PSISectionSyntaxData
	if data, err = parsePSISectionSyntaxData(i, h, s.Header, offsetSectionsEnd); err != nil {
		err = fmt.Errorf("astits: parsing PSI section syntax data failed: %w", err)
		return
	}
	s.Data = data
	return
}
// parsePSISectionSyntaxHeader parses the 5-byte PSI section syntax header:
// a 16-bit table ID extension, 2 reserved bits, a 5-bit version number,
// a 1-bit current/next indicator, the section number and the last section number.
func parsePSISectionSyntaxHeader(i *astikit.BytesIterator) (h *PSISectionSyntaxHeader, err error) {
	// Init
	h = &PSISectionSyntaxHeader{}
	// Get next 2 bytes
	var bs []byte
	if bs, err = i.NextBytesNoCopy(2); err != nil {
		err = fmt.Errorf("astits: fetching next bytes failed: %w", err)
		return
	}
	// Table ID extension (16 bits, big endian)
	h.TableIDExtension = uint16(bs[0])<<8 | uint16(bs[1])
	// Get next byte
	var b byte
	if b, err = i.NextByte(); err != nil {
		err = fmt.Errorf("astits: fetching next byte failed: %w", err)
		return
	}
	// Version number: bits 5-1 of the byte (the top 2 bits are reserved)
	h.VersionNumber = uint8(b&0x3f) >> 1
	// Current/Next indicator: bit 0
	h.CurrentNextIndicator = b&0x1 > 0
	// Get next byte
	if b, err = i.NextByte(); err != nil {
		err = fmt.Errorf("astits: fetching next byte failed: %w", err)
		return
	}
	// Section number
	h.SectionNumber = uint8(b)
	// Get next byte
	if b, err = i.NextByte(); err != nil {
		err = fmt.Errorf("astits: fetching next byte failed: %w", err)
		return
	}
	// Last section number
	h.LastSectionNumber = uint8(b)
	return
}
// parsePSISectionSyntaxData parses the table-specific payload of a PSI section
// and fills exactly one field of the returned PSISectionSyntaxData.
// sh may be nil for tables without a syntax header; every branch below that
// dereferences sh corresponds to a table for which hasPSISyntaxHeader is true,
// so the caller has parsed a header for it.
func parsePSISectionSyntaxData(i *astikit.BytesIterator, h *PSISectionHeader, sh *PSISectionSyntaxHeader, offsetSectionsEnd int) (d *PSISectionSyntaxData, err error) {
	// Init
	d = &PSISectionSyntaxData{}
	// Switch on table type
	switch h.TableID {
	case PSITableIDBAT:
		// TODO Parse BAT
	case PSITableIDDIT:
		// TODO Parse DIT
	case PSITableIDNITVariant1, PSITableIDNITVariant2:
		if d.NIT, err = parseNITSection(i, sh.TableIDExtension); err != nil {
			err = fmt.Errorf("astits: parsing NIT section failed: %w", err)
			return
		}
	case PSITableIDPAT:
		if d.PAT, err = parsePATSection(i, offsetSectionsEnd, sh.TableIDExtension); err != nil {
			err = fmt.Errorf("astits: parsing PAT section failed: %w", err)
			return
		}
	case PSITableIDPMT:
		if d.PMT, err = parsePMTSection(i, offsetSectionsEnd, sh.TableIDExtension); err != nil {
			err = fmt.Errorf("astits: parsing PMT section failed: %w", err)
			return
		}
	case PSITableIDRST:
		// TODO Parse RST
	case PSITableIDSDTVariant1, PSITableIDSDTVariant2:
		if d.SDT, err = parseSDTSection(i, offsetSectionsEnd, sh.TableIDExtension); err != nil {
			// Fixed copy-paste bug: this branch previously reported "PMT"
			err = fmt.Errorf("astits: parsing SDT section failed: %w", err)
			return
		}
	case PSITableIDSIT:
		// TODO Parse SIT
	case PSITableIDST:
		// TODO Parse ST
	case PSITableIDTOT:
		if d.TOT, err = parseTOTSection(i); err != nil {
			err = fmt.Errorf("astits: parsing TOT section failed: %w", err)
			return
		}
	case PSITableIDTDT:
		// TODO Parse TDT
	}
	// EIT table IDs span a range, so they are handled outside the switch
	if h.TableID >= PSITableIDEITStart && h.TableID <= PSITableIDEITEnd {
		if d.EIT, err = parseEITSection(i, offsetSectionsEnd, sh.TableIDExtension); err != nil {
			err = fmt.Errorf("astits: parsing EIT section failed: %w", err)
			return
		}
	}
	return
}
// toData converts the parsed PSI sections into a set of DemuxerData,
// one entry per section of a known data-bearing table type.
func (d *PSIData) toData(firstPacket *Packet, pid uint16) (ds []*DemuxerData) {
	for _, s := range d.Sections {
		id := s.Header.TableID
		if id == PSITableIDNITVariant1 || id == PSITableIDNITVariant2 {
			ds = append(ds, &DemuxerData{FirstPacket: firstPacket, NIT: s.Syntax.Data.NIT, PID: pid})
		} else if id == PSITableIDPAT {
			ds = append(ds, &DemuxerData{FirstPacket: firstPacket, PAT: s.Syntax.Data.PAT, PID: pid})
		} else if id == PSITableIDPMT {
			ds = append(ds, &DemuxerData{FirstPacket: firstPacket, PID: pid, PMT: s.Syntax.Data.PMT})
		} else if id == PSITableIDSDTVariant1 || id == PSITableIDSDTVariant2 {
			ds = append(ds, &DemuxerData{FirstPacket: firstPacket, PID: pid, SDT: s.Syntax.Data.SDT})
		} else if id == PSITableIDTOT {
			ds = append(ds, &DemuxerData{FirstPacket: firstPacket, PID: pid, TOT: s.Syntax.Data.TOT})
		}
		// EIT table IDs span a range and are checked independently,
		// mirroring the dispatch in parsePSISectionSyntaxData
		if id >= PSITableIDEITStart && id <= PSITableIDEITEnd {
			ds = append(ds, &DemuxerData{EIT: s.Syntax.Data.EIT, FirstPacket: firstPacket, PID: pid})
		}
	}
	return
}
// writePSIData writes a PSI data (pointer field, filler bytes and all
// sections) to w and returns the total number of bytes written.
func writePSIData(w *astikit.BitsWriter, d *PSIData) (int, error) {
	b := astikit.NewBitsWriterBatch(w)

	// Pointer field followed by that many zero filler bytes
	b.Write(uint8(d.PointerField))
	for remaining := d.PointerField; remaining > 0; remaining-- {
		b.Write(uint8(0x00))
	}
	written := 1 + d.PointerField
	if err := b.Err(); err != nil {
		return 0, err
	}

	// Sections
	for _, s := range d.Sections {
		n, err := writePSISection(w, s)
		if err != nil {
			return 0, err
		}
		written += n
	}
	return written, nil
}
// calcPSISectionLength computes the section_length value for a PSI section:
// optional 5-byte syntax header, table-specific payload (PAT/PMT only) and
// optional 4-byte CRC32.
func calcPSISectionLength(s *PSISection) uint16 {
	var length uint16
	if s.Header.TableID.hasPSISyntaxHeader() {
		length = 5 // PSI syntax header length
	}
	if s.Header.TableID == PSITableIDPAT {
		length += calcPATSectionLength(s.Syntax.Data.PAT)
	} else if s.Header.TableID == PSITableIDPMT {
		length += calcPMTSectionLength(s.Syntax.Data.PMT)
	}
	if s.Header.TableID.hasCRC32() {
		length += 4
	}
	return length
}
// writePSISection writes a single PSI section (PAT or PMT only) to w and
// returns the number of bytes written.
func writePSISection(w *astikit.BitsWriter, s *PSISection) (int, error) {
	// Only PAT and PMT writing is implemented
	if s.Header.TableID != PSITableIDPAT && s.Header.TableID != PSITableIDPMT {
		return 0, fmt.Errorf("writePSISection: table %s is not implemented", s.Header.TableID.Type())
	}
	b := astikit.NewBitsWriterBatch(w)
	// The section length is recomputed from the payload rather than taken
	// from the header, so it is always consistent with what is written
	sectionLength := calcPSISectionLength(s)
	// The CRC32 is computed on the fly: while the callback is installed,
	// every byte written to w is folded into sectionCRC32
	sectionCRC32 := crc32Polynomial
	if s.Header.TableID.hasCRC32() {
		w.SetWriteCallback(func(bs []byte) {
			sectionCRC32 = updateCRC32(sectionCRC32, bs)
		})
		defer w.SetWriteCallback(nil)
	}
	// 3-byte section header: table ID, syntax indicator, private bit,
	// 2 reserved bits (set to 1) and the 12-bit section length
	b.Write(uint8(s.Header.TableID))
	b.Write(s.Header.SectionSyntaxIndicator)
	b.Write(s.Header.PrivateBit)
	b.WriteN(uint8(0xff), 2)
	b.WriteN(sectionLength, 12)
	bytesWritten := 3
	if s.Header.SectionLength > 0 {
		// Syntax header and table payload
		n, err := writePSISectionSyntax(w, s)
		if err != nil {
			return 0, err
		}
		bytesWritten += n
		// Append the CRC32 accumulated by the write callback
		if s.Header.TableID.hasCRC32() {
			b.Write(sectionCRC32)
			bytesWritten += 4
		}
	}
	return bytesWritten, b.Err()
}
// writePSISectionSyntax writes the syntax part of a PSI section (optional
// syntax header followed by the table-specific data) and returns the number
// of bytes written.
func writePSISectionSyntax(w *astikit.BitsWriter, s *PSISection) (int, error) {
	var written int

	// Only some tables carry a syntax header
	if s.Header.TableID.hasPSISyntaxHeader() {
		n, err := writePSISectionSyntaxHeader(w, s.Syntax.Header)
		if err != nil {
			return 0, err
		}
		written = n
	}

	// Table-specific payload
	n, err := writePSISectionSyntaxData(w, s.Syntax.Data, s.Header.TableID)
	if err != nil {
		return 0, err
	}
	return written + n, nil
}
// writePSISectionSyntaxHeader writes the 5-byte PSI section syntax header:
// a 16-bit table ID extension, 2 reserved bits (set to 1), a 5-bit version
// number, a 1-bit current/next indicator, the section number and the last
// section number.
func writePSISectionSyntaxHeader(w *astikit.BitsWriter, h *PSISectionSyntaxHeader) (int, error) {
	b := astikit.NewBitsWriterBatch(w)
	b.Write(h.TableIDExtension)
	// Reserved bits
	b.WriteN(uint8(0xff), 2)
	b.WriteN(h.VersionNumber, 5)
	b.Write(h.CurrentNextIndicator)
	b.Write(h.SectionNumber)
	b.Write(h.LastSectionNumber)
	return 5, b.Err()
}
// writePSISectionSyntaxData writes the table-specific payload of a PSI
// section. Only PAT and PMT are implemented; other table IDs write nothing.
func writePSISectionSyntaxData(w *astikit.BitsWriter, d *PSISectionSyntaxData, tableID PSITableID) (int, error) {
	// TODO write other table types
	if tableID == PSITableIDPAT {
		return writePATSection(w, d.PAT)
	}
	if tableID == PSITableIDPMT {
		return writePMTSection(w, d.PMT)
	}
	return 0, nil
}

Some files were not shown because too many files have changed in this diff Show more