diff --git a/go.mod b/go.mod index 6350fc4bd..b225d3150 100644 --- a/go.mod +++ b/go.mod @@ -46,6 +46,7 @@ require ( ) require ( + github.com/asticode/go-astisub v0.20.0 github.com/go-chi/httplog v0.2.1 github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4 github.com/hashicorp/golang-lru v0.5.4 @@ -59,6 +60,8 @@ require ( require ( github.com/agnivade/levenshtein v1.1.1 // indirect github.com/antchfx/xpath v1.2.0 // indirect + github.com/asticode/go-astikit v0.20.0 // indirect + github.com/asticode/go-astits v1.8.0 // indirect github.com/chromedp/sysutil v1.0.0 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect diff --git a/go.sum b/go.sum index fa971588d..43ca36369 100644 --- a/go.sum +++ b/go.sum @@ -104,6 +104,12 @@ github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmV github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc= github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/asticode/go-astikit v0.20.0 h1:+7N+J4E4lWx2QOkRdOf6DafWJMv6O4RRfgClwQokrH8= +github.com/asticode/go-astikit v0.20.0/go.mod h1:h4ly7idim1tNhaVkdVBeXQZEE3L0xblP7fCWbgwipF0= +github.com/asticode/go-astisub v0.20.0 h1:mKuLwgGkQj35RRHFiTcq+2hgR7g1mHiYiIkr9UNTmXw= +github.com/asticode/go-astisub v0.20.0/go.mod h1:WTkuSzFB+Bp7wezuSf2Oxulj5A8zu2zLRVFf6bIFQK8= +github.com/asticode/go-astits v1.8.0 h1:rf6aiiGn/QhlFjNON1n5plqF3Fs025XLUwiQ0NB6oZg= +github.com/asticode/go-astits v1.8.0/go.mod h1:DkOWmBNQpnr9mv24KfZjq4JawCFX1FCqjLVGvO0DygQ= github.com/aws/aws-sdk-go v1.17.7/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= github.com/aws/aws-sdk-go-v2 v1.3.2/go.mod h1:7OaACgj2SX3XGWnrIjGlJM22h6yD6MEWKvm7levnnM8= github.com/aws/aws-sdk-go-v2 v1.6.0/go.mod h1:tI4KhsR5VkzlUa2DZAdwx7wCAYGwkZZ1H31PYrBFx1w= @@ -625,6 
+631,7 @@ github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/profile v1.4.0/go.mod h1:NWz/XGvpEW1FyYQ7fCx4dqYBLlfTcE+A9FLAkNKqjFE= github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= diff --git a/gqlgen.yml b/gqlgen.yml index 7ddc3be69..8762177d3 100644 --- a/gqlgen.yml +++ b/gqlgen.yml @@ -44,3 +44,6 @@ models: model: github.com/stashapp/stash/pkg/models.SavedFilter StashID: model: github.com/stashapp/stash/pkg/models.StashID + SceneCaption: + model: github.com/stashapp/stash/pkg/models.SceneCaption + diff --git a/graphql/documents/data/scene-slim.graphql b/graphql/documents/data/scene-slim.graphql index 6046b6e92..c3d759e61 100644 --- a/graphql/documents/data/scene-slim.graphql +++ b/graphql/documents/data/scene-slim.graphql @@ -13,6 +13,10 @@ fragment SlimSceneData on Scene { phash interactive interactive_speed + captions { + language_code + caption_type + } file { size @@ -35,6 +39,7 @@ fragment SlimSceneData on Scene { sprite funscript interactive_heatmap + caption } scene_markers { diff --git a/graphql/documents/data/scene.graphql b/graphql/documents/data/scene.graphql index 81ca48041..0cbd73468 100644 --- a/graphql/documents/data/scene.graphql +++ b/graphql/documents/data/scene.graphql @@ -13,6 +13,10 @@ fragment SceneData on Scene { phash interactive interactive_speed + captions { + language_code + caption_type + } created_at updated_at @@ -37,6 +41,7 @@ fragment SceneData on Scene { sprite funscript interactive_heatmap + caption } scene_markers { diff --git 
a/graphql/schema/types/filters.graphql b/graphql/schema/types/filters.graphql index 58c6d72fd..a3dbb5287 100644 --- a/graphql/schema/types/filters.graphql +++ b/graphql/schema/types/filters.graphql @@ -174,6 +174,8 @@ input SceneFilterType { interactive: Boolean """Filter by InteractiveSpeed""" interactive_speed: IntCriterionInput + """Filter by captions""" + captions: StringCriterionInput } input MovieFilterType { diff --git a/graphql/schema/types/scene.graphql b/graphql/schema/types/scene.graphql index aa046e613..ff405415a 100644 --- a/graphql/schema/types/scene.graphql +++ b/graphql/schema/types/scene.graphql @@ -19,6 +19,7 @@ type ScenePathsType { sprite: String # Resolver funscript: String # Resolver interactive_heatmap: String # Resolver + caption: String # Resolver } type SceneMovie { @@ -26,6 +27,11 @@ type SceneMovie { scene_index: Int } +type SceneCaption { + language_code: String! + caption_type: String! +} + type Scene { id: ID! checksum: String @@ -41,6 +47,7 @@ type Scene { phash: String interactive: Boolean! interactive_speed: Int + captions: [SceneCaption!] created_at: Time! updated_at: Time! 
file_mod_time: Time diff --git a/internal/api/resolver_model_scene.go b/internal/api/resolver_model_scene.go index 59801e5dd..4fd583d5b 100644 --- a/internal/api/resolver_model_scene.go +++ b/internal/api/resolver_model_scene.go @@ -98,6 +98,7 @@ func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.S spritePath := builder.GetSpriteURL() chaptersVttPath := builder.GetChaptersVTTURL() funscriptPath := builder.GetFunscriptURL() + captionBasePath := builder.GetCaptionURL() interactiveHeatmap := builder.GetInteractiveHeatmapURL() return &models.ScenePathsType{ @@ -110,6 +111,7 @@ func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*models.S Sprite: &spritePath, Funscript: &funscriptPath, InteractiveHeatmap: &interactiveHeatmap, + Caption: &captionBasePath, }, nil } @@ -124,6 +126,17 @@ func (r *sceneResolver) SceneMarkers(ctx context.Context, obj *models.Scene) (re return ret, nil } +func (r *sceneResolver) Captions(ctx context.Context, obj *models.Scene) (ret []*models.SceneCaption, err error) { + if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { + ret, err = repo.Scene().GetCaptions(obj.ID) + return err + }); err != nil { + return nil, err + } + + return ret, err +} + func (r *sceneResolver) Galleries(ctx context.Context, obj *models.Scene) (ret []*models.Gallery, err error) { if err := r.withReadTxn(ctx, func(repo models.ReaderRepository) error { ret, err = repo.Gallery().FindBySceneID(obj.ID) diff --git a/internal/api/routes_scene.go b/internal/api/routes_scene.go index 2ff716ae7..3612da72d 100644 --- a/internal/api/routes_scene.go +++ b/internal/api/routes_scene.go @@ -1,6 +1,7 @@ package api import ( + "bytes" "context" "net/http" "strconv" @@ -41,6 +42,7 @@ func (rs sceneRoutes) Routes() chi.Router { r.Get("/vtt/chapter", rs.ChapterVtt) r.Get("/funscript", rs.Funscript) r.Get("/interactive_heatmap", rs.InteractiveHeatmap) + r.Get("/caption", rs.CaptionLang) 
r.Get("/scene_marker/{sceneMarkerId}/stream", rs.SceneMarkerStream) r.Get("/scene_marker/{sceneMarkerId}/preview", rs.SceneMarkerPreview) @@ -284,6 +286,46 @@ func (rs sceneRoutes) InteractiveHeatmap(w http.ResponseWriter, r *http.Request) http.ServeFile(w, r, filepath) } +func (rs sceneRoutes) Caption(w http.ResponseWriter, r *http.Request, lang string, ext string) { + s := r.Context().Value(sceneKey).(*models.Scene) + + if err := rs.txnManager.WithReadTxn(r.Context(), func(repo models.ReaderRepository) error { + var err error + captions, err := repo.Scene().GetCaptions(s.ID) + for _, caption := range captions { + if lang == caption.LanguageCode && ext == caption.CaptionType { + sub, err := scene.ReadSubs(caption.Path(s.Path)) + if err == nil { + var b bytes.Buffer + err = sub.WriteToWebVTT(&b) + if err == nil { + w.Header().Set("Content-Type", "text/vtt") + w.Header().Add("Cache-Control", "no-cache") + _, _ = b.WriteTo(w) + } + return err + } + logger.Debugf("Error while reading subs: %v", err) + } + } + return err + }); err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } +} + +func (rs sceneRoutes) CaptionLang(w http.ResponseWriter, r *http.Request) { + // serve caption based on lang query param, if provided + if err := r.ParseForm(); err != nil { + logger.Warnf("[caption] error parsing query form: %v", err) + } + + l := r.Form.Get("lang") + ext := r.Form.Get("type") + rs.Caption(w, r, l, ext) +} + func (rs sceneRoutes) VttThumbs(w http.ResponseWriter, r *http.Request) { scene := r.Context().Value(sceneKey).(*models.Scene) w.Header().Set("Content-Type", "text/vtt") diff --git a/internal/api/urlbuilders/scene.go b/internal/api/urlbuilders/scene.go index f54ab4ef0..650fa0218 100644 --- a/internal/api/urlbuilders/scene.go +++ b/internal/api/urlbuilders/scene.go @@ -67,6 +67,10 @@ func (b SceneURLBuilder) GetFunscriptURL() string { return b.BaseURL + "/scene/" + b.SceneID + "/funscript" } +func (b SceneURLBuilder) GetCaptionURL() 
string { + return b.BaseURL + "/scene/" + b.SceneID + "/caption" +} + func (b SceneURLBuilder) GetInteractiveHeatmapURL() string { return b.BaseURL + "/scene/" + b.SceneID + "/interactive_heatmap" } diff --git a/internal/manager/manager_tasks.go b/internal/manager/manager_tasks.go index d24d1f80c..209e89ceb 100644 --- a/internal/manager/manager_tasks.go +++ b/internal/manager/manager_tasks.go @@ -12,6 +12,7 @@ import ( "github.com/stashapp/stash/pkg/job" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/scene" ) func isGallery(pathname string) bool { @@ -19,6 +20,10 @@ func isGallery(pathname string) bool { return fsutil.MatchExtension(pathname, gExt) } +func isCaptions(pathname string) bool { + return fsutil.MatchExtension(pathname, scene.CaptionExts) +} + func isVideo(pathname string) bool { vidExt := config.GetInstance().GetVideoExtensions() return fsutil.MatchExtension(pathname, vidExt) diff --git a/internal/manager/task_scan.go b/internal/manager/task_scan.go index 9f3decec5..05ffe168e 100644 --- a/internal/manager/task_scan.go +++ b/internal/manager/task_scan.go @@ -16,6 +16,7 @@ import ( "github.com/stashapp/stash/pkg/job" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/scene/generate" "github.com/stashapp/stash/pkg/utils" ) @@ -277,6 +278,8 @@ func (t *ScanTask) Start(ctx context.Context) { s = t.scanScene(ctx) case isImage(path): t.scanImage(ctx) + case isCaptions(path): + t.associateCaptions(ctx) } }) @@ -351,6 +354,7 @@ func walkFilesToScan(s *models.StashConfig, f filepath.WalkFunc) error { vidExt := config.GetVideoExtensions() imgExt := config.GetImageExtensions() gExt := config.GetGalleryExtensions() + capExt := scene.CaptionExts excludeVidRegex := generateRegexps(config.GetExcludes()) excludeImgRegex := generateRegexps(config.GetImageExcludes()) @@ -394,6 +398,10 @@ func walkFilesToScan(s 
*models.StashConfig, f filepath.WalkFunc) error { } } + if fsutil.MatchExtension(path, capExt) { + return f(path, info, err) + } + return nil }) } diff --git a/internal/manager/task_scan_scene.go b/internal/manager/task_scan_scene.go index 70c33d5f4..218a2e012 100644 --- a/internal/manager/task_scan_scene.go +++ b/internal/manager/task_scan_scene.go @@ -2,7 +2,9 @@ package manager import ( "context" + "path/filepath" + "github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/pkg/ffmpeg" "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/logger" @@ -78,3 +80,48 @@ func (t *ScanTask) scanScene(ctx context.Context) *models.Scene { return retScene } + +// associates captions to scene/s with the same basename +func (t *ScanTask) associateCaptions(ctx context.Context) { + vExt := config.GetInstance().GetVideoExtensions() + captionPath := t.file.Path() + captionLang := scene.GetCaptionsLangFromPath(captionPath) + + relatedFiles := scene.GenerateCaptionCandidates(captionPath, vExt) + if err := t.TxnManager.WithTxn(ctx, func(r models.Repository) error { + var err error + sqb := r.Scene() + + for _, scenePath := range relatedFiles { + s, er := sqb.FindByPath(scenePath) + + if er != nil { + logger.Errorf("Error searching for scene %s: %v", scenePath, er) + continue + } + if s != nil { // found related Scene + logger.Debugf("Matched captions to scene %s", s.Path) + captions, er := sqb.GetCaptions(s.ID) + if er == nil { + fileExt := filepath.Ext(captionPath) + ext := fileExt[1:] + if !scene.IsLangInCaptions(captionLang, ext, captions) { // only update captions if language code is not present + newCaption := &models.SceneCaption{ + LanguageCode: captionLang, + Filename: filepath.Base(captionPath), + CaptionType: ext, + } + captions = append(captions, newCaption) + er = sqb.UpdateCaptions(s.ID, captions) + if er == nil { + logger.Debugf("Updated captions for scene %s. 
Added %s", s.Path, captionLang) + } + } + } + } + } + return err + }); err != nil { + logger.Error(err.Error()) + } +} diff --git a/pkg/database/database.go b/pkg/database/database.go index 457b81797..3fa260716 100644 --- a/pkg/database/database.go +++ b/pkg/database/database.go @@ -23,7 +23,7 @@ import ( var DB *sqlx.DB var WriteMu sync.Mutex var dbPath string -var appSchemaVersion uint = 30 +var appSchemaVersion uint = 31 var databaseSchemaVersion uint //go:embed migrations/*.sql diff --git a/pkg/database/migrations/29_interactive_speed.up.sql b/pkg/database/migrations/29_interactive_speed.up.sql index bec6ced29..4a944ca3d 100644 --- a/pkg/database/migrations/29_interactive_speed.up.sql +++ b/pkg/database/migrations/29_interactive_speed.up.sql @@ -1 +1 @@ -ALTER TABLE `scenes` ADD COLUMN `interactive_speed` int \ No newline at end of file +ALTER TABLE `scenes` ADD COLUMN `interactive_speed` int diff --git a/pkg/database/migrations/31_scenes_captions.up.sql b/pkg/database/migrations/31_scenes_captions.up.sql new file mode 100644 index 000000000..133341c36 --- /dev/null +++ b/pkg/database/migrations/31_scenes_captions.up.sql @@ -0,0 +1,8 @@ +CREATE TABLE `scene_captions` ( + `scene_id` integer, + `language_code` varchar(255) NOT NULL, + `filename` varchar(255) NOT NULL, + `caption_type` varchar(255) NOT NULL, + primary key (`scene_id`, `language_code`, `caption_type`), + foreign key(`scene_id`) references `scenes`(`id`) on delete CASCADE +); diff --git a/pkg/models/mocks/GalleryReaderWriter.go b/pkg/models/mocks/GalleryReaderWriter.go index 8bbd2e78b..9731147fe 100644 --- a/pkg/models/mocks/GalleryReaderWriter.go +++ b/pkg/models/mocks/GalleryReaderWriter.go @@ -1,4 +1,4 @@ -// Code generated by mockery v0.0.0-dev. DO NOT EDIT. +// Code generated by mockery v2.10.0. DO NOT EDIT. 
package mocks diff --git a/pkg/models/mocks/ImageReaderWriter.go b/pkg/models/mocks/ImageReaderWriter.go index 630c1c0d2..5a13ad986 100644 --- a/pkg/models/mocks/ImageReaderWriter.go +++ b/pkg/models/mocks/ImageReaderWriter.go @@ -1,4 +1,4 @@ -// Code generated by mockery v0.0.0-dev. DO NOT EDIT. +// Code generated by mockery v2.10.0. DO NOT EDIT. package mocks diff --git a/pkg/models/mocks/MovieReaderWriter.go b/pkg/models/mocks/MovieReaderWriter.go index 3f80f12a3..288eb6fd4 100644 --- a/pkg/models/mocks/MovieReaderWriter.go +++ b/pkg/models/mocks/MovieReaderWriter.go @@ -1,4 +1,4 @@ -// Code generated by mockery v0.0.0-dev. DO NOT EDIT. +// Code generated by mockery v2.10.0. DO NOT EDIT. package mocks diff --git a/pkg/models/mocks/PerformerReaderWriter.go b/pkg/models/mocks/PerformerReaderWriter.go index 0ccaddb33..485f75170 100644 --- a/pkg/models/mocks/PerformerReaderWriter.go +++ b/pkg/models/mocks/PerformerReaderWriter.go @@ -1,4 +1,4 @@ -// Code generated by mockery v0.0.0-dev. DO NOT EDIT. +// Code generated by mockery v2.10.0. DO NOT EDIT. package mocks diff --git a/pkg/models/mocks/SavedFilterReaderWriter.go b/pkg/models/mocks/SavedFilterReaderWriter.go index ce8f40546..987fdd5fc 100644 --- a/pkg/models/mocks/SavedFilterReaderWriter.go +++ b/pkg/models/mocks/SavedFilterReaderWriter.go @@ -1,4 +1,4 @@ -// Code generated by mockery v0.0.0-dev. DO NOT EDIT. +// Code generated by mockery v2.10.0. DO NOT EDIT. package mocks diff --git a/pkg/models/mocks/SceneMarkerReaderWriter.go b/pkg/models/mocks/SceneMarkerReaderWriter.go index ebecb8e4d..2e6fea3a0 100644 --- a/pkg/models/mocks/SceneMarkerReaderWriter.go +++ b/pkg/models/mocks/SceneMarkerReaderWriter.go @@ -1,4 +1,4 @@ -// Code generated by mockery v0.0.0-dev. DO NOT EDIT. +// Code generated by mockery v2.10.0. DO NOT EDIT. 
package mocks diff --git a/pkg/models/mocks/SceneReaderWriter.go b/pkg/models/mocks/SceneReaderWriter.go index 6c9a91f77..a200087ea 100644 --- a/pkg/models/mocks/SceneReaderWriter.go +++ b/pkg/models/mocks/SceneReaderWriter.go @@ -1,4 +1,4 @@ -// Code generated by mockery v0.0.0-dev. DO NOT EDIT. +// Code generated by mockery v2.10.0. DO NOT EDIT. package mocks @@ -482,6 +482,28 @@ func (_m *SceneReaderWriter) FindMany(ids []int) ([]*models.Scene, error) { return r0, r1 } +func (_m *SceneReaderWriter) GetCaptions(sceneID int) ([]*models.SceneCaption, error) { + ret := _m.Called(sceneID) + + var r0 []*models.SceneCaption + if rf, ok := ret.Get(0).(func(int) []*models.SceneCaption); ok { + r0 = rf(sceneID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.SceneCaption) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(int) error); ok { + r1 = rf(sceneID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetCover provides a mock function with given fields: sceneID func (_m *SceneReaderWriter) GetCover(sceneID int) ([]byte, error) { ret := _m.Called(sceneID) @@ -729,6 +751,20 @@ func (_m *SceneReaderWriter) Update(updatedScene models.ScenePartial) (*models.S return r0, r1 } +// UpdateCaptions provides a mock function with given fields: id, newCaptions +func (_m *SceneReaderWriter) UpdateCaptions(sceneID int, captions []*models.SceneCaption) error { + ret := _m.Called(sceneID, captions) + + var r0 error + if rf, ok := ret.Get(0).(func(int, []*models.SceneCaption) error); ok { + r0 = rf(sceneID, captions) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // UpdateCover provides a mock function with given fields: sceneID, cover func (_m *SceneReaderWriter) UpdateCover(sceneID int, cover []byte) error { ret := _m.Called(sceneID, cover) diff --git a/pkg/models/mocks/ScrapedItemReaderWriter.go b/pkg/models/mocks/ScrapedItemReaderWriter.go index 8b8e1fbdf..e06b7451d 100644 --- a/pkg/models/mocks/ScrapedItemReaderWriter.go +++ 
b/pkg/models/mocks/ScrapedItemReaderWriter.go @@ -1,4 +1,4 @@ -// Code generated by mockery v0.0.0-dev. DO NOT EDIT. +// Code generated by mockery v2.10.0. DO NOT EDIT. package mocks diff --git a/pkg/models/mocks/StudioReaderWriter.go b/pkg/models/mocks/StudioReaderWriter.go index c433fe305..c15c73719 100644 --- a/pkg/models/mocks/StudioReaderWriter.go +++ b/pkg/models/mocks/StudioReaderWriter.go @@ -1,4 +1,4 @@ -// Code generated by mockery v0.0.0-dev. DO NOT EDIT. +// Code generated by mockery v2.10.0. DO NOT EDIT. package mocks diff --git a/pkg/models/mocks/TagReaderWriter.go b/pkg/models/mocks/TagReaderWriter.go index 1d5e14e34..64a8088a6 100644 --- a/pkg/models/mocks/TagReaderWriter.go +++ b/pkg/models/mocks/TagReaderWriter.go @@ -1,4 +1,4 @@ -// Code generated by mockery v0.0.0-dev. DO NOT EDIT. +// Code generated by mockery v2.10.0. DO NOT EDIT. package mocks diff --git a/pkg/models/model_scene.go b/pkg/models/model_scene.go index a3c939821..7e1066ed3 100644 --- a/pkg/models/model_scene.go +++ b/pkg/models/model_scene.go @@ -221,3 +221,13 @@ func (s *Scenes) Append(o interface{}) { func (s *Scenes) New() interface{} { return &Scene{} } + +type SceneCaption struct { + LanguageCode string `json:"language_code"` + Filename string `json:"filename"` + CaptionType string `json:"caption_type"` +} + +func (c SceneCaption) Path(scenePath string) string { + return filepath.Join(filepath.Dir(scenePath), c.Filename) +} diff --git a/pkg/models/scene.go b/pkg/models/scene.go index a86f75ff0..86a131a0f 100644 --- a/pkg/models/scene.go +++ b/pkg/models/scene.go @@ -62,6 +62,7 @@ type SceneReader interface { Wall(q *string) ([]*Scene, error) All() ([]*Scene, error) Query(options SceneQueryOptions) (*SceneQueryResult, error) + GetCaptions(sceneID int) ([]*SceneCaption, error) GetCover(sceneID int) ([]byte, error) GetMovies(sceneID int) ([]MoviesScenes, error) GetTagIDs(sceneID int) ([]int, error) @@ -79,6 +80,7 @@ type SceneWriter interface { ResetOCounter(id int) (int, 
error) UpdateFileModTime(id int, modTime NullSQLiteTimestamp) error Destroy(id int) error + UpdateCaptions(id int, captions []*SceneCaption) error UpdateCover(sceneID int, cover []byte) error DestroyCover(sceneID int) error UpdatePerformers(sceneID int, performerIDs []int) error diff --git a/pkg/scene/caption.go b/pkg/scene/caption.go new file mode 100644 index 000000000..f45ba8a2d --- /dev/null +++ b/pkg/scene/caption.go @@ -0,0 +1,106 @@ +package scene + +import ( + "os" + "path/filepath" + "strings" + + "golang.org/x/text/language" + + "github.com/asticode/go-astisub" + "github.com/stashapp/stash/pkg/models" +) + +var CaptionExts = []string{"vtt", "srt"} // in a case where vtt and srt files are both provided prioritize vtt file due to native support + +// to be used for captions without a language code in the filename
// ISO 639 uses 2 or 3 a-z chars for codes so 00 is a safe invalid choice + +// https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes +const LangUnknown = "00" + +// GetCaptionPath generates the path of a caption
// from a given file path, wanted language and caption suffix +func GetCaptionPath(path, lang, suffix string) string { + ext := filepath.Ext(path) + fn := strings.TrimSuffix(path, ext) + captionExt := "" + if len(lang) == 0 || lang == LangUnknown { + captionExt = suffix + } else { + captionExt = lang + "." + suffix + } + return fn + "." 
+ captionExt +} + +// ReadSubs reads a captions file +func ReadSubs(path string) (*astisub.Subtitles, error) { + return astisub.OpenFile(path) +} + +// IsValidLanguage checks whether the given string is a valid +// ISO 639 language code +func IsValidLanguage(lang string) bool { + _, err := language.ParseBase(lang) + return err == nil +} + +// IsLangInCaptions returns true if lang is present +// in the captions +func IsLangInCaptions(lang string, ext string, captions []*models.SceneCaption) bool { + for _, caption := range captions { + if lang == caption.LanguageCode && ext == caption.CaptionType { + return true + } + } + return false +} + +// GenerateCaptionCandidates generates a list of filenames with exts as extensions
// that can be associated with the caption +func GenerateCaptionCandidates(captionPath string, exts []string) []string { + var candidates []string + + basename := strings.TrimSuffix(captionPath, filepath.Ext(captionPath)) // caption filename without the extension + + // a caption file can be something like scene_filename.srt or scene_filename.en.srt + // if a language code is present and valid remove it from the basename + languageExt := filepath.Ext(basename) + if len(languageExt) > 2 && IsValidLanguage(languageExt[1:]) { + basename = strings.TrimSuffix(basename, languageExt) + } + + for _, ext := range exts { + candidates = append(candidates, basename+"."+ext) + } + + return candidates +} + +// GetCaptionsLangFromPath returns the language code from a given captions path
// If no valid language is present LangUnknown is returned +func GetCaptionsLangFromPath(captionPath string) string { + langCode := LangUnknown + basename := strings.TrimSuffix(captionPath, filepath.Ext(captionPath)) // caption filename without the extension + languageExt := filepath.Ext(basename) + if len(languageExt) > 2 && IsValidLanguage(languageExt[1:]) { + langCode = languageExt[1:] + } + return langCode +} + +// CleanCaptions removes non-existent/inaccessible language codes 
from captions +func CleanCaptions(scenePath string, captions []*models.SceneCaption) (cleanedCaptions []*models.SceneCaption, changed bool) { + changed = false + for _, caption := range captions { + found := false + f := caption.Path(scenePath) + if _, er := os.Stat(f); er == nil { + cleanedCaptions = append(cleanedCaptions, caption) + found = true + } + if !found { + changed = true + } + } + return +} diff --git a/pkg/scene/caption_test.go b/pkg/scene/caption_test.go new file mode 100644 index 000000000..3c9cb54fb --- /dev/null +++ b/pkg/scene/caption_test.go @@ -0,0 +1,55 @@ +package scene + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +var testExts = []string{"mkv", "mp4"} + +type testCase struct { + captionPath string + expectedLang string + expectedCandidates []string +} + +var testCases = []testCase{ + { + captionPath: "/stash/video.vtt", + expectedLang: LangUnknown, + expectedCandidates: []string{"/stash/video.mkv", "/stash/video.mp4"}, + }, + { + captionPath: "/stash/video.en.vtt", + expectedLang: "en", + expectedCandidates: []string{"/stash/video.mkv", "/stash/video.mp4"}, // lang code valid, remove en part + }, + { + captionPath: "/stash/video.test.srt", + expectedLang: LangUnknown, + expectedCandidates: []string{"/stash/video.test.mkv", "/stash/video.test.mp4"}, // no lang code/lang code invalid test should remain + }, + { + captionPath: "C:\\videos\\video.fr.srt", + expectedLang: "fr", + expectedCandidates: []string{"C:\\videos\\video.mkv", "C:\\videos\\video.mp4"}, + }, + { + captionPath: "C:\\videos\\video.xx.srt", + expectedLang: LangUnknown, + expectedCandidates: []string{"C:\\videos\\video.xx.mkv", "C:\\videos\\video.xx.mp4"}, // no lang code/lang code invalid xx should remain + }, +} + +func TestGenerateCaptionCandidates(t *testing.T) { + for _, c := range testCases { + assert.ElementsMatch(t, c.expectedCandidates, GenerateCaptionCandidates(c.captionPath, testExts)) + } +} + +func TestGetCaptionsLangFromPath(t *testing.T) { 
+ for _, l := range testCases { + assert.Equal(t, l.expectedLang, GetCaptionsLangFromPath(l.captionPath)) + } +} diff --git a/pkg/scene/scan.go b/pkg/scene/scan.go index bb0acab77..1f33fa9ff 100644 --- a/pkg/scene/scan.go +++ b/pkg/scene/scan.go @@ -105,6 +105,27 @@ func (scanner *Scanner) ScanExisting(ctx context.Context, existing file.FileBase changed = true } + if err := scanner.TxnManager.WithTxn(context.TODO(), func(r models.Repository) error { + var err error + sqb := r.Scene() + + captions, er := sqb.GetCaptions(s.ID) + if er == nil { + if len(captions) > 0 { + clean, altered := CleanCaptions(s.Path, captions) + if altered { + er = sqb.UpdateCaptions(s.ID, clean) + if er == nil { + logger.Debugf("Captions for %s cleaned: %s -> %s", path, captions, clean) + } + } + } + } + return err + }); err != nil { + logger.Error(err.Error()) + } + if changed { // we are operating on a checksum now, so grab a mutex on the checksum done := make(chan struct{}) diff --git a/pkg/sqlite/repository.go b/pkg/sqlite/repository.go index b9c8d22bb..f195d9c7e 100644 --- a/pkg/sqlite/repository.go +++ b/pkg/sqlite/repository.go @@ -365,6 +365,52 @@ func (r *imageRepository) replace(id int, image []byte) error { return err } +type captionRepository struct { + repository +} + +func (r *captionRepository) get(id int) ([]*models.SceneCaption, error) { + query := fmt.Sprintf("SELECT %s, %s, %s from %s WHERE %s = ?", sceneCaptionCodeColumn, sceneCaptionFilenameColumn, sceneCaptionTypeColumn, r.tableName, r.idColumn) + var ret []*models.SceneCaption + err := r.queryFunc(query, []interface{}{id}, false, func(rows *sqlx.Rows) error { + var captionCode string + var captionFilename string + var captionType string + + if err := rows.Scan(&captionCode, &captionFilename, &captionType); err != nil { + return err + } + + caption := &models.SceneCaption{ + LanguageCode: captionCode, + Filename: captionFilename, + CaptionType: captionType, + } + ret = append(ret, caption) + return nil + }) + return 
ret, err +} + +func (r *captionRepository) insert(id int, caption *models.SceneCaption) (sql.Result, error) { + stmt := fmt.Sprintf("INSERT INTO %s (%s, %s, %s, %s) VALUES (?, ?, ?, ?)", r.tableName, r.idColumn, sceneCaptionCodeColumn, sceneCaptionFilenameColumn, sceneCaptionTypeColumn) + return r.tx.Exec(stmt, id, caption.LanguageCode, caption.Filename, caption.CaptionType) +} + +func (r *captionRepository) replace(id int, captions []*models.SceneCaption) error { + if err := r.destroy([]int{id}); err != nil { + return err + } + + for _, caption := range captions { + if _, err := r.insert(id, caption); err != nil { + return err + } + } + + return nil +} + type stringRepository struct { repository stringColumn string diff --git a/pkg/sqlite/scene.go b/pkg/sqlite/scene.go index e6c0d7019..cb5085dfd 100644 --- a/pkg/sqlite/scene.go +++ b/pkg/sqlite/scene.go @@ -19,6 +19,11 @@ const scenesTagsTable = "scenes_tags" const scenesGalleriesTable = "scenes_galleries" const moviesScenesTable = "movies_scenes" +const sceneCaptionsTable = "scene_captions" +const sceneCaptionCodeColumn = "language_code" +const sceneCaptionFilenameColumn = "filename" +const sceneCaptionTypeColumn = "caption_type" + var scenesForPerformerQuery = selectAll(sceneTable) + ` LEFT JOIN performers_scenes as performers_join on performers_join.scene_id = scenes.id WHERE performers_join.performer_id = ? 
@@ -127,6 +132,25 @@ func (qb *sceneQueryBuilder) UpdateFileModTime(id int, modTime models.NullSQLite }) } +func (qb *sceneQueryBuilder) captionRepository() *captionRepository { + return &captionRepository{ + repository: repository{ + tx: qb.tx, + tableName: sceneCaptionsTable, + idColumn: sceneIDColumn, + }, + } +} + +func (qb *sceneQueryBuilder) GetCaptions(sceneID int) ([]*models.SceneCaption, error) { + return qb.captionRepository().get(sceneID) +} + +func (qb *sceneQueryBuilder) UpdateCaptions(sceneID int, captions []*models.SceneCaption) error { + return qb.captionRepository().replace(sceneID, captions) + +} + func (qb *sceneQueryBuilder) IncrementOCounter(id int) (int, error) { _, err := qb.tx.Exec( `UPDATE scenes SET o_counter = o_counter + 1 WHERE scenes.id = ?`, @@ -385,6 +409,8 @@ func (qb *sceneQueryBuilder) makeFilter(sceneFilter *models.SceneFilterType) *fi query.handleCriterion(boolCriterionHandler(sceneFilter.Interactive, "scenes.interactive")) query.handleCriterion(intCriterionHandler(sceneFilter.InteractiveSpeed, "scenes.interactive_speed")) + query.handleCriterion(sceneCaptionCriterionHandler(qb, sceneFilter.Captions)) + query.handleCriterion(sceneTagsCriterionHandler(qb, sceneFilter.Tags)) query.handleCriterion(sceneTagCountCriterionHandler(qb, sceneFilter.TagCount)) query.handleCriterion(scenePerformersCriterionHandler(qb, sceneFilter.Performers)) @@ -607,6 +633,18 @@ func (qb *sceneQueryBuilder) getMultiCriterionHandlerBuilder(foreignTable, joinT } } +func sceneCaptionCriterionHandler(qb *sceneQueryBuilder, captions *models.StringCriterionInput) criterionHandlerFunc { + h := stringListCriterionHandlerBuilder{ + joinTable: sceneCaptionsTable, + stringColumn: sceneCaptionCodeColumn, + addJoinTable: func(f *filterBuilder) { + qb.captionRepository().join(f, "", "scenes.id") + }, + } + + return h.handler(captions) +} + func sceneTagsCriterionHandler(qb *sceneQueryBuilder, tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { h := 
joinedHierarchicalMultiCriterionHandlerBuilder{ tx: qb.tx, diff --git a/ui/v2.5/src/components/Changelog/versions/v0140.md b/ui/v2.5/src/components/Changelog/versions/v0140.md index e571d0bbb..68cd4224e 100644 --- a/ui/v2.5/src/components/Changelog/versions/v0140.md +++ b/ui/v2.5/src/components/Changelog/versions/v0140.md @@ -1,7 +1,7 @@ ##### 💥 Note: Image Slideshow Delay (in Interface Settings) is now in seconds rather than milliseconds and has not been converted. Please adjust your settings as needed. ### ✨ New Features -* Add Ignore Auto Tag flag to Performers, Studios and Tags. ([#2439](https://github.com/stashapp/stash/pull/2439)) +* Add Ignore Auto Tag flag to Performers, Studios and Tags. ([#2439](https://github.com/stashapp/stash/pull/2439)) * Add python location in System Settings for script scrapers and plugins. ([#2409](https://github.com/stashapp/stash/pull/2409)) ### 🎨 Improvements diff --git a/ui/v2.5/src/components/Changelog/versions/v0150.md b/ui/v2.5/src/components/Changelog/versions/v0150.md index 243d2b5d3..276d993ad 100644 --- a/ui/v2.5/src/components/Changelog/versions/v0150.md +++ b/ui/v2.5/src/components/Changelog/versions/v0150.md @@ -1,3 +1,6 @@ +### ✨ New Features +* Add support for VTT and SRT captions for scenes. ([#2462](https://github.com/stashapp/stash/pull/2462)) + ### 🎨 Improvements * Changed playback rate options to be the same as those provided by YouTube. ([#2550](https://github.com/stashapp/stash/pull/2550)) * Display error message on fatal error when running stash with double-click in Windows. 
([#2543](https://github.com/stashapp/stash/pull/2543)) diff --git a/ui/v2.5/src/components/Help/Manual.tsx b/ui/v2.5/src/components/Help/Manual.tsx index e0c6ccbc7..501563472 100644 --- a/ui/v2.5/src/components/Help/Manual.tsx +++ b/ui/v2.5/src/components/Help/Manual.tsx @@ -19,6 +19,7 @@ import KeyboardShortcuts from "src/docs/en/KeyboardShortcuts.md"; import Help from "src/docs/en/Help.md"; import Deduplication from "src/docs/en/Deduplication.md"; import Interactive from "src/docs/en/Interactive.md"; +import Captions from "src/docs/en/Captions.md"; import Identify from "src/docs/en/Identify.md"; import Browsing from "src/docs/en/Browsing.md"; import { MarkdownPage } from "../Shared/MarkdownPage"; @@ -134,6 +135,11 @@ export const Manual: React.FC = ({ title: "Interactivity", content: Interactive, }, + { + key: "Captions.md", + title: "Captions", + content: Captions, + }, { key: "KeyboardShortcuts.md", title: "Keyboard Shortcuts", diff --git a/ui/v2.5/src/components/ScenePlayer/ScenePlayer.tsx b/ui/v2.5/src/components/ScenePlayer/ScenePlayer.tsx index 5308faaaf..8282a642a 100644 --- a/ui/v2.5/src/components/ScenePlayer/ScenePlayer.tsx +++ b/ui/v2.5/src/components/ScenePlayer/ScenePlayer.tsx @@ -16,6 +16,7 @@ import * as GQL from "src/core/generated-graphql"; import { ScenePlayerScrubber } from "./ScenePlayerScrubber"; import { ConfigurationContext } from "src/hooks/Config"; import { Interactive } from "src/utils/interactive"; +import { languageMap } from "src/utils/caption"; export const VIDEO_PLAYER_ID = "VideoJsPlayer"; @@ -160,6 +161,13 @@ export const ScenePlayer: React.FC = ({ const player = VideoJS(videoElement, options); + const settings = (player as any).textTrackSettings; + settings.setValues({ + backgroundColor: "#000", + backgroundOpacity: "0.5", + }); + settings.updateDisplay(); + (player as any).landscapeFullscreen({ fullscreen: { enterOnRotate: true, @@ -215,6 +223,38 @@ export const ScenePlayer: React.FC = ({ }, []); useEffect(() => { + let 
prevCaptionOffset = 0; + + function addCaptionOffset(player: VideoJsPlayer, offset: number) { + const tracks = player.remoteTextTracks(); + for (let i = 0; i < tracks.length; i++) { + const track = tracks[i]; + const { cues } = track; + if (cues) { + for (let j = 0; j < cues.length; j++) { + const cue = cues[j]; + cue.startTime = cue.startTime + offset; + cue.endTime = cue.endTime + offset; + } + } + } + } + + function removeCaptionOffset(player: VideoJsPlayer, offset: number) { + const tracks = player.remoteTextTracks(); + for (let i = 0; i < tracks.length; i++) { + const track = tracks[i]; + const { cues } = track; + if (cues) { + for (let j = 0; j < cues.length; j++) { + const cue = cues[j]; + cue.startTime = cue.startTime + prevCaptionOffset - offset; + cue.endTime = cue.endTime + prevCaptionOffset - offset; + } + } + } + } + function handleOffset(player: VideoJsPlayer) { if (!scene) return; @@ -222,11 +262,25 @@ export const ScenePlayer: React.FC = ({ const isDirect = currentSrc.endsWith("/stream") || currentSrc.endsWith("/stream.m3u8"); + + const curTime = player.currentTime(); if (!isDirect) { (player as any).setOffsetDuration(scene.file.duration); } else { (player as any).clearOffsetDuration(); } + + if (curTime != prevCaptionOffset) { + if (!isDirect) { + removeCaptionOffset(player, curTime); + prevCaptionOffset = curTime; + } else { + if (prevCaptionOffset != 0) { + addCaptionOffset(player, prevCaptionOffset); + prevCaptionOffset = 0; + } + } + } } function handleError(play: boolean) { @@ -268,6 +322,58 @@ export const ScenePlayer: React.FC = ({ return false; } + function getDefaultLanguageCode() { + var languageCode = window.navigator.language; + + if (languageCode.indexOf("-") !== -1) { + languageCode = languageCode.split("-")[0]; + } + + if (languageCode.indexOf("_") !== -1) { + languageCode = languageCode.split("_")[0]; + } + + return languageCode; + } + + function loadCaptions(player: VideoJsPlayer) { + if (!scene) return; + + if (scene.captions) { + 
var languageCode = getDefaultLanguageCode(); + var hasDefault = false; + + for (let caption of scene.captions) { + var lang = caption.language_code; + var label = lang; + if (languageMap.has(lang)) { + label = languageMap.get(lang)!; + } + + label = label + " (" + caption.caption_type + ")"; + var setAsDefault = !hasDefault && languageCode == lang; + if (!hasDefault && setAsDefault) { + hasDefault = true; + } + player.addRemoteTextTrack( + { + src: + scene.paths.caption + + "?lang=" + + lang + + "&type=" + + caption.caption_type, + kind: "captions", + srclang: lang, + label: label, + default: setAsDefault, + }, + true + ); + } + } + } + if (!scene || scene.id === sceneId.current) return; sceneId.current = scene.id; @@ -285,8 +391,8 @@ export const ScenePlayer: React.FC = ({ (player as any).clearOffsetDuration(); const tracks = player.remoteTextTracks(); - if (tracks.length > 0) { - player.removeRemoteTextTrack(tracks[0] as any); + for (let i = 0; i < tracks.length; i++) { + player.removeRemoteTextTrack(tracks[i] as any); } player.src( @@ -308,6 +414,10 @@ export const ScenePlayer: React.FC = ({ ); } + if (scene.captions?.length! 
> 0) { + loadCaptions(player); + } + player.currentTime(0); player.loop( @@ -338,12 +448,10 @@ if (scene.interactive) { interactiveClient.ensurePlaying(this.currentTime()); } - setTime(this.currentTime()); }); player.on("seeking", function (this: VideoJsPlayer) { - // backwards compatibility - may want to remove this in future this.play(); }); diff --git a/ui/v2.5/src/components/ScenePlayer/styles.scss b/ui/v2.5/src/components/ScenePlayer/styles.scss index 2271a871a..d04f3a7f5 100644 --- a/ui/v2.5/src/components/ScenePlayer/styles.scss +++ b/ui/v2.5/src/components/ScenePlayer/styles.scss @@ -492,6 +492,10 @@ $sceneTabWidth: 450px; z-index: 1; } +.vjs-text-track-settings select { + background: #fff; +} + .VideoPlayer .video-js .vjs-seek-button.skip-back diff --git a/ui/v2.5/src/docs/en/Captions.md b/ui/v2.5/src/docs/en/Captions.md new file mode 100644 index 000000000..5f2d91902 --- /dev/null +++ b/ui/v2.5/src/docs/en/Captions.md @@ -0,0 +1,14 @@ +# Captions + +Stash supports captioning with SRT and VTT files. + +These files need to be named as follows: + +## Scene + +- {scene_name}.{language_code}.ext +- {scene_name}.ext + +Where `{language_code}` is defined by the [ISO 639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (2 letters) standard and `ext` is the file extension. Caption files without a language code will be labeled as Unknown in the video player but will work fine. + +Scenes with captions can be filtered with the `captions` criterion. 
diff --git a/ui/v2.5/src/locales/en-GB.json b/ui/v2.5/src/locales/en-GB.json index 921936f3a..e85100746 100644 --- a/ui/v2.5/src/locales/en-GB.json +++ b/ui/v2.5/src/locales/en-GB.json @@ -743,6 +743,7 @@ "instagram": "Instagram", "interactive": "Interactive", "interactive_speed": "Interactive speed", + "captions": "Captions", "isMissing": "Is Missing", "library": "Library", "loading": { diff --git a/ui/v2.5/src/models/list-filter/criteria/captions.ts b/ui/v2.5/src/models/list-filter/criteria/captions.ts new file mode 100644 index 000000000..b516b8777 --- /dev/null +++ b/ui/v2.5/src/models/list-filter/criteria/captions.ts @@ -0,0 +1,42 @@ +import { CriterionModifier } from "src/core/generated-graphql"; +import { languageMap, valueToCode } from "src/utils/caption"; +import { CriterionType } from "../types"; +import { CriterionOption, StringCriterion } from "./criterion"; + +const languageStrings = Array.from(languageMap.values()); + +class CaptionsCriterionOptionType extends CriterionOption { + constructor(value: CriterionType) { + super({ + messageID: value, + type: value, + parameterName: value, + modifierOptions: [ + CriterionModifier.Includes, + CriterionModifier.Excludes, + CriterionModifier.IsNull, + CriterionModifier.NotNull, + ], + options: languageStrings, + }); + } +} + +export const CaptionsCriterionOption = new CaptionsCriterionOptionType( + "captions" +); + +export class CaptionCriterion extends StringCriterion { + protected toCriterionInput() { + const value = valueToCode(this.value); + + return { + value, + modifier: this.modifier, + }; + } + + constructor() { + super(CaptionsCriterionOption); + } +} diff --git a/ui/v2.5/src/models/list-filter/criteria/factory.ts b/ui/v2.5/src/models/list-filter/criteria/factory.ts index 88b67879f..8f3f7001c 100644 --- a/ui/v2.5/src/models/list-filter/criteria/factory.ts +++ b/ui/v2.5/src/models/list-filter/criteria/factory.ts @@ -43,6 +43,7 @@ import { CriterionType } from "../types"; import { InteractiveCriterion } 
from "./interactive"; import { RatingCriterionOption } from "./rating"; import { DuplicatedCriterion, PhashCriterionOption } from "./phash"; +import { CaptionCriterion } from "./captions"; export function makeCriteria(type: CriterionType = "none") { switch (type) { @@ -159,6 +160,8 @@ export function makeCriteria(type: CriterionType = "none") { return new StringCriterion(new StringCriterionOption(type, type)); case "interactive": return new InteractiveCriterion(); + case "captions": + return new CaptionCriterion(); case "parent_tag_count": return new NumberCriterion( new MandatoryNumberCriterionOption( diff --git a/ui/v2.5/src/models/list-filter/scenes.ts b/ui/v2.5/src/models/list-filter/scenes.ts index 71c91b966..c53c7ce21 100644 --- a/ui/v2.5/src/models/list-filter/scenes.ts +++ b/ui/v2.5/src/models/list-filter/scenes.ts @@ -23,6 +23,7 @@ import { PhashCriterionOption, } from "./criteria/phash"; import { PerformerFavoriteCriterionOption } from "./criteria/favorite"; +import { CaptionsCriterionOption } from "./criteria/captions"; const defaultSortBy = "date"; const sortByOptions = [ @@ -78,6 +79,7 @@ const criterionOptions = [ createStringCriterionOption("url"), createStringCriterionOption("stash_id"), InteractiveCriterionOption, + CaptionsCriterionOption, createMandatoryNumberCriterionOption("interactive_speed"), ]; diff --git a/ui/v2.5/src/models/list-filter/types.ts b/ui/v2.5/src/models/list-filter/types.ts index cd42c5403..01e4755f2 100644 --- a/ui/v2.5/src/models/list-filter/types.ts +++ b/ui/v2.5/src/models/list-filter/types.ts @@ -113,6 +113,7 @@ export type CriterionType = | "stash_id" | "interactive" | "interactive_speed" + | "captions" | "name" | "details" | "title" diff --git a/ui/v2.5/src/utils/caption.ts b/ui/v2.5/src/utils/caption.ts new file mode 100644 index 000000000..91d7030b6 --- /dev/null +++ b/ui/v2.5/src/utils/caption.ts @@ -0,0 +1,22 @@ +export const languageMap = new Map([ + ["de", "Deutsche"], + ["en", "English"], + ["es", "Español"], + 
["fr", "Français"], + ["it", "Italiano"], + ["ja", "日本"], + ["ko", "한국인"], + ["nl", "Holandés"], + ["pt", "Português"], + ["00", "Unknown"], // stash reserved language code +]); + +export const valueToCode = (value?: string | null) => { + if (!value) { + return undefined; + } + + return Array.from(languageMap.keys()).find((v) => { + return languageMap.get(v) === value; + }); +}; diff --git a/vendor/github.com/asticode/go-astikit/.travis.sh b/vendor/github.com/asticode/go-astikit/.travis.sh new file mode 100644 index 000000000..f67dc1348 --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/.travis.sh @@ -0,0 +1,8 @@ +#!/bin/sh + +if [ "$(go list -m all)" != "github.com/asticode/go-astikit" ]; then + echo "This repo doesn't allow any external dependencies" + exit 1 +else + echo "cheers!" +fi \ No newline at end of file diff --git a/vendor/github.com/asticode/go-astikit/.travis.yml b/vendor/github.com/asticode/go-astikit/.travis.yml new file mode 100644 index 000000000..c64e331fd --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/.travis.yml @@ -0,0 +1,15 @@ +language: go +go: +- 1.x +- tip +install: +- bash .travis.sh +- go get -t ./... 
+- go get golang.org/x/tools/cmd/cover +- go get github.com/mattn/goveralls +matrix: + allow_failures: + - go: tip +script: +- go test -race -v -coverprofile=coverage.out +- $HOME/gopath/bin/goveralls -coverprofile=coverage.out -service=travis-ci \ No newline at end of file diff --git a/vendor/github.com/asticode/go-astikit/LICENSE b/vendor/github.com/asticode/go-astikit/LICENSE new file mode 100644 index 000000000..87a311287 --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Quentin Renard + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/vendor/github.com/asticode/go-astikit/README.md b/vendor/github.com/asticode/go-astikit/README.md new file mode 100644 index 000000000..e1356c167 --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/README.md @@ -0,0 +1,6 @@ +[![GoReportCard](http://goreportcard.com/badge/github.com/asticode/go-astikit)](http://goreportcard.com/report/github.com/asticode/go-astikit) +[![GoDoc](https://godoc.org/github.com/asticode/go-astikit?status.svg)](https://godoc.org/github.com/asticode/go-astikit) +[![Travis](https://travis-ci.org/asticode/go-astikit.svg?branch=master)](https://travis-ci.org/asticode/go-astikit#) +[![Coveralls](https://coveralls.io/repos/github/asticode/go-astikit/badge.svg?branch=master)](https://coveralls.io/github/asticode/go-astikit) + +`astikit` is a set of golang helpers that don't require any external dependencies. \ No newline at end of file diff --git a/vendor/github.com/asticode/go-astikit/archive.go b/vendor/github.com/asticode/go-astikit/archive.go new file mode 100644 index 000000000..cde119404 --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/archive.go @@ -0,0 +1,214 @@ +package astikit + +import ( + "archive/zip" + "context" + "fmt" + "io" + "io/ioutil" + "os" + "path/filepath" + "strings" +) + +// internal shouldn't lead with a "/" +func zipInternalPath(p string) (external, internal string) { + if items := strings.Split(p, ".zip"); len(items) > 1 { + external = items[0] + ".zip" + internal = strings.TrimPrefix(strings.Join(items[1:], ".zip"), string(os.PathSeparator)) + return + } + external = p + return +} + +// Zip zips a src into a dst +// Possible dst formats are: +// - /path/to/zip.zip +// - /path/to/zip.zip/root/path +func Zip(ctx context.Context, dst, src string) (err error) { + // Get external/internal path + externalPath, internalPath := zipInternalPath(dst) + + // Make sure the directory exists + if err = os.MkdirAll(filepath.Dir(externalPath), DefaultDirMode); err != nil { + return fmt.Errorf("astikit: 
mkdirall %s failed: %w", filepath.Dir(externalPath), err) + } + + // Create destination file + var dstFile *os.File + if dstFile, err = os.Create(externalPath); err != nil { + return fmt.Errorf("astikit: creating %s failed: %w", externalPath, err) + } + defer dstFile.Close() + + // Create zip writer + var zw = zip.NewWriter(dstFile) + defer zw.Close() + + // Walk + if err = filepath.Walk(src, func(path string, info os.FileInfo, e error) (err error) { + // Process error + if e != nil { + err = e + return + } + + // Init header + var h *zip.FileHeader + if h, err = zip.FileInfoHeader(info); err != nil { + return fmt.Errorf("astikit: initializing zip header failed: %w", err) + } + + // Set header info + h.Name = filepath.Join(internalPath, strings.TrimPrefix(path, src)) + if info.IsDir() { + h.Name += string(os.PathSeparator) + } else { + h.Method = zip.Deflate + } + + // Create writer + var w io.Writer + if w, err = zw.CreateHeader(h); err != nil { + return fmt.Errorf("astikit: creating zip header failed: %w", err) + } + + // If path is dir, stop here + if info.IsDir() { + return + } + + // Open path + var walkFile *os.File + if walkFile, err = os.Open(path); err != nil { + return fmt.Errorf("astikit: opening %s failed: %w", path, err) + } + defer walkFile.Close() + + // Copy + if _, err = Copy(ctx, w, walkFile); err != nil { + return fmt.Errorf("astikit: copying failed: %w", err) + } + return + }); err != nil { + return fmt.Errorf("astikit: walking failed: %w", err) + } + return +} + +// Unzip unzips a src into a dst +// Possible src formats are: +// - /path/to/zip.zip +// - /path/to/zip.zip/root/path +func Unzip(ctx context.Context, dst, src string) (err error) { + // Get external/internal path + externalPath, internalPath := zipInternalPath(src) + + // Make sure the destination exists + if err = os.MkdirAll(dst, DefaultDirMode); err != nil { + return fmt.Errorf("astikit: mkdirall %s failed: %w", dst, err) + } + + // Open overall reader + var r *zip.ReadCloser + if 
r, err = zip.OpenReader(externalPath); err != nil { + return fmt.Errorf("astikit: opening overall zip reader on %s failed: %w", externalPath, err) + } + defer r.Close() + + // Loop through files to determine their type + var dirs, files, symlinks = make(map[string]*zip.File), make(map[string]*zip.File), make(map[string]*zip.File) + for _, f := range r.File { + // Validate internal path + if internalPath != "" && !strings.HasPrefix(f.Name, internalPath) { + continue + } + var p = filepath.Join(dst, strings.TrimPrefix(f.Name, internalPath)) + + // Check file type + if f.FileInfo().Mode()&os.ModeSymlink != 0 { + symlinks[p] = f + } else if f.FileInfo().IsDir() { + dirs[p] = f + } else { + files[p] = f + } + } + + // Invalid internal path + if internalPath != "" && len(dirs) == 0 && len(files) == 0 && len(symlinks) == 0 { + return fmt.Errorf("astikit: content in archive does not match specified internal path %s", internalPath) + } + + // Create dirs + for p, f := range dirs { + if err = os.MkdirAll(p, f.FileInfo().Mode().Perm()); err != nil { + return fmt.Errorf("astikit: mkdirall %s failed: %w", p, err) + } + } + + // Create files + for p, f := range files { + if err = createZipFile(ctx, f, p); err != nil { + return fmt.Errorf("astikit: creating zip file into %s failed: %w", p, err) + } + } + + // Create symlinks + for p, f := range symlinks { + if err = createZipSymlink(f, p); err != nil { + return fmt.Errorf("astikit: creating zip symlink into %s failed: %w", p, err) + } + } + return +} + +func createZipFile(ctx context.Context, f *zip.File, p string) (err error) { + // Open file reader + var fr io.ReadCloser + if fr, err = f.Open(); err != nil { + return fmt.Errorf("astikit: opening zip reader on file %s failed: %w", f.Name, err) + } + defer fr.Close() + + // Since dirs don't always come up we make sure the directory of the file exists with default + // file mode + if err = os.MkdirAll(filepath.Dir(p), DefaultDirMode); err != nil { + return fmt.Errorf("astikit: 
mkdirall %s failed: %w", filepath.Dir(p), err) + } + + // Open the file + var fl *os.File + if fl, err = os.OpenFile(p, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.FileInfo().Mode().Perm()); err != nil { + return fmt.Errorf("astikit: opening file %s failed: %w", p, err) + } + defer fl.Close() + + // Copy + if _, err = Copy(ctx, fl, fr); err != nil { + return fmt.Errorf("astikit: copying %s into %s failed: %w", f.Name, p, err) + } + return +} + +func createZipSymlink(f *zip.File, p string) (err error) { + // Open file reader + var fr io.ReadCloser + if fr, err = f.Open(); err != nil { + return fmt.Errorf("astikit: opening zip reader on file %s failed: %w", f.Name, err) + } + defer fr.Close() + + // If file is a symlink we retrieve the target path that is in the content of the file + var b []byte + if b, err = ioutil.ReadAll(fr); err != nil { + return fmt.Errorf("astikit: ioutil.Readall on %s failed: %w", f.Name, err) + } + + // Create the symlink + if err = os.Symlink(string(b), p); err != nil { + return fmt.Errorf("astikit: creating symlink from %s to %s failed: %w", string(b), p, err) + } + return +} diff --git a/vendor/github.com/asticode/go-astikit/astikit.go b/vendor/github.com/asticode/go-astikit/astikit.go new file mode 100644 index 000000000..cb6909a68 --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/astikit.go @@ -0,0 +1,8 @@ +package astikit + +import "os" + +// Default modes +var ( + DefaultDirMode os.FileMode = 0755 +) diff --git a/vendor/github.com/asticode/go-astikit/binary.go b/vendor/github.com/asticode/go-astikit/binary.go new file mode 100644 index 000000000..521c3b50e --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/binary.go @@ -0,0 +1,297 @@ +package astikit + +import ( + "encoding/binary" + "errors" + "io" +) + +// BitsWriter represents an object that can write individual bits into a writer +// in a developer-friendly way. Check out the Write method for more information. 
+// This is particularly helpful when you want to build a slice of bytes based +// on individual bits for testing purposes. +type BitsWriter struct { + bo binary.ByteOrder + cache byte + cacheLen byte + bsCache []byte + w io.Writer + writeCb BitsWriterWriteCallback +} + +type BitsWriterWriteCallback func([]byte) + +// BitsWriterOptions represents BitsWriter options +type BitsWriterOptions struct { + ByteOrder binary.ByteOrder + // WriteCallback is called every time when full byte is written + WriteCallback BitsWriterWriteCallback + Writer io.Writer +} + +// NewBitsWriter creates a new BitsWriter +func NewBitsWriter(o BitsWriterOptions) (w *BitsWriter) { + w = &BitsWriter{ + bo: o.ByteOrder, + bsCache: make([]byte, 1), + w: o.Writer, + writeCb: o.WriteCallback, + } + if w.bo == nil { + w.bo = binary.BigEndian + } + return +} + +func (w *BitsWriter) SetWriteCallback(cb BitsWriterWriteCallback) { + w.writeCb = cb +} + +// Write writes bits into the writer. Bits are only written when there are +// enough to create a byte. 
When using a string or a bool, bits are added +// from left to right as if +// Available types are: +// - string("10010"): processed as n bits, n being the length of the input +// - []byte: processed as n bytes, n being the length of the input +// - bool: processed as one bit +// - uint8/uint16/uint32/uint64: processed as n bits, if type is uintn +func (w *BitsWriter) Write(i interface{}) error { + // Transform input into "10010" format + + switch a := i.(type) { + case string: + for _, r := range a { + var err error + if r == '1' { + err = w.writeBit(1) + } else { + err = w.writeBit(0) + } + if err != nil { + return err + } + } + case []byte: + for _, b := range a { + if err := w.writeFullByte(b); err != nil { + return err + } + } + case bool: + if a { + return w.writeBit(1) + } else { + return w.writeBit(0) + } + case uint8: + return w.writeFullByte(a) + case uint16: + return w.writeFullInt(uint64(a), 2) + case uint32: + return w.writeFullInt(uint64(a), 4) + case uint64: + return w.writeFullInt(a, 8) + default: + return errors.New("astikit: invalid type") + } + + return nil +} + +// Writes exactly n bytes from bs +// Writes first n bytes of bs if len(bs) > n +// Pads with padByte at the end if len(bs) < n +func (w *BitsWriter) WriteBytesN(bs []byte, n int, padByte uint8) error { + if len(bs) >= n { + return w.Write(bs[:n]) + } + + if err := w.Write(bs); err != nil { + return err + } + + // no bytes.Repeat here to avoid allocation + for i := 0; i < n-len(bs); i++ { + if err := w.Write(padByte); err != nil { + return err + } + } + + return nil +} + +func (w *BitsWriter) writeFullInt(in uint64, len int) error { + if w.bo == binary.BigEndian { + for i := len - 1; i >= 0; i-- { + err := w.writeFullByte(byte((in >> (i * 8)) & 0xff)) + if err != nil { + return err + } + } + } else { + for i := 0; i < len; i++ { + err := w.writeFullByte(byte((in >> (i * 8)) & 0xff)) + if err != nil { + return err + } + } + } + + return nil +} + +func (w *BitsWriter) flushBsCache() error 
{ + if _, err := w.w.Write(w.bsCache); err != nil { + return err + } + + if w.writeCb != nil { + w.writeCb(w.bsCache) + } + + return nil +} + +func (w *BitsWriter) writeFullByte(b byte) error { + if w.cacheLen == 0 { + w.bsCache[0] = b + } else { + w.bsCache[0] = w.cache | (b >> w.cacheLen) + w.cache = b << (8 - w.cacheLen) + } + return w.flushBsCache() +} + +func (w *BitsWriter) writeBit(bit byte) error { + w.cache = w.cache | (bit)<<(7-w.cacheLen) + w.cacheLen++ + if w.cacheLen == 8 { + w.bsCache[0] = w.cache + if err := w.flushBsCache(); err != nil { + return err + } + + w.cacheLen = 0 + w.cache = 0 + } + return nil +} + +// WriteN writes the input into n bits +func (w *BitsWriter) WriteN(i interface{}, n int) error { + var toWrite uint64 + switch a := i.(type) { + case uint8: + toWrite = uint64(a) + case uint16: + toWrite = uint64(a) + case uint32: + toWrite = uint64(a) + case uint64: + toWrite = a + default: + return errors.New("astikit: invalid type") + } + + for i := n - 1; i >= 0; i-- { + err := w.writeBit(byte(toWrite>>i) & 0x1) + if err != nil { + return err + } + } + return nil +} + +// BitsWriterBatch allows to chain multiple Write* calls and check for error only once +// For more info see https://github.com/asticode/go-astikit/pull/6 +type BitsWriterBatch struct { + err error + w *BitsWriter +} + +func NewBitsWriterBatch(w *BitsWriter) BitsWriterBatch { + return BitsWriterBatch{ + w: w, + } +} + +// Calls BitsWriter.Write if there was no write error before +func (b *BitsWriterBatch) Write(i interface{}) { + if b.err == nil { + b.err = b.w.Write(i) + } +} + +// Calls BitsWriter.WriteN if there was no write error before +func (b *BitsWriterBatch) WriteN(i interface{}, n int) { + if b.err == nil { + b.err = b.w.WriteN(i, n) + } +} + +// Calls BitsWriter.WriteBytesN if there was no write error before +func (b *BitsWriterBatch) WriteBytesN(bs []byte, n int, padByte uint8) { + if b.err == nil { + b.err = b.w.WriteBytesN(bs, n, padByte) + } +} + +// Returns 
first write error +func (b *BitsWriterBatch) Err() error { + return b.err +} + +var byteHamming84Tab = [256]uint8{ + 0x01, 0xff, 0xff, 0x08, 0xff, 0x0c, 0x04, 0xff, 0xff, 0x08, 0x08, 0x08, 0x06, 0xff, 0xff, 0x08, + 0xff, 0x0a, 0x02, 0xff, 0x06, 0xff, 0xff, 0x0f, 0x06, 0xff, 0xff, 0x08, 0x06, 0x06, 0x06, 0xff, + 0xff, 0x0a, 0x04, 0xff, 0x04, 0xff, 0x04, 0x04, 0x00, 0xff, 0xff, 0x08, 0xff, 0x0d, 0x04, 0xff, + 0x0a, 0x0a, 0xff, 0x0a, 0xff, 0x0a, 0x04, 0xff, 0xff, 0x0a, 0x03, 0xff, 0x06, 0xff, 0xff, 0x0e, + 0x01, 0x01, 0x01, 0xff, 0x01, 0xff, 0xff, 0x0f, 0x01, 0xff, 0xff, 0x08, 0xff, 0x0d, 0x05, 0xff, + 0x01, 0xff, 0xff, 0x0f, 0xff, 0x0f, 0x0f, 0x0f, 0xff, 0x0b, 0x03, 0xff, 0x06, 0xff, 0xff, 0x0f, + 0x01, 0xff, 0xff, 0x09, 0xff, 0x0d, 0x04, 0xff, 0xff, 0x0d, 0x03, 0xff, 0x0d, 0x0d, 0xff, 0x0d, + 0xff, 0x0a, 0x03, 0xff, 0x07, 0xff, 0xff, 0x0f, 0x03, 0xff, 0x03, 0x03, 0xff, 0x0d, 0x03, 0xff, + 0xff, 0x0c, 0x02, 0xff, 0x0c, 0x0c, 0xff, 0x0c, 0x00, 0xff, 0xff, 0x08, 0xff, 0x0c, 0x05, 0xff, + 0x02, 0xff, 0x02, 0x02, 0xff, 0x0c, 0x02, 0xff, 0xff, 0x0b, 0x02, 0xff, 0x06, 0xff, 0xff, 0x0e, + 0x00, 0xff, 0xff, 0x09, 0xff, 0x0c, 0x04, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0xff, 0xff, 0x0e, + 0xff, 0x0a, 0x02, 0xff, 0x07, 0xff, 0xff, 0x0e, 0x00, 0xff, 0xff, 0x0e, 0xff, 0x0e, 0x0e, 0x0e, + 0x01, 0xff, 0xff, 0x09, 0xff, 0x0c, 0x05, 0xff, 0xff, 0x0b, 0x05, 0xff, 0x05, 0xff, 0x05, 0x05, + 0xff, 0x0b, 0x02, 0xff, 0x07, 0xff, 0xff, 0x0f, 0x0b, 0x0b, 0xff, 0x0b, 0xff, 0x0b, 0x05, 0xff, + 0xff, 0x09, 0x09, 0x09, 0x07, 0xff, 0xff, 0x09, 0x00, 0xff, 0xff, 0x09, 0xff, 0x0d, 0x05, 0xff, + 0x07, 0xff, 0xff, 0x09, 0x07, 0x07, 0x07, 0xff, 0xff, 0x0b, 0x03, 0xff, 0x07, 0xff, 0xff, 0x0e, +} + +// ByteHamming84Decode hamming 8/4 decodes +func ByteHamming84Decode(i uint8) (o uint8, ok bool) { + o = byteHamming84Tab[i] + if o == 0xff { + return + } + ok = true + return +} + +var byteParityTab = [256]uint8{ + 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 
0x00, + 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, + 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, + 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, + 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, + 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, + 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, + 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, + 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, + 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, + 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, + 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, + 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, + 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, + 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, + 0x00, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, +} + +// ByteParity returns the byte parity +func ByteParity(i uint8) (o uint8, ok bool) { + ok = byteParityTab[i] == 1 + o = i & 0x7f + return +} diff --git a/vendor/github.com/asticode/go-astikit/bytes.go b/vendor/github.com/asticode/go-astikit/bytes.go new file mode 100644 index 000000000..155e2f06a --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/bytes.go @@ -0,0 +1,164 @@ +package astikit + +import "fmt" + +// BytesIterator represents an object capable of iterating sequentially and safely +// through a 
// BytesIterator walks a byte slice sequentially while guarding every access
// against "index out of range" panics: out-of-bounds reads return an error
// instead.
type BytesIterator struct {
	bs     []byte
	offset int
}

// NewBytesIterator creates a new BytesIterator over bs.
func NewBytesIterator(bs []byte) *BytesIterator {
	return &BytesIterator{bs: bs}
}

// NextByte returns the next byte.
func (i *BytesIterator) NextByte() (byte, error) {
	if i.offset >= len(i.bs) {
		return 0, fmt.Errorf("astikit: slice length is %d, offset %d is invalid", len(i.bs), i.offset)
	}
	b := i.bs[i.offset]
	i.offset++
	return b, nil
}

// NextBytes returns a copy of the n next bytes.
func (i *BytesIterator) NextBytes(n int) ([]byte, error) {
	if i.offset+n > len(i.bs) {
		return nil, fmt.Errorf("astikit: slice length is %d, offset %d is invalid", len(i.bs), i.offset+n)
	}
	bs := make([]byte, n)
	copy(bs, i.bs[i.offset:])
	i.offset += n
	return bs, nil
}

// NextBytesNoCopy returns the n next bytes.
// Be careful with this function as it doesn't make a copy of returned data:
// the result aliases the internal BytesIterator buffer. If you need to
// modify the returned bytes or store them for some time, use NextBytes
// instead.
func (i *BytesIterator) NextBytesNoCopy(n int) ([]byte, error) {
	if i.offset+n > len(i.bs) {
		return nil, fmt.Errorf("astikit: slice length is %d, offset %d is invalid", len(i.bs), i.offset+n)
	}
	bs := i.bs[i.offset : i.offset+n]
	i.offset += n
	return bs, nil
}

// Seek seeks to the nth byte.
func (i *BytesIterator) Seek(n int) { i.offset = n }

// Skip skips the n previous/next bytes.
func (i *BytesIterator) Skip(n int) { i.offset += n }

// HasBytesLeft checks whether there are bytes left.
func (i *BytesIterator) HasBytesLeft() bool { return i.offset < len(i.bs) }

// Offset returns the current offset.
func (i *BytesIterator) Offset() int { return i.offset }

// Dump returns a copy of the remaining bytes and moves the iterator to the
// end of the slice. It returns nil when nothing is left.
func (i *BytesIterator) Dump() []byte {
	if i.offset >= len(i.bs) {
		return nil
	}
	bs := make([]byte, len(i.bs)-i.offset)
	copy(bs, i.bs[i.offset:])
	i.offset = len(i.bs)
	return bs
}

// Len returns the slice length.
func (i *BytesIterator) Len() int { return len(i.bs) }
+ } + o = append(o, p.repeat) + } + o = o[:p.length] + return o + } +} + +// PadOption represents a Pad option +type PadOption func(p *bytesPadder) + +// PadCut is a PadOption +// It indicates to the padder it must cut the input to the provided length +// if its original length is bigger +func PadCut(p *bytesPadder) { p.cut = true } + +// PadLeft is a PadOption +// It indicates additionnal bytes have to be added to the left +func PadLeft(p *bytesPadder) { p.direction = padLeft } + +// PadRight is a PadOption +// It indicates additionnal bytes have to be added to the right +func PadRight(p *bytesPadder) { p.direction = padRight } + +// BytesPad pads the slice of bytes with additionnal options +func BytesPad(i []byte, repeat byte, length int, options ...PadOption) []byte { + p := newBytesPadder(repeat, length) + for _, o := range options { + o(p) + } + return p.pad(i) +} + +// StrPad pads the string with additionnal options +func StrPad(i string, repeat rune, length int, options ...PadOption) string { + return string(BytesPad([]byte(i), byte(repeat), length, options...)) +} diff --git a/vendor/github.com/asticode/go-astikit/defer.go b/vendor/github.com/asticode/go-astikit/defer.go new file mode 100644 index 000000000..ddca03702 --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/defer.go @@ -0,0 +1,57 @@ +package astikit + +import ( + "sync" +) + +// CloseFunc is a method that closes something +type CloseFunc func() error + +// Closer is an object that can close several things +type Closer struct { + fs []CloseFunc + m *sync.Mutex +} + +// NewCloser creates a new closer +func NewCloser() *Closer { + return &Closer{ + m: &sync.Mutex{}, + } +} + +// Close implements the io.Closer interface +func (c *Closer) Close() error { + // Lock + c.m.Lock() + defer c.m.Unlock() + + // Loop through closers + err := NewErrors() + for _, f := range c.fs { + err.Add(f()) + } + + // Reset closers + c.fs = []CloseFunc{} + + // Return + if err.IsNil() { + return nil + } + return 
err +} + +// Add adds a close func at the beginning of the list +func (c *Closer) Add(f CloseFunc) { + c.m.Lock() + defer c.m.Unlock() + c.fs = append([]CloseFunc{f}, c.fs...) +} + +// NewChild creates a new child closer +func (c *Closer) NewChild() (child *Closer) { + child = NewCloser() + c.Add(child.Close) + return +} diff --git a/vendor/github.com/asticode/go-astikit/errors.go b/vendor/github.com/asticode/go-astikit/errors.go new file mode 100644 index 000000000..46e84963a --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/errors.go @@ -0,0 +1,71 @@ +package astikit + +import ( + "errors" + "strings" + "sync" +) + +// Errors is an error containing multiple errors +type Errors struct { + m *sync.Mutex // Locks p + p []error +} + +// NewErrors creates new errors +func NewErrors(errs ...error) *Errors { + return &Errors{ + m: &sync.Mutex{}, + p: errs, + } +} + +// Add adds a new error +func (errs *Errors) Add(err error) { + if err == nil { + return + } + errs.m.Lock() + defer errs.m.Unlock() + errs.p = append(errs.p, err) +} + +// IsNil checks whether the error is nil +func (errs *Errors) IsNil() bool { + errs.m.Lock() + defer errs.m.Unlock() + return len(errs.p) == 0 +} + +// Loop loops through the errors +func (errs *Errors) Loop(fn func(idx int, err error) bool) { + errs.m.Lock() + defer errs.m.Unlock() + for idx, err := range errs.p { + if stop := fn(idx, err); stop { + return + } + } +} + +// Error implements the error interface +func (errs *Errors) Error() string { + errs.m.Lock() + defer errs.m.Unlock() + var ss []string + for _, err := range errs.p { + ss = append(ss, err.Error()) + } + return strings.Join(ss, " && ") +} + +// ErrorCause returns the cause of an error +func ErrorCause(err error) error { + for { + if u := errors.Unwrap(err); u != nil { + err = u + continue + } + return err + } +} diff --git a/vendor/github.com/asticode/go-astikit/exec.go b/vendor/github.com/asticode/go-astikit/exec.go new file mode 100644 index 000000000..07a0e4092 
--- /dev/null +++ b/vendor/github.com/asticode/go-astikit/exec.go @@ -0,0 +1,104 @@ +package astikit + +import ( + "context" + "fmt" + "os/exec" + "strings" + "sync" +) + +// Statuses +const ( + ExecStatusCrashed = "crashed" + ExecStatusRunning = "running" + ExecStatusStopped = "stopped" +) + +// ExecHandler represents an object capable of handling the execution of a cmd +type ExecHandler struct { + cancel context.CancelFunc + ctx context.Context + err error + o sync.Once + stopped bool +} + +// Status returns the cmd status +func (h *ExecHandler) Status() string { + if h.ctx.Err() != nil { + if h.stopped || h.err == nil { + return ExecStatusStopped + } + return ExecStatusCrashed + } + return ExecStatusRunning +} + +// Stop stops the cmd +func (h *ExecHandler) Stop() { + h.o.Do(func() { + h.cancel() + h.stopped = true + }) +} + +// ExecCmdOptions represents exec options +type ExecCmdOptions struct { + Args []string + CmdAdapter func(cmd *exec.Cmd, h *ExecHandler) error + Name string + StopFunc func(cmd *exec.Cmd) error +} + +// ExecCmd executes a cmd +// The process will be stopped when the worker stops +func ExecCmd(w *Worker, o ExecCmdOptions) (h *ExecHandler, err error) { + // Create handler + h = &ExecHandler{} + h.ctx, h.cancel = context.WithCancel(w.Context()) + + // Create command + cmd := exec.Command(o.Name, o.Args...) 
+ + // Adapt command + if o.CmdAdapter != nil { + if err = o.CmdAdapter(cmd, h); err != nil { + err = fmt.Errorf("astikit: adapting cmd failed: %w", err) + return + } + } + + // Start + w.Logger().Infof("astikit: starting %s", strings.Join(cmd.Args, " ")) + if err = cmd.Start(); err != nil { + err = fmt.Errorf("astikit: executing %s: %w", strings.Join(cmd.Args, " "), err) + return + } + + // Handle context + go func() { + // Wait for context to be done + <-h.ctx.Done() + + // Get stop func + f := func() error { return cmd.Process.Kill() } + if o.StopFunc != nil { + f = func() error { return o.StopFunc(cmd) } + } + + // Stop + if err = f(); err != nil { + w.Logger().Error(fmt.Errorf("astikit: stopping cmd failed: %w", err)) + return + } + }() + + // Execute in a task + w.NewTask().Do(func() { + h.err = cmd.Wait() + h.cancel() + w.Logger().Infof("astikit: status is now %s for %s", h.Status(), strings.Join(cmd.Args, " ")) + }) + return +} diff --git a/vendor/github.com/asticode/go-astikit/flag.go b/vendor/github.com/asticode/go-astikit/flag.go new file mode 100644 index 000000000..2c533cd91 --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/flag.go @@ -0,0 +1,48 @@ +package astikit + +import ( + "os" + "strings" +) + +// FlagCmd retrieves the command from the input Args +func FlagCmd() (o string) { + if len(os.Args) >= 2 && os.Args[1][0] != '-' { + o = os.Args[1] + os.Args = append([]string{os.Args[0]}, os.Args[2:]...) 
+ } + return +} + +// FlagStrings represents a flag that can be set several times and +// stores unique string values +type FlagStrings struct { + Map map[string]bool + Slice *[]string +} + +// NewFlagStrings creates a new FlagStrings +func NewFlagStrings() FlagStrings { + return FlagStrings{ + Map: make(map[string]bool), + Slice: &[]string{}, + } +} + +// String implements the flag.Value interface +func (f FlagStrings) String() string { + if f.Slice == nil { + return "" + } + return strings.Join(*f.Slice, ",") +} + +// Set implements the flag.Value interface +func (f FlagStrings) Set(i string) error { + if _, ok := f.Map[i]; ok { + return nil + } + f.Map[i] = true + *f.Slice = append(*f.Slice, i) + return nil +} diff --git a/vendor/github.com/asticode/go-astikit/float.go b/vendor/github.com/asticode/go-astikit/float.go new file mode 100644 index 000000000..844f673a4 --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/float.go @@ -0,0 +1,60 @@ +package astikit + +import ( + "bytes" + "fmt" + "strconv" +) + +// Rational represents a rational +type Rational struct{ den, num int } + +// NewRational creates a new rational +func NewRational(num, den int) *Rational { + return &Rational{ + den: den, + num: num, + } +} + +// Num returns the rational num +func (r *Rational) Num() int { + return r.num +} + +// Den returns the rational den +func (r *Rational) Den() int { + return r.den +} + +// ToFloat64 returns the rational as a float64 +func (r *Rational) ToFloat64() float64 { + return float64(r.num) / float64(r.den) +} + +// MarshalText implements the TextMarshaler interface +func (r *Rational) MarshalText() (b []byte, err error) { + b = []byte(fmt.Sprintf("%d/%d", r.num, r.den)) + return +} + +// UnmarshalText implements the TextUnmarshaler interface +func (r *Rational) UnmarshalText(b []byte) (err error) { + r.num = 0 + r.den = 1 + if len(b) == 0 { + return + } + items := bytes.Split(b, []byte("/")) + if r.num, err = strconv.Atoi(string(items[0])); err != nil { + 
err = fmt.Errorf("astikit: atoi of %s failed: %w", string(items[0]), err) + return + } + if len(items) > 1 { + if r.den, err = strconv.Atoi(string(items[1])); err != nil { + err = fmt.Errorf("astifloat: atoi of %s failed: %w", string(items[1]), err) + return + } + } + return +} diff --git a/vendor/github.com/asticode/go-astikit/http.go b/vendor/github.com/asticode/go-astikit/http.go new file mode 100644 index 000000000..72a3b0f08 --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/http.go @@ -0,0 +1,632 @@ +package astikit + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "net" + "net/http" + "os" + "path/filepath" + "strconv" + "strings" + "sync" + "time" +) + +var ErrHTTPSenderUnmarshaledError = errors.New("astikit: unmarshaled error") + +// ServeHTTPOptions represents serve options +type ServeHTTPOptions struct { + Addr string + Handler http.Handler +} + +// ServeHTTP spawns an HTTP server +func ServeHTTP(w *Worker, o ServeHTTPOptions) { + // Create server + s := &http.Server{Addr: o.Addr, Handler: o.Handler} + + // Execute in a task + w.NewTask().Do(func() { + // Log + w.Logger().Infof("astikit: serving on %s", o.Addr) + + // Serve + var done = make(chan error) + go func() { + if err := s.ListenAndServe(); err != nil { + done <- err + } + }() + + // Wait for context or done to be done + select { + case <-w.ctx.Done(): + if w.ctx.Err() != context.Canceled { + w.Logger().Error(fmt.Errorf("astikit: context error: %w", w.ctx.Err())) + } + case err := <-done: + if err != nil { + w.Logger().Error(fmt.Errorf("astikit: serving failed: %w", err)) + } + } + + // Shutdown + w.Logger().Infof("astikit: shutting down server on %s", o.Addr) + if err := s.Shutdown(context.Background()); err != nil { + w.Logger().Error(fmt.Errorf("astikit: shutting down server on %s failed: %w", o.Addr, err)) + } + }) +} + +// HTTPClient represents an HTTP client +type HTTPClient interface { + Do(req *http.Request) (*http.Response, error) +} + +// HTTPSender 
represents an object capable of sending http requests +type HTTPSender struct { + client HTTPClient + l SeverityLogger + retryFunc HTTPSenderRetryFunc + retryMax int + retrySleep time.Duration + timeout time.Duration +} + +// HTTPSenderRetryFunc is a function that decides whether to retry an HTTP request +type HTTPSenderRetryFunc func(resp *http.Response) error + +// HTTPSenderOptions represents HTTPSender options +type HTTPSenderOptions struct { + Client HTTPClient + Logger StdLogger + RetryFunc HTTPSenderRetryFunc + RetryMax int + RetrySleep time.Duration + Timeout time.Duration +} + +// NewHTTPSender creates a new HTTP sender +func NewHTTPSender(o HTTPSenderOptions) (s *HTTPSender) { + s = &HTTPSender{ + client: o.Client, + l: AdaptStdLogger(o.Logger), + retryFunc: o.RetryFunc, + retryMax: o.RetryMax, + retrySleep: o.RetrySleep, + timeout: o.Timeout, + } + if s.client == nil { + s.client = &http.Client{} + } + if s.retryFunc == nil { + s.retryFunc = s.defaultHTTPRetryFunc + } + return +} + +func (s *HTTPSender) defaultHTTPRetryFunc(resp *http.Response) error { + if resp.StatusCode >= http.StatusInternalServerError { + return fmt.Errorf("astikit: invalid status code %d", resp.StatusCode) + } + return nil +} + +// Send sends a new *http.Request +func (s *HTTPSender) Send(req *http.Request) (*http.Response, error) { + return s.SendWithTimeout(req, s.timeout) +} + +// SendWithTimeout sends a new *http.Request with a timeout +func (s *HTTPSender) SendWithTimeout(req *http.Request, timeout time.Duration) (resp *http.Response, err error) { + // Set name + name := req.Method + " request" + if req.URL != nil { + name += " to " + req.URL.String() + } + + // Timeout + if timeout > 0 { + // Create context + ctx, cancel := context.WithTimeout(req.Context(), timeout) + defer cancel() + + // Update request + req = req.WithContext(ctx) + + // Update name + name += " with timeout " + timeout.String() + } + + // Loop + // We start at retryMax + 1 so that it runs at least once 
even if retryMax == 0 + tries := 0 + for retriesLeft := s.retryMax + 1; retriesLeft > 0; retriesLeft-- { + // Get request name + nr := name + " (" + strconv.Itoa(s.retryMax-retriesLeft+2) + "/" + strconv.Itoa(s.retryMax+1) + ")" + tries++ + + // Send request + s.l.Debugf("astikit: sending %s", nr) + if resp, err = s.client.Do(req); err != nil { + // Retry if error is temporary, stop here otherwise + if netError, ok := err.(net.Error); !ok || !netError.Temporary() { + err = fmt.Errorf("astikit: sending %s failed: %w", nr, err) + return + } + } else if err = req.Context().Err(); err != nil { + err = fmt.Errorf("astikit: request context failed: %w", err) + return + } else { + err = s.retryFunc(resp) + } + + // Retry + if err != nil { + if retriesLeft > 1 { + s.l.Errorf("astikit: sending %s failed, sleeping %s and retrying... (%d retries left): %w", nr, s.retrySleep, retriesLeft-1, err) + time.Sleep(s.retrySleep) + } + continue + } + + // Return if conditions for retrying were not met + return + } + + // Max retries limit reached + err = fmt.Errorf("astikit: sending %s failed after %d tries: %w", name, tries, err) + return +} + +// HTTPSendJSONOptions represents SendJSON options +type HTTPSendJSONOptions struct { + BodyError interface{} + BodyIn interface{} + BodyOut interface{} + Headers map[string]string + Method string + URL string +} + +// SendJSON sends a new JSON HTTP request +func (s *HTTPSender) SendJSON(o HTTPSendJSONOptions) (err error) { + // Marshal body in + var bi io.Reader + if o.BodyIn != nil { + bb := &bytes.Buffer{} + if err = json.NewEncoder(bb).Encode(o.BodyIn); err != nil { + err = fmt.Errorf("astikit: marshaling body in failed: %w", err) + return + } + bi = bb + } + + // Create request + var req *http.Request + if req, err = http.NewRequest(o.Method, o.URL, bi); err != nil { + err = fmt.Errorf("astikit: creating request failed: %w", err) + return + } + + // Add headers + for k, v := range o.Headers { + req.Header.Set(k, v) + } + + // Send request 
+ var resp *http.Response + if resp, err = s.Send(req); err != nil { + err = fmt.Errorf("astikit: sending request failed: %w", err) + return + } + defer resp.Body.Close() + + // Process status code + if code := resp.StatusCode; code < 200 || code > 299 { + // Try unmarshaling error + if o.BodyError != nil { + if err2 := json.NewDecoder(resp.Body).Decode(o.BodyError); err2 == nil { + err = ErrHTTPSenderUnmarshaledError + return + } + } + + // Default error + err = fmt.Errorf("astikit: invalid status code %d", code) + return + } + + // Unmarshal body out + if o.BodyOut != nil { + if err = json.NewDecoder(resp.Body).Decode(o.BodyOut); err != nil { + err = fmt.Errorf("astikit: unmarshaling failed: %w", err) + return + } + } + return +} + +// HTTPResponseFunc is a func that can process an $http.Response +type HTTPResponseFunc func(resp *http.Response) error + +func defaultHTTPResponseFunc(resp *http.Response) (err error) { + if resp.StatusCode < http.StatusOK || resp.StatusCode >= http.StatusMultipleChoices { + err = fmt.Errorf("astikit: invalid status code %d", resp.StatusCode) + return + } + return +} + +// HTTPDownloader represents an object capable of downloading several HTTP srcs simultaneously +// and doing stuff to the results +type HTTPDownloader struct { + bp *BufferPool + l *GoroutineLimiter + responseFunc HTTPResponseFunc + s *HTTPSender +} + +// HTTPDownloaderOptions represents HTTPDownloader options +type HTTPDownloaderOptions struct { + Limiter GoroutineLimiterOptions + ResponseFunc HTTPResponseFunc + Sender HTTPSenderOptions +} + +// NewHTTPDownloader creates a new HTTPDownloader +func NewHTTPDownloader(o HTTPDownloaderOptions) (d *HTTPDownloader) { + d = &HTTPDownloader{ + bp: NewBufferPool(), + l: NewGoroutineLimiter(o.Limiter), + responseFunc: o.ResponseFunc, + s: NewHTTPSender(o.Sender), + } + if d.responseFunc == nil { + d.responseFunc = defaultHTTPResponseFunc + } + return +} + +// Close closes the downloader properly +func (d *HTTPDownloader) 
Close() error { + return d.l.Close() +} + +type HTTPDownloaderSrc struct { + Body io.Reader + Header http.Header + Method string + URL string +} + +// It is the responsibility of the caller to call i.Close() +type httpDownloaderFunc func(ctx context.Context, idx int, i *BufferPoolItem) error + +func (d *HTTPDownloader) do(ctx context.Context, fn httpDownloaderFunc, idx int, src HTTPDownloaderSrc) (err error) { + // Defaults + if src.Method == "" { + src.Method = http.MethodGet + } + + // Create request + var r *http.Request + if r, err = http.NewRequestWithContext(ctx, src.Method, src.URL, src.Body); err != nil { + err = fmt.Errorf("astikit: creating request to %s failed: %w", src.URL, err) + return + } + + // Copy header + for k := range src.Header { + r.Header.Set(k, src.Header.Get(k)) + } + + // Send request + var resp *http.Response + if resp, err = d.s.Send(r); err != nil { + err = fmt.Errorf("astikit: sending request to %s failed: %w", src.URL, err) + return + } + defer resp.Body.Close() + + // Create buffer pool item + buf := d.bp.New() + + // Process response + if err = d.responseFunc(resp); err != nil { + err = fmt.Errorf("astikit: response for request to %s is invalid: %w", src.URL, err) + return + } + + // Copy body + if _, err = Copy(ctx, buf, resp.Body); err != nil { + err = fmt.Errorf("astikit: copying body of %s failed: %w", src.URL, err) + return + } + + // Custom + if err = fn(ctx, idx, buf); err != nil { + err = fmt.Errorf("astikit: custom callback on %s failed: %w", src.URL, err) + return + } + return +} + +func (d *HTTPDownloader) download(ctx context.Context, srcs []HTTPDownloaderSrc, fn httpDownloaderFunc) (err error) { + // Nothing to download + if len(srcs) == 0 { + return nil + } + + // Loop through srcs + wg := &sync.WaitGroup{} + wg.Add(len(srcs)) + for idx, src := range srcs { + func(idx int, src HTTPDownloaderSrc) { + // Update error with ctx + if ctx.Err() != nil { + err = ctx.Err() + } + + // Do nothing if error + if err != nil { + 
wg.Done() + return + } + + // Do + d.l.Do(func() { + // Task is done + defer wg.Done() + + // Do + if errD := d.do(ctx, fn, idx, src); errD != nil && err == nil { + err = errD + return + } + }) + }(idx, src) + } + + // Wait + wg.Wait() + return +} + +// DownloadInDirectory downloads in parallel a set of srcs and saves them in a dst directory +func (d *HTTPDownloader) DownloadInDirectory(ctx context.Context, dst string, srcs ...HTTPDownloaderSrc) error { + return d.download(ctx, srcs, func(ctx context.Context, idx int, buf *BufferPoolItem) (err error) { + // Make sure to close buffer + defer buf.Close() + + // Make sure destination directory exists + if err = os.MkdirAll(dst, DefaultDirMode); err != nil { + err = fmt.Errorf("astikit: mkdirall %s failed: %w", dst, err) + return + } + + // Create destination file + var f *os.File + dst := filepath.Join(dst, filepath.Base(srcs[idx].URL)) + if f, err = os.Create(dst); err != nil { + err = fmt.Errorf("astikit: creating %s failed: %w", dst, err) + return + } + defer f.Close() + + // Copy buffer + if _, err = Copy(ctx, f, buf); err != nil { + err = fmt.Errorf("astikit: copying content to %s failed: %w", dst, err) + return + } + return + }) +} + +// DownloadInWriter downloads in parallel a set of srcs and concatenates them in a writer while +// maintaining the initial order +func (d *HTTPDownloader) DownloadInWriter(ctx context.Context, dst io.Writer, srcs ...HTTPDownloaderSrc) error { + // Init + type chunk struct { + buf *BufferPoolItem + idx int + } + var cs []chunk + var m sync.Mutex // Locks cs + var requiredIdx int + + // Make sure to close all buffers + defer func() { + for _, c := range cs { + c.buf.Close() + } + }() + + // Download + return d.download(ctx, srcs, func(ctx context.Context, idx int, buf *BufferPoolItem) (err error) { + // Lock + m.Lock() + defer m.Unlock() + + // Check where to insert chunk + var idxInsert = -1 + for idxChunk := 0; idxChunk < len(cs); idxChunk++ { + if idx < cs[idxChunk].idx { + 
idxInsert = idxChunk + break + } + } + + // Create chunk + c := chunk{ + buf: buf, + idx: idx, + } + + // Add chunk + if idxInsert > -1 { + cs = append(cs[:idxInsert], append([]chunk{c}, cs[idxInsert:]...)...) + } else { + cs = append(cs, c) + } + + // Loop through chunks + for idxChunk := 0; idxChunk < len(cs); idxChunk++ { + // Get chunk + c := cs[idxChunk] + + // The chunk should be copied + if c.idx == requiredIdx { + // Copy chunk content + // Do not check error right away since we still want to close the buffer + // and remove the chunk + _, err = Copy(ctx, dst, c.buf) + + // Close buffer + c.buf.Close() + + // Remove chunk + requiredIdx++ + cs = append(cs[:idxChunk], cs[idxChunk+1:]...) + idxChunk-- + + // Check error + if err != nil { + err = fmt.Errorf("astikit: copying chunk #%d to dst failed: %w", c.idx, err) + return + } + } + } + return + }) +} + +// DownloadInFile downloads in parallel a set of srcs and concatenates them in a dst file while +// maintaining the initial order +func (d *HTTPDownloader) DownloadInFile(ctx context.Context, dst string, srcs ...HTTPDownloaderSrc) (err error) { + // Make sure destination directory exists + if err = os.MkdirAll(filepath.Dir(dst), DefaultDirMode); err != nil { + err = fmt.Errorf("astikit: mkdirall %s failed: %w", filepath.Dir(dst), err) + return + } + + // Create destination file + var f *os.File + if f, err = os.Create(dst); err != nil { + err = fmt.Errorf("astikit: creating %s failed: %w", dst, err) + return + } + defer f.Close() + + // Download in writer + return d.DownloadInWriter(ctx, f, srcs...) +} + +// HTTPMiddleware represents an HTTP middleware +type HTTPMiddleware func(http.Handler) http.Handler + +// ChainHTTPMiddlewares chains HTTP middlewares +func ChainHTTPMiddlewares(h http.Handler, ms ...HTTPMiddleware) http.Handler { + return ChainHTTPMiddlewaresWithPrefix(h, []string{}, ms...) 
+} + +// ChainHTTPMiddlewaresWithPrefix chains HTTP middlewares if one of prefixes is present +func ChainHTTPMiddlewaresWithPrefix(h http.Handler, prefixes []string, ms ...HTTPMiddleware) http.Handler { + for _, m := range ms { + if m == nil { + continue + } + if len(prefixes) == 0 { + h = m(h) + } else { + t := h + h = http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + for _, prefix := range prefixes { + if strings.HasPrefix(r.URL.EscapedPath(), prefix) { + m(t).ServeHTTP(rw, r) + return + } + } + t.ServeHTTP(rw, r) + }) + } + } + return h +} + +func handleHTTPBasicAuth(username, password string, rw http.ResponseWriter, r *http.Request) bool { + if u, p, ok := r.BasicAuth(); !ok || u != username || p != password { + rw.Header().Set("WWW-Authenticate", "Basic Realm=Please enter your credentials") + rw.WriteHeader(http.StatusUnauthorized) + return true + } + return false +} + +// HTTPMiddlewareBasicAuth adds basic HTTP auth to an HTTP handler +func HTTPMiddlewareBasicAuth(username, password string) HTTPMiddleware { + if username == "" && password == "" { + return nil + } + return func(h http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + // Handle basic auth + if handleHTTPBasicAuth(username, password, rw, r) { + return + } + + // Next handler + h.ServeHTTP(rw, r) + }) + } +} + +func setHTTPContentType(contentType string, rw http.ResponseWriter) { + rw.Header().Set("Content-Type", contentType) +} + +// HTTPMiddlewareContentType adds a content type to an HTTP handler +func HTTPMiddlewareContentType(contentType string) HTTPMiddleware { + return func(h http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + // Set content type + setHTTPContentType(contentType, rw) + + // Next handler + h.ServeHTTP(rw, r) + }) + } +} + +func setHTTPHeaders(vs map[string]string, rw http.ResponseWriter) { + for k, v := range vs { + rw.Header().Set(k, v) + } +} + +// 
HTTPMiddlewareHeaders adds headers to an HTTP handler +func HTTPMiddlewareHeaders(vs map[string]string) HTTPMiddleware { + return func(h http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + // Set headers + setHTTPHeaders(vs, rw) + + // Next handler + h.ServeHTTP(rw, r) + }) + } +} + +// HTTPMiddlewareCORSHeaders adds CORS headers to an HTTP handler +func HTTPMiddlewareCORSHeaders() HTTPMiddleware { + return HTTPMiddlewareHeaders(map[string]string{ + "Access-Control-Allow-Headers": "*", + "Access-Control-Allow-Methods": "*", + "Access-Control-Allow-Origin": "*", + }) +} diff --git a/vendor/github.com/asticode/go-astikit/io.go b/vendor/github.com/asticode/go-astikit/io.go new file mode 100644 index 000000000..d9a2312c6 --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/io.go @@ -0,0 +1,121 @@ +package astikit + +import ( + "bytes" + "context" + "io" +) + +// Copy is a copy with a context +func Copy(ctx context.Context, dst io.Writer, src io.Reader) (int64, error) { + return io.Copy(dst, NewCtxReader(ctx, src)) +} + +type nopCloser struct { + io.Writer +} + +func (nopCloser) Close() error { return nil } + +// NopCloser returns a WriteCloser with a no-op Close method wrapping +// the provided Writer w. 
+func NopCloser(w io.Writer) io.WriteCloser { + return nopCloser{w} +} + +// CtxReader represents a reader with a context +type CtxReader struct { + ctx context.Context + reader io.Reader +} + +// NewCtxReader creates a reader with a context +func NewCtxReader(ctx context.Context, r io.Reader) *CtxReader { + return &CtxReader{ + ctx: ctx, + reader: r, + } +} + +// Read implements the io.Reader interface +func (r *CtxReader) Read(p []byte) (n int, err error) { + // Check context + if err = r.ctx.Err(); err != nil { + return + } + + // Read + return r.reader.Read(p) +} + +// WriterAdapter represents an object that can adapt a Writer +type WriterAdapter struct { + buffer *bytes.Buffer + o WriterAdapterOptions +} + +// WriterAdapterOptions represents WriterAdapter options +type WriterAdapterOptions struct { + Callback func(i []byte) + Split []byte +} + +// NewWriterAdapter creates a new WriterAdapter +func NewWriterAdapter(o WriterAdapterOptions) *WriterAdapter { + return &WriterAdapter{ + buffer: &bytes.Buffer{}, + o: o, + } +} + +// Close closes the adapter properly +func (w *WriterAdapter) Close() error { + if w.buffer.Len() > 0 { + w.write(w.buffer.Bytes()) + } + return nil +} + +// Write implements the io.Writer interface +func (w *WriterAdapter) Write(i []byte) (n int, err error) { + // Update n to avoid broken pipe error + defer func() { + n = len(i) + }() + + // Split + if len(w.o.Split) > 0 { + // Split bytes are not present, write in buffer + if !bytes.Contains(i, w.o.Split) { + w.buffer.Write(i) + return + } + + // Loop in split items + items := bytes.Split(i, w.o.Split) + for i := 0; i < len(items)-1; i++ { + // If this is the first item, prepend the buffer + if i == 0 { + items[i] = append(w.buffer.Bytes(), items[i]...) 
+ w.buffer.Reset() + } + + // Write + w.write(items[i]) + } + + // Add remaining to buffer + w.buffer.Write(items[len(items)-1]) + return + } + + // By default, forward the bytes + w.write(i) + return +} + +func (w *WriterAdapter) write(i []byte) { + if w.o.Callback != nil { + w.o.Callback(i) + } +} diff --git a/vendor/github.com/asticode/go-astikit/limiter.go b/vendor/github.com/asticode/go-astikit/limiter.go new file mode 100644 index 000000000..4eadfc016 --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/limiter.go @@ -0,0 +1,101 @@ +package astikit + +import ( + "context" + "sync" + "time" +) + +// Limiter represents a limiter +type Limiter struct { + buckets map[string]*LimiterBucket + m *sync.Mutex // Locks buckets +} + +// NewLimiter creates a new limiter +func NewLimiter() *Limiter { + return &Limiter{ + buckets: make(map[string]*LimiterBucket), + m: &sync.Mutex{}, + } +} + +// Add adds a new bucket +func (l *Limiter) Add(name string, cap int, period time.Duration) *LimiterBucket { + l.m.Lock() + defer l.m.Unlock() + if _, ok := l.buckets[name]; !ok { + l.buckets[name] = newLimiterBucket(cap, period) + } + return l.buckets[name] +} + +// Bucket retrieves a bucket from the limiter +func (l *Limiter) Bucket(name string) (b *LimiterBucket, ok bool) { + l.m.Lock() + defer l.m.Unlock() + b, ok = l.buckets[name] + return +} + +// Close closes the limiter properly +func (l *Limiter) Close() { + l.m.Lock() + defer l.m.Unlock() + for _, b := range l.buckets { + b.Close() + } +} + +// LimiterBucket represents a limiter bucket +type LimiterBucket struct { + cancel context.CancelFunc + cap int + ctx context.Context + count int + period time.Duration + o *sync.Once +} + +// newLimiterBucket creates a new bucket +func newLimiterBucket(cap int, period time.Duration) (b *LimiterBucket) { + b = &LimiterBucket{ + cap: cap, + count: 0, + period: period, + o: &sync.Once{}, + } + b.ctx, b.cancel = context.WithCancel(context.Background()) + go b.tick() + return +} + +// 
Inc increments the bucket count +func (b *LimiterBucket) Inc() bool { + if b.count >= b.cap { + return false + } + b.count++ + return true +} + +// tick runs a ticker to purge the bucket +func (b *LimiterBucket) tick() { + var t = time.NewTicker(b.period) + defer t.Stop() + for { + select { + case <-t.C: + b.count = 0 + case <-b.ctx.Done(): + return + } + } +} + +// close closes the bucket properly +func (b *LimiterBucket) Close() { + b.o.Do(func() { + b.cancel() + }) +} diff --git a/vendor/github.com/asticode/go-astikit/logger.go b/vendor/github.com/asticode/go-astikit/logger.go new file mode 100644 index 000000000..b7623b992 --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/logger.go @@ -0,0 +1,171 @@ +package astikit + +import ( + "context" +) + +// CompleteLogger represents a complete logger +type CompleteLogger interface { + StdLogger + SeverityLogger + SeverityCtxLogger +} + +// StdLogger represents a standard logger +type StdLogger interface { + Fatal(v ...interface{}) + Fatalf(format string, v ...interface{}) + Print(v ...interface{}) + Printf(format string, v ...interface{}) +} + +// SeverityLogger represents a severity logger +type SeverityLogger interface { + Debug(v ...interface{}) + Debugf(format string, v ...interface{}) + Error(v ...interface{}) + Errorf(format string, v ...interface{}) + Info(v ...interface{}) + Infof(format string, v ...interface{}) + Warn(v ...interface{}) + Warnf(format string, v ...interface{}) +} + +// SeverityCtxLogger represents a severity with context logger +type SeverityCtxLogger interface { + DebugC(ctx context.Context, v ...interface{}) + DebugCf(ctx context.Context, format string, v ...interface{}) + ErrorC(ctx context.Context, v ...interface{}) + ErrorCf(ctx context.Context, format string, v ...interface{}) + FatalC(ctx context.Context, v ...interface{}) + FatalCf(ctx context.Context, format string, v ...interface{}) + InfoC(ctx context.Context, v ...interface{}) + InfoCf(ctx context.Context, format string, v 
...interface{}) + WarnC(ctx context.Context, v ...interface{}) + WarnCf(ctx context.Context, format string, v ...interface{}) +} + +type completeLogger struct { + print, debug, error, fatal, info, warn func(v ...interface{}) + printf, debugf, errorf, fatalf, infof, warnf func(format string, v ...interface{}) + debugC, errorC, fatalC, infoC, warnC func(ctx context.Context, v ...interface{}) + debugCf, errorCf, fatalCf, infoCf, warnCf func(ctx context.Context, format string, v ...interface{}) +} + +func newCompleteLogger() *completeLogger { + return &completeLogger{ + debug: func(v ...interface{}) {}, + debugf: func(format string, v ...interface{}) {}, + debugC: func(ctx context.Context, v ...interface{}) {}, + debugCf: func(ctx context.Context, format string, v ...interface{}) {}, + error: func(v ...interface{}) {}, + errorf: func(format string, v ...interface{}) {}, + errorC: func(ctx context.Context, v ...interface{}) {}, + errorCf: func(ctx context.Context, format string, v ...interface{}) {}, + fatal: func(v ...interface{}) {}, + fatalf: func(format string, v ...interface{}) {}, + fatalC: func(ctx context.Context, v ...interface{}) {}, + fatalCf: func(ctx context.Context, format string, v ...interface{}) {}, + info: func(v ...interface{}) {}, + infof: func(format string, v ...interface{}) {}, + infoC: func(ctx context.Context, v ...interface{}) {}, + infoCf: func(ctx context.Context, format string, v ...interface{}) {}, + print: func(v ...interface{}) {}, + printf: func(format string, v ...interface{}) {}, + warn: func(v ...interface{}) {}, + warnf: func(format string, v ...interface{}) {}, + warnC: func(ctx context.Context, v ...interface{}) {}, + warnCf: func(ctx context.Context, format string, v ...interface{}) {}, + } +} + +func (l *completeLogger) Debug(v ...interface{}) { l.debug(v...) } +func (l *completeLogger) Debugf(format string, v ...interface{}) { l.debugf(format, v...) 
} +func (l *completeLogger) DebugC(ctx context.Context, v ...interface{}) { l.debugC(ctx, v...) } +func (l *completeLogger) DebugCf(ctx context.Context, format string, v ...interface{}) { + l.debugCf(ctx, format, v...) +} +func (l *completeLogger) Error(v ...interface{}) { l.error(v...) } +func (l *completeLogger) Errorf(format string, v ...interface{}) { l.errorf(format, v...) } +func (l *completeLogger) ErrorC(ctx context.Context, v ...interface{}) { l.errorC(ctx, v...) } +func (l *completeLogger) ErrorCf(ctx context.Context, format string, v ...interface{}) { + l.errorCf(ctx, format, v...) +} +func (l *completeLogger) Fatal(v ...interface{}) { l.fatal(v...) } +func (l *completeLogger) Fatalf(format string, v ...interface{}) { l.fatalf(format, v...) } +func (l *completeLogger) FatalC(ctx context.Context, v ...interface{}) { l.fatalC(ctx, v...) } +func (l *completeLogger) FatalCf(ctx context.Context, format string, v ...interface{}) { + l.fatalCf(ctx, format, v...) +} +func (l *completeLogger) Info(v ...interface{}) { l.info(v...) } +func (l *completeLogger) Infof(format string, v ...interface{}) { l.infof(format, v...) } +func (l *completeLogger) InfoC(ctx context.Context, v ...interface{}) { l.infoC(ctx, v...) } +func (l *completeLogger) InfoCf(ctx context.Context, format string, v ...interface{}) { + l.infoCf(ctx, format, v...) +} +func (l *completeLogger) Print(v ...interface{}) { l.print(v...) } +func (l *completeLogger) Printf(format string, v ...interface{}) { l.printf(format, v...) } +func (l *completeLogger) Warn(v ...interface{}) { l.warn(v...) } +func (l *completeLogger) Warnf(format string, v ...interface{}) { l.warnf(format, v...) } +func (l *completeLogger) WarnC(ctx context.Context, v ...interface{}) { l.warnC(ctx, v...) } +func (l *completeLogger) WarnCf(ctx context.Context, format string, v ...interface{}) { + l.warnCf(ctx, format, v...) 
+} + +// AdaptStdLogger transforms an StdLogger into a CompleteLogger if needed +func AdaptStdLogger(i StdLogger) CompleteLogger { + if v, ok := i.(CompleteLogger); ok { + return v + } + l := newCompleteLogger() + if i == nil { + return l + } + l.fatal = i.Fatal + l.fatalf = i.Fatalf + l.print = i.Print + l.printf = i.Printf + if v, ok := i.(SeverityLogger); ok { + l.debug = v.Debug + l.debugf = v.Debugf + l.error = v.Error + l.errorf = v.Errorf + l.info = v.Info + l.infof = v.Infof + l.warn = v.Warn + l.warnf = v.Warnf + } else { + l.debug = l.print + l.debugf = l.printf + l.error = l.print + l.errorf = l.printf + l.info = l.print + l.infof = l.printf + l.warn = l.print + l.warnf = l.printf + } + if v, ok := i.(SeverityCtxLogger); ok { + l.debugC = v.DebugC + l.debugCf = v.DebugCf + l.errorC = v.ErrorC + l.errorCf = v.ErrorCf + l.fatalC = v.FatalC + l.fatalCf = v.FatalCf + l.infoC = v.InfoC + l.infoCf = v.InfoCf + l.warnC = v.WarnC + l.warnCf = v.WarnCf + } else { + l.debugC = func(ctx context.Context, v ...interface{}) { l.debug(v...) } + l.debugCf = func(ctx context.Context, format string, v ...interface{}) { l.debugf(format, v...) } + l.errorC = func(ctx context.Context, v ...interface{}) { l.error(v...) } + l.errorCf = func(ctx context.Context, format string, v ...interface{}) { l.errorf(format, v...) } + l.fatalC = func(ctx context.Context, v ...interface{}) { l.fatal(v...) } + l.fatalCf = func(ctx context.Context, format string, v ...interface{}) { l.fatalf(format, v...) } + l.infoC = func(ctx context.Context, v ...interface{}) { l.info(v...) } + l.infoCf = func(ctx context.Context, format string, v ...interface{}) { l.infof(format, v...) } + l.warnC = func(ctx context.Context, v ...interface{}) { l.warn(v...) } + l.warnCf = func(ctx context.Context, format string, v ...interface{}) { l.warnf(format, v...) 
} + } + return l +} diff --git a/vendor/github.com/asticode/go-astikit/map.go b/vendor/github.com/asticode/go-astikit/map.go new file mode 100644 index 000000000..8ec17b20f --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/map.go @@ -0,0 +1,67 @@ +package astikit + +import ( + "fmt" + "sync" +) + +// BiMap represents a bidirectional map +type BiMap struct { + forward map[interface{}]interface{} + inverse map[interface{}]interface{} + m *sync.Mutex +} + +// NewBiMap creates a new BiMap +func NewBiMap() *BiMap { + return &BiMap{ + forward: make(map[interface{}]interface{}), + inverse: make(map[interface{}]interface{}), + m: &sync.Mutex{}, + } +} + +func (m *BiMap) get(k interface{}, i map[interface{}]interface{}) (v interface{}, ok bool) { + m.m.Lock() + defer m.m.Unlock() + v, ok = i[k] + return +} + +// Get gets the value in the forward map based on the provided key +func (m *BiMap) Get(k interface{}) (interface{}, bool) { return m.get(k, m.forward) } + +// GetInverse gets the value in the inverse map based on the provided key +func (m *BiMap) GetInverse(k interface{}) (interface{}, bool) { return m.get(k, m.inverse) } + +// MustGet gets the value in the forward map based on the provided key and panics if key is not found +func (m *BiMap) MustGet(k interface{}) interface{} { + v, ok := m.get(k, m.forward) + if !ok { + panic(fmt.Sprintf("astikit: key %+v not found in foward map", k)) + } + return v +} + +// MustGetInverse gets the value in the inverse map based on the provided key and panics if key is not found +func (m *BiMap) MustGetInverse(k interface{}) interface{} { + v, ok := m.get(k, m.inverse) + if !ok { + panic(fmt.Sprintf("astikit: key %+v not found in inverse map", k)) + } + return v +} + +func (m *BiMap) set(k, v interface{}, f, i map[interface{}]interface{}) *BiMap { + m.m.Lock() + defer m.m.Unlock() + f[k] = v + i[v] = k + return m +} + +// Set sets the value in the forward and inverse map for the provided forward key +func (m *BiMap) Set(k, v 
interface{}) *BiMap { return m.set(k, v, m.forward, m.inverse) } + +// SetInverse sets the value in the forward and inverse map for the provided inverse key +func (m *BiMap) SetInverse(k, v interface{}) *BiMap { return m.set(k, v, m.inverse, m.forward) } diff --git a/vendor/github.com/asticode/go-astikit/os.go b/vendor/github.com/asticode/go-astikit/os.go new file mode 100644 index 000000000..3c9895c12 --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/os.go @@ -0,0 +1,148 @@ +package astikit + +import ( + "context" + "fmt" + "os" + "path/filepath" + "strings" +) + +// MoveFile is a cancellable move of a local file to a local or remote location +func MoveFile(ctx context.Context, dst, src string, f CopyFileFunc) (err error) { + // Copy + if err = CopyFile(ctx, dst, src, f); err != nil { + err = fmt.Errorf("astikit: copying file %s to %s failed: %w", src, dst, err) + return + } + + // Delete + if err = os.Remove(src); err != nil { + err = fmt.Errorf("astikit: removing %s failed: %w", src, err) + return + } + return +} + +// CopyFileFunc represents a CopyFile func +type CopyFileFunc func(ctx context.Context, dst string, srcStat os.FileInfo, srcFile *os.File) error + +// CopyFile is a cancellable copy of a local file to a local or remote location +func CopyFile(ctx context.Context, dst, src string, f CopyFileFunc) (err error) { + // Check context + if err = ctx.Err(); err != nil { + return + } + + // Stat src + var srcStat os.FileInfo + if srcStat, err = os.Stat(src); err != nil { + err = fmt.Errorf("astikit: stating %s failed: %w", src, err) + return + } + + // Src is a dir + if srcStat.IsDir() { + // Walk through the dir + if err = filepath.Walk(src, func(path string, info os.FileInfo, errWalk error) (err error) { + // Check error + if errWalk != nil { + err = errWalk + return + } + + // Do not process root + if src == path { + return + } + + // Copy + p := filepath.Join(dst, strings.TrimPrefix(path, filepath.Clean(src))) + if err = CopyFile(ctx, p, path, f); 
err != nil { + err = fmt.Errorf("astikit: copying %s to %s failed: %w", path, p, err) + return + } + return nil + }); err != nil { + err = fmt.Errorf("astikit: walking through %s failed: %w", src, err) + return + } + return + } + + // Open src + var srcFile *os.File + if srcFile, err = os.Open(src); err != nil { + err = fmt.Errorf("astikit: opening %s failed: %w", src, err) + return + } + defer srcFile.Close() + + // Custom + if err = f(ctx, dst, srcStat, srcFile); err != nil { + err = fmt.Errorf("astikit: custom failed: %w", err) + return + } + return +} + +// LocalCopyFileFunc is the local CopyFileFunc that allows doing cross partition copies +func LocalCopyFileFunc(ctx context.Context, dst string, srcStat os.FileInfo, srcFile *os.File) (err error) { + // Check context + if err = ctx.Err(); err != nil { + return + } + + // Create the destination folder + if err = os.MkdirAll(filepath.Dir(dst), DefaultDirMode); err != nil { + err = fmt.Errorf("astikit: mkdirall %s failed: %w", filepath.Dir(dst), err) + return + } + + // Create the destination file + var dstFile *os.File + if dstFile, err = os.Create(dst); err != nil { + err = fmt.Errorf("astikit: creating %s failed: %w", dst, err) + return + } + defer dstFile.Close() + + // Chmod using os.chmod instead of file.Chmod + if err = os.Chmod(dst, srcStat.Mode()); err != nil { + err = fmt.Errorf("astikit: chmod %s %s failed, %w", dst, srcStat.Mode(), err) + return + } + + // Copy the content + if _, err = Copy(ctx, dstFile, srcFile); err != nil { + err = fmt.Errorf("astikit: copying content of %s to %s failed: %w", srcFile.Name(), dstFile.Name(), err) + return + } + return +} + +// SignalHandler represents a func that can handle a signal +type SignalHandler func(s os.Signal) + +// TermSignalHandler returns a SignalHandler that is executed only on a term signal +func TermSignalHandler(f func()) SignalHandler { + return func(s os.Signal) { + if isTermSignal(s) { + f() + } + } +} + +// LoggerSignalHandler returns a 
SignalHandler that logs the signal +func LoggerSignalHandler(l SeverityLogger, ignoredSignals ...os.Signal) SignalHandler { + ss := make(map[os.Signal]bool) + for _, s := range ignoredSignals { + ss[s] = true + } + return func(s os.Signal) { + if _, ok := ss[s]; ok { + return + } + l.Debugf("astikit: received signal %s", s) + } +} diff --git a/vendor/github.com/asticode/go-astikit/os_js.go b/vendor/github.com/asticode/go-astikit/os_js.go new file mode 100644 index 000000000..5403b2455 --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/os_js.go @@ -0,0 +1,12 @@ +// +build js,wasm + +package astikit + +import ( + "os" + "syscall" +) + +func isTermSignal(s os.Signal) bool { + return s == syscall.SIGKILL || s == syscall.SIGINT || s == syscall.SIGQUIT || s == syscall.SIGTERM +} diff --git a/vendor/github.com/asticode/go-astikit/os_others.go b/vendor/github.com/asticode/go-astikit/os_others.go new file mode 100644 index 000000000..606e178f1 --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/os_others.go @@ -0,0 +1,12 @@ +// +build !js !wasm + +package astikit + +import ( + "os" + "syscall" +) + +func isTermSignal(s os.Signal) bool { + return s == syscall.SIGABRT || s == syscall.SIGKILL || s == syscall.SIGINT || s == syscall.SIGQUIT || s == syscall.SIGTERM +} diff --git a/vendor/github.com/asticode/go-astikit/pcm.go b/vendor/github.com/asticode/go-astikit/pcm.go new file mode 100644 index 000000000..8d6303b7c --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/pcm.go @@ -0,0 +1,426 @@ +package astikit + +import ( + "fmt" + "math" + "sync" + "time" +) + +// PCMLevel computes the PCM level of samples +// https://dsp.stackexchange.com/questions/2951/loudness-of-pcm-stream +// https://dsp.stackexchange.com/questions/290/getting-loudness-of-a-track-with-rms?noredirect=1&lq=1 +func PCMLevel(samples []int) float64 { + // Compute sum of square values + var sum float64 + for _, s := range samples { + sum += math.Pow(float64(s), 2) + } + + // Square root + 
return math.Sqrt(sum / float64(len(samples))) +} + +func maxPCMSample(bitDepth int) int { + return int(math.Pow(2, float64(bitDepth))/2.0) - 1 +} + +// PCMNormalize normalizes the PCM samples +func PCMNormalize(samples []int, bitDepth int) (o []int) { + // Get max sample + var m int + for _, s := range samples { + if v := int(math.Abs(float64(s))); v > m { + m = v + } + } + + // Get max for bit depth + max := maxPCMSample(bitDepth) + + // Loop through samples + for _, s := range samples { + o = append(o, s*max/m) + } + return +} + +// ConvertPCMBitDepth converts the PCM bit depth +func ConvertPCMBitDepth(srcSample int, srcBitDepth, dstBitDepth int) (dstSample int, err error) { + // Nothing to do + if srcBitDepth == dstBitDepth { + dstSample = srcSample + return + } + + // Convert + if srcBitDepth < dstBitDepth { + dstSample = srcSample << uint(dstBitDepth-srcBitDepth) + } else { + dstSample = srcSample >> uint(srcBitDepth-dstBitDepth) + } + return +} + +// PCMSampleFunc is a func that can process a sample +type PCMSampleFunc func(s int) error + +// PCMSampleRateConverter is an object capable of converting a PCM's sample rate +type PCMSampleRateConverter struct { + b [][]int + dstSampleRate int + fn PCMSampleFunc + numChannels int + numChannelsProcessed int + numSamplesOutputed int + numSamplesProcessed int + srcSampleRate int +} + +// NewPCMSampleRateConverter creates a new PCMSampleRateConverter +func NewPCMSampleRateConverter(srcSampleRate, dstSampleRate, numChannels int, fn PCMSampleFunc) *PCMSampleRateConverter { + return &PCMSampleRateConverter{ + b: make([][]int, numChannels), + dstSampleRate: dstSampleRate, + fn: fn, + numChannels: numChannels, + srcSampleRate: srcSampleRate, + } +} + +// Reset resets the converter +func (c *PCMSampleRateConverter) Reset() { + c.b = make([][]int, c.numChannels) + c.numChannelsProcessed = 0 + c.numSamplesOutputed = 0 + c.numSamplesProcessed = 0 +} + +// Add adds a new sample to the converter +func (c *PCMSampleRateConverter) 
Add(i int) (err error) { + // Forward sample + if c.srcSampleRate == c.dstSampleRate { + if err = c.fn(i); err != nil { + err = fmt.Errorf("astikit: handling sample failed: %w", err) + return + } + return + } + + // Increment num channels processed + c.numChannelsProcessed++ + + // Reset num channels processed + if c.numChannelsProcessed > c.numChannels { + c.numChannelsProcessed = 1 + } + + // Only increment num samples processed if all channels have been processed + if c.numChannelsProcessed == c.numChannels { + c.numSamplesProcessed++ + } + + // Append sample to buffer + c.b[c.numChannelsProcessed-1] = append(c.b[c.numChannelsProcessed-1], i) + + // Throw away data + if c.srcSampleRate > c.dstSampleRate { + // Make sure to always keep the first sample but do nothing until we have all channels or target sample has been + // reached + if (c.numSamplesOutputed > 0 && float64(c.numSamplesProcessed) < 1.0+float64(c.numSamplesOutputed)*float64(c.srcSampleRate)/float64(c.dstSampleRate)) || c.numChannelsProcessed < c.numChannels { + return + } + + // Loop through channels + for idx, b := range c.b { + // Merge samples + var s int + for _, v := range b { + s += v + } + s /= len(b) + + // Reset buffer + c.b[idx] = []int{} + + // Custom + if err = c.fn(s); err != nil { + err = fmt.Errorf("astikit: handling sample failed: %w", err) + return + } + } + + // Increment num samples outputted + c.numSamplesOutputed++ + return + } + + // Do nothing until we have all channels + if c.numChannelsProcessed < c.numChannels { + return + } + + // Repeat data + for c.numSamplesOutputed == 0 || float64(c.numSamplesProcessed)+1.0 > 1.0+float64(c.numSamplesOutputed)*float64(c.srcSampleRate)/float64(c.dstSampleRate) { + // Loop through channels + for _, b := range c.b { + // Invalid length + if len(b) != 1 { + err = fmt.Errorf("astikit: invalid buffer item length %d", len(b)) + return + } + + // Custom + if err = c.fn(b[0]); err != nil { + err = fmt.Errorf("astikit: handling sample failed: 
%w", err) + return + } + } + + // Increment num samples outputted + c.numSamplesOutputed++ + } + + // Reset buffer + c.b = make([][]int, c.numChannels) + return +} + +// PCMChannelsConverter is an object of converting PCM's channels +type PCMChannelsConverter struct { + dstNumChannels int + fn PCMSampleFunc + srcNumChannels int + srcSamples int +} + +// NewPCMChannelsConverter creates a new PCMChannelsConverter +func NewPCMChannelsConverter(srcNumChannels, dstNumChannels int, fn PCMSampleFunc) *PCMChannelsConverter { + return &PCMChannelsConverter{ + dstNumChannels: dstNumChannels, + fn: fn, + srcNumChannels: srcNumChannels, + } +} + +// Reset resets the converter +func (c *PCMChannelsConverter) Reset() { + c.srcSamples = 0 +} + +// Add adds a new sample to the converter +func (c *PCMChannelsConverter) Add(i int) (err error) { + // Forward sample + if c.srcNumChannels == c.dstNumChannels { + if err = c.fn(i); err != nil { + err = fmt.Errorf("astikit: handling sample failed: %w", err) + return + } + return + } + + // Reset + if c.srcSamples == c.srcNumChannels { + c.srcSamples = 0 + } + + // Increment src samples + c.srcSamples++ + + // Throw away data + if c.srcNumChannels > c.dstNumChannels { + // Throw away sample + if c.srcSamples > c.dstNumChannels { + return + } + + // Custom + if err = c.fn(i); err != nil { + err = fmt.Errorf("astikit: handling sample failed: %w", err) + return + } + return + } + + // Store + var ss []int + if c.srcSamples < c.srcNumChannels { + ss = []int{i} + } else { + // Repeat data + for idx := c.srcNumChannels; idx <= c.dstNumChannels; idx++ { + ss = append(ss, i) + } + } + + // Loop through samples + for _, s := range ss { + // Custom + if err = c.fn(s); err != nil { + err = fmt.Errorf("astikit: handling sample failed: %w", err) + return + } + } + return +} + +// PCMSilenceDetector represents a PCM silence detector +type PCMSilenceDetector struct { + analyses []pcmSilenceDetectorAnalysis + buf []int + m *sync.Mutex // Locks buf + 
minAnalysesPerSilence int + o PCMSilenceDetectorOptions + samplesPerAnalysis int +} + +type pcmSilenceDetectorAnalysis struct { + level float64 + samples []int +} + +// PCMSilenceDetectorOptions represents a PCM silence detector options +type PCMSilenceDetectorOptions struct { + MaxSilenceLevel float64 `toml:"max_silence_level"` + MinSilenceDuration time.Duration `toml:"min_silence_duration"` + SampleRate int `toml:"sample_rate"` + StepDuration time.Duration `toml:"step_duration"` +} + +// NewPCMSilenceDetector creates a new silence detector +func NewPCMSilenceDetector(o PCMSilenceDetectorOptions) (d *PCMSilenceDetector) { + // Create + d = &PCMSilenceDetector{ + m: &sync.Mutex{}, + o: o, + } + + // Reset + d.Reset() + + // Default option values + if d.o.MinSilenceDuration == 0 { + d.o.MinSilenceDuration = time.Second + } + if d.o.StepDuration == 0 { + d.o.StepDuration = 30 * time.Millisecond + } + + // Compute attributes depending on options + d.samplesPerAnalysis = int(math.Floor(float64(d.o.SampleRate) * d.o.StepDuration.Seconds())) + d.minAnalysesPerSilence = int(math.Floor(d.o.MinSilenceDuration.Seconds() / d.o.StepDuration.Seconds())) + return +} + +// Reset resets the silence detector +func (d *PCMSilenceDetector) Reset() { + // Lock + d.m.Lock() + defer d.m.Unlock() + + // Reset + d.analyses = []pcmSilenceDetectorAnalysis{} + d.buf = []int{} +} + +// Add adds samples to the buffer and checks whether there are valid samples between silences +func (d *PCMSilenceDetector) Add(samples []int) (validSamples [][]int) { + // Lock + d.m.Lock() + defer d.m.Unlock() + + // Append samples to buffer + d.buf = append(d.buf, samples...) 
+ + // Analyze samples by step + for len(d.buf) >= d.samplesPerAnalysis { + // Append analysis + d.analyses = append(d.analyses, pcmSilenceDetectorAnalysis{ + level: PCMLevel(d.buf[:d.samplesPerAnalysis]), + samples: append([]int(nil), d.buf[:d.samplesPerAnalysis]...), + }) + + // Remove samples from buffer + d.buf = d.buf[d.samplesPerAnalysis:] + } + + // Loop through analyses + var leadingSilence, inBetween, trailingSilence int + for i := 0; i < len(d.analyses); i++ { + if d.analyses[i].level < d.o.MaxSilenceLevel { + // This is a silence + + // This is a leading silence + if inBetween == 0 { + leadingSilence++ + + // The leading silence is valid + // We can trim its useless part + if leadingSilence > d.minAnalysesPerSilence { + d.analyses = d.analyses[leadingSilence-d.minAnalysesPerSilence:] + i -= leadingSilence - d.minAnalysesPerSilence + leadingSilence = d.minAnalysesPerSilence + } + continue + } + + // This is a trailing silence + trailingSilence++ + + // Trailing silence is invalid + if trailingSilence < d.minAnalysesPerSilence { + continue + } + + // Trailing silence is valid + // Loop through analyses + var ss []int + for _, a := range d.analyses[:i+1] { + ss = append(ss, a.samples...) 
+ } + + // Append valid samples + validSamples = append(validSamples, ss) + + // Remove leading silence and non silence + d.analyses = d.analyses[leadingSilence+inBetween:] + i -= leadingSilence + inBetween + + // Reset counts + leadingSilence, inBetween, trailingSilence = trailingSilence, 0, 0 + } else { + // This is not a silence + + // This is a leading non silence + // We need to remove it + if i == 0 { + d.analyses = d.analyses[1:] + i = -1 + continue + } + + // This is the first in-between + if inBetween == 0 { + // The leading silence is invalid + // We need to remove it as well as this first non silence + if leadingSilence < d.minAnalysesPerSilence { + d.analyses = d.analyses[i+1:] + i = -1 + continue + } + } + + // This non-silence was preceded by a silence not big enough to be a valid trailing silence + // We incorporate it in the in-between + if trailingSilence > 0 { + inBetween += trailingSilence + trailingSilence = 0 + } + + // This is an in-between + inBetween++ + continue + } + } + return +} diff --git a/vendor/github.com/asticode/go-astikit/ptr.go b/vendor/github.com/asticode/go-astikit/ptr.go new file mode 100644 index 000000000..fb3e7f5ae --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/ptr.go @@ -0,0 +1,58 @@ +package astikit + +import "time" + +// BoolPtr transforms a bool into a *bool +func BoolPtr(i bool) *bool { + return &i +} + +// BytePtr transforms a byte into a *byte +func BytePtr(i byte) *byte { + return &i +} + +// DurationPtr transforms a time.Duration into a *time.Duration +func DurationPtr(i time.Duration) *time.Duration { + return &i +} + +// Float64Ptr transforms a float64 into a *float64 +func Float64Ptr(i float64) *float64 { + return &i +} + +// IntPtr transforms an int into an *int +func IntPtr(i int) *int { + return &i +} + +// Int64Ptr transforms an int64 into an *int64 +func Int64Ptr(i int64) *int64 { + return &i +} + +// StrSlicePtr transforms a []string into a *[]string +func StrSlicePtr(i []string) *[]string { + 
return &i +} + +// StrPtr transforms a string into a *string +func StrPtr(i string) *string { + return &i +} + +// TimePtr transforms a time.Time into a *time.Time +func TimePtr(i time.Time) *time.Time { + return &i +} + +// UInt8Ptr transforms a uint8 into a *uint8 +func UInt8Ptr(i uint8) *uint8 { + return &i +} + +// UInt32Ptr transforms a uint32 into a *uint32 +func UInt32Ptr(i uint32) *uint32 { + return &i +} diff --git a/vendor/github.com/asticode/go-astikit/rand.go b/vendor/github.com/asticode/go-astikit/rand.go new file mode 100644 index 000000000..c10e6db38 --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/rand.go @@ -0,0 +1,36 @@ +package astikit + +import ( + "math/rand" + "strings" + "time" +) + +const ( + randLetterBytes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890" + randLetterIdxBits = 6 // 6 bits to represent a letter index + randLetterIdxMask = 1<<randLetterIdxBits - 1 // All 1-bits, as many as randLetterIdxBits + randLetterIdxMax = 63 / randLetterIdxBits // # of letter indices fitting in 63 bits +) + +var randSrc = rand.NewSource(time.Now().UnixNano()) + +// RandStr generates a random string of length n +// https://stackoverflow.com/questions/22892120/how-to-generate-a-random-string-of-a-fixed-length-in-go +func RandStr(n int) string { + sb := strings.Builder{} + sb.Grow(n) + for i, cache, remain := n-1, randSrc.Int63(), randLetterIdxMax; i >= 0; { + if remain == 0 { + cache, remain = randSrc.Int63(), randLetterIdxMax + } + if idx := int(cache & randLetterIdxMask); idx < len(randLetterBytes) { + sb.WriteByte(randLetterBytes[idx]) + i-- + } + cache >>= randLetterIdxBits + remain-- + } + return sb.String() +} diff --git a/vendor/github.com/asticode/go-astikit/sort.go b/vendor/github.com/asticode/go-astikit/sort.go new file mode 100644 index 000000000..45539c6f5 --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/sort.go @@ -0,0 +1,13 @@ +package astikit + +import "sort" + +// SortInt64 sorts a slice of int64s in increasing order. +func SortInt64(a []int64) { sort.Sort(SortInt64Slice(a)) } + +// SortInt64Slice attaches the methods of Interface to []int64, sorting in increasing order. 
+type SortInt64Slice []int64 + +func (p SortInt64Slice) Len() int { return len(p) } +func (p SortInt64Slice) Less(i, j int) bool { return p[i] < p[j] } +func (p SortInt64Slice) Swap(i, j int) { p[i], p[j] = p[j], p[i] } diff --git a/vendor/github.com/asticode/go-astikit/ssh.go b/vendor/github.com/asticode/go-astikit/ssh.go new file mode 100644 index 000000000..5754895f0 --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/ssh.go @@ -0,0 +1,113 @@ +package astikit + +import ( + "context" + "fmt" + "io" + "os" + "path/filepath" +) + +// SSHSession represents an SSH Session +type SSHSession interface { + Run(string) error + Start(string) error + StdinPipe() (io.WriteCloser, error) + Wait() error +} + +// SSHSessionFunc represents a func that can return an SSHSession +type SSHSessionFunc func() (s SSHSession, c *Closer, err error) + +// SSHCopyFileFunc is the SSH CopyFileFunc that allows doing SSH copies +func SSHCopyFileFunc(fn SSHSessionFunc) CopyFileFunc { + return func(ctx context.Context, dst string, srcStat os.FileInfo, srcFile *os.File) (err error) { + // Check context + if err = ctx.Err(); err != nil { + return + } + + // Using local closure allows better readibility for the defer c.Close() since it + // isolates the use of the ssh session + if err = func() (err error) { + // Create ssh session + var s SSHSession + var c *Closer + if s, c, err = fn(); err != nil { + err = fmt.Errorf("astikit: creating ssh session failed: %w", err) + return + } + defer c.Close() + + // Create the destination folder + if err = s.Run("mkdir -p " + filepath.Dir(dst)); err != nil { + err = fmt.Errorf("astikit: creating %s failed: %w", filepath.Dir(dst), err) + return + } + return + }(); err != nil { + return + } + + // Using local closure allows better readibility for the defer c.Close() since it + // isolates the use of the ssh session + if err = func() (err error) { + // Create ssh session + var s SSHSession + var c *Closer + if s, c, err = fn(); err != nil { + err = 
fmt.Errorf("astikit: creating ssh session failed: %w", err) + return + } + defer c.Close() + + // Create stdin pipe + var stdin io.WriteCloser + if stdin, err = s.StdinPipe(); err != nil { + err = fmt.Errorf("astikit: creating stdin pipe failed: %w", err) + return + } + defer stdin.Close() + + // Use "scp" command + if err = s.Start("scp -qt \"" + filepath.Dir(dst) + "\""); err != nil { + err = fmt.Errorf("astikit: scp to %s failed: %w", dst, err) + return + } + + // Send metadata + if _, err = fmt.Fprintln(stdin, fmt.Sprintf("C%04o", srcStat.Mode().Perm()), srcStat.Size(), filepath.Base(dst)); err != nil { + err = fmt.Errorf("astikit: sending metadata failed: %w", err) + return + } + + // Copy + if _, err = Copy(ctx, stdin, srcFile); err != nil { + err = fmt.Errorf("astikit: copying failed: %w", err) + return + } + + // Send close + if _, err = fmt.Fprint(stdin, "\x00"); err != nil { + err = fmt.Errorf("astikit: sending close failed: %w", err) + return + } + + // Close stdin + if err = stdin.Close(); err != nil { + err = fmt.Errorf("astikit: closing failed: %w", err) + return + } + + // Wait + if err = s.Wait(); err != nil { + err = fmt.Errorf("astikit: waiting failed: %w", err) + return + } + return + }(); err != nil { + return + } + return + } +} diff --git a/vendor/github.com/asticode/go-astikit/stat.go b/vendor/github.com/asticode/go-astikit/stat.go new file mode 100644 index 000000000..6f66aed2a --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/stat.go @@ -0,0 +1,301 @@ +package astikit + +import ( + "context" + "sync" + "sync/atomic" + "time" +) + +// Stater is an object that can compute and handle stats +type Stater struct { + cancel context.CancelFunc + ctx context.Context + h StatsHandleFunc + m *sync.Mutex // Locks ss + period time.Duration + running uint32 + ss map[*StatMetadata]StatOptions +} + +// StatOptions represents stat options +type StatOptions struct { + Handler StatHandler + Metadata *StatMetadata +} + +// StatsHandleFunc is a method 
that can handle stat values +type StatsHandleFunc func(stats []StatValue) + +// StatMetadata represents a stat metadata +type StatMetadata struct { + Description string + Label string + Name string + Unit string +} + +// StatHandler represents a stat handler +type StatHandler interface { + Start() + Stop() + Value(delta time.Duration) interface{} +} + +// StatValue represents a stat value +type StatValue struct { + *StatMetadata + Value interface{} +} + +// StaterOptions represents stater options +type StaterOptions struct { + HandleFunc StatsHandleFunc + Period time.Duration +} + +// NewStater creates a new stater +func NewStater(o StaterOptions) *Stater { + return &Stater{ + h: o.HandleFunc, + m: &sync.Mutex{}, + period: o.Period, + ss: make(map[*StatMetadata]StatOptions), + } +} + +// Start starts the stater +func (s *Stater) Start(ctx context.Context) { + // Check context + if ctx.Err() != nil { + return + } + + // Make sure to start only once + if atomic.CompareAndSwapUint32(&s.running, 0, 1) { + // Update status + defer atomic.StoreUint32(&s.running, 0) + + // Reset context + s.ctx, s.cancel = context.WithCancel(ctx) + + // Create ticker + t := time.NewTicker(s.period) + defer t.Stop() + + // Loop + lastStatAt := now() + for { + select { + case <-t.C: + // Get delta + n := now() + delta := n.Sub(lastStatAt) + lastStatAt = n + + // Loop through stats + var stats []StatValue + s.m.Lock() + for _, v := range s.ss { + stats = append(stats, StatValue{ + StatMetadata: v.Metadata, + Value: v.Handler.Value(delta), + }) + } + s.m.Unlock() + + // Handle stats + go s.h(stats) + case <-s.ctx.Done(): + return + } + } + } +} + +// Stop stops the stater +func (s *Stater) Stop() { + if s.cancel != nil { + s.cancel() + } +} + +// AddStats adds stats +func (s *Stater) AddStats(os ...StatOptions) { + s.m.Lock() + defer s.m.Unlock() + for _, o := range os { + s.ss[o.Metadata] = o + } +} + +// DelStats deletes stats +func (s *Stater) DelStats(os ...StatOptions) { + s.m.Lock() + 
defer s.m.Unlock() + for _, o := range os { + delete(s.ss, o.Metadata) + } +} + +type durationStat struct { + d time.Duration + fn func(d, delta time.Duration) interface{} + isStarted bool + m *sync.Mutex // Locks isStarted + startedAt time.Time +} + +func newDurationStat(fn func(d, delta time.Duration) interface{}) *durationStat { + return &durationStat{ + fn: fn, + m: &sync.Mutex{}, + } +} + +func (s *durationStat) Begin() { + s.m.Lock() + defer s.m.Unlock() + if !s.isStarted { + return + } + s.startedAt = now() +} + +func (s *durationStat) End() { + s.m.Lock() + defer s.m.Unlock() + if !s.isStarted { + return + } + s.d += now().Sub(s.startedAt) + s.startedAt = time.Time{} +} + +func (s *durationStat) Value(delta time.Duration) (o interface{}) { + // Lock + s.m.Lock() + defer s.m.Unlock() + + // Get current values + n := now() + d := s.d + + // Recording is still in process + if !s.startedAt.IsZero() { + d += n.Sub(s.startedAt) + s.startedAt = n + } + + // Compute stat + o = s.fn(d, delta) + s.d = 0 + return +} + +func (s *durationStat) Start() { + s.m.Lock() + defer s.m.Unlock() + s.d = 0 + s.isStarted = true +} + +func (s *durationStat) Stop() { + s.m.Lock() + defer s.m.Unlock() + s.isStarted = false +} + +// DurationPercentageStat is an object capable of computing the percentage of time some work is taking per second +type DurationPercentageStat struct { + *durationStat +} + +// NewDurationPercentageStat creates a new duration percentage stat +func NewDurationPercentageStat() *DurationPercentageStat { + return &DurationPercentageStat{durationStat: newDurationStat(func(d, delta time.Duration) interface{} { + if delta == 0 { + return 0 + } + return float64(d) / float64(delta) * 100 + })} +} + +type counterStat struct { + c float64 + fn func(c, t float64, delta time.Duration) interface{} + isStarted bool + m *sync.Mutex // Locks isStarted + t float64 +} + +func newCounterStat(fn func(c, t float64, delta time.Duration) interface{}) *counterStat { + return 
&counterStat{ + fn: fn, + m: &sync.Mutex{}, + } +} + +func (s *counterStat) Add(delta float64) { + s.m.Lock() + defer s.m.Unlock() + if !s.isStarted { + return + } + s.c += delta + s.t++ +} + +func (s *counterStat) Start() { + s.m.Lock() + defer s.m.Unlock() + s.c = 0 + s.isStarted = true + s.t = 0 +} + +func (s *counterStat) Stop() { + s.m.Lock() + defer s.m.Unlock() + s.isStarted = true +} + +func (s *counterStat) Value(delta time.Duration) interface{} { + s.m.Lock() + defer s.m.Unlock() + c := s.c + t := s.t + s.c = 0 + s.t = 0 + return s.fn(c, t, delta) +} + +// CounterAvgStat is an object capable of computing the average value of a counter +type CounterAvgStat struct { + *counterStat +} + +// NewCounterAvgStat creates a new counter avg stat +func NewCounterAvgStat() *CounterAvgStat { + return &CounterAvgStat{counterStat: newCounterStat(func(c, t float64, delta time.Duration) interface{} { + if t == 0 { + return 0 + } + return c / t + })} +} + +// CounterRateStat is an object capable of computing the average value of a counter per second +type CounterRateStat struct { + *counterStat +} + +// NewCounterRateStat creates a new counter rate stat +func NewCounterRateStat() *CounterRateStat { + return &CounterRateStat{counterStat: newCounterStat(func(c, t float64, delta time.Duration) interface{} { + if delta.Seconds() == 0 { + return 0 + } + return c / delta.Seconds() + })} +} diff --git a/vendor/github.com/asticode/go-astikit/sync.go b/vendor/github.com/asticode/go-astikit/sync.go new file mode 100644 index 000000000..afa2158bd --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/sync.go @@ -0,0 +1,489 @@ +package astikit + +import ( + "bytes" + "context" + "errors" + "fmt" + "runtime" + "sync" + "sync/atomic" + "time" +) + +// Stat names +const ( + StatNameWorkRatio = "astikit.work.ratio" +) + +// Chan constants +const ( + // Calling Add() only blocks if the chan has been started and the ctx + // has not been canceled + ChanAddStrategyBlockWhenStarted = 
"block.when.started" + // Calling Add() never blocks + ChanAddStrategyNoBlock = "no.block" + ChanOrderFIFO = "fifo" + ChanOrderFILO = "filo" +) + +// Chan is an object capable of executing funcs in a specific order while controlling the conditions +// in which adding new funcs is blocking +// Check out ChanOptions for detailed options +type Chan struct { + cancel context.CancelFunc + c *sync.Cond + ctx context.Context + fs []func() + mc *sync.Mutex // Locks ctx + mf *sync.Mutex // Locks fs + o ChanOptions + running uint32 + statWorkRatio *DurationPercentageStat +} + +// ChanOptions are Chan options +type ChanOptions struct { + // Determines the conditions in which Add() blocks. See constants with pattern ChanAddStrategy* + // Default is ChanAddStrategyNoBlock + AddStrategy string + // Order in which the funcs will be processed. See constants with pattern ChanOrder* + // Default is ChanOrderFIFO + Order string + // By default the funcs not yet processed when the context is cancelled are dropped. + // If "ProcessAll" is true, ALL funcs are processed even after the context is cancelled. 
+ // However, no funcs can be added after the context is cancelled + ProcessAll bool +} + +// NewChan creates a new Chan +func NewChan(o ChanOptions) *Chan { + return &Chan{ + c: sync.NewCond(&sync.Mutex{}), + mc: &sync.Mutex{}, + mf: &sync.Mutex{}, + o: o, + } +} + +// Start starts the chan by looping through functions in the buffer and +// executing them if any, or waiting for a new one otherwise +func (c *Chan) Start(ctx context.Context) { + // Make sure to start only once + if atomic.CompareAndSwapUint32(&c.running, 0, 1) { + // Update status + defer atomic.StoreUint32(&c.running, 0) + + // Create context + c.mc.Lock() + c.ctx, c.cancel = context.WithCancel(ctx) + d := c.ctx.Done() + c.mc.Unlock() + + // Handle context + go func() { + // Wait for context to be done + <-d + + // Signal + c.c.L.Lock() + c.c.Signal() + c.c.L.Unlock() + }() + + // Loop + for { + // Lock cond here in case a func is added between retrieving l and doing the if on it + c.c.L.Lock() + + // Get number of funcs in buffer + c.mf.Lock() + l := len(c.fs) + c.mf.Unlock() + + // Only return if context has been cancelled and: + // - the user wants to drop funcs that has not yet been processed + // - the buffer is empty otherwise + c.mc.Lock() + if c.ctx.Err() != nil && (!c.o.ProcessAll || l == 0) { + c.mc.Unlock() + c.c.L.Unlock() + return + } + c.mc.Unlock() + + // No funcs in buffer + if l == 0 { + c.c.Wait() + c.c.L.Unlock() + continue + } + c.c.L.Unlock() + + // Get first func + c.mf.Lock() + fn := c.fs[0] + c.mf.Unlock() + + // Execute func + if c.statWorkRatio != nil { + c.statWorkRatio.Begin() + } + fn() + if c.statWorkRatio != nil { + c.statWorkRatio.End() + } + + // Remove first func + c.mf.Lock() + c.fs = c.fs[1:] + c.mf.Unlock() + } + } +} + +// Stop stops the chan +func (c *Chan) Stop() { + c.mc.Lock() + if c.cancel != nil { + c.cancel() + } + c.mc.Unlock() +} + +// Add adds a new item to the chan +func (c *Chan) Add(i func()) { + // Check context + c.mc.Lock() + if c.ctx != nil && 
c.ctx.Err() != nil { + c.mc.Unlock() + return + } + c.mc.Unlock() + + // Wrap the function + var fn func() + var wg *sync.WaitGroup + if c.o.AddStrategy == ChanAddStrategyBlockWhenStarted { + wg = &sync.WaitGroup{} + wg.Add(1) + fn = func() { + defer wg.Done() + i() + } + } else { + fn = i + } + + // Add func to buffer + c.mf.Lock() + if c.o.Order == ChanOrderFILO { + c.fs = append([]func(){fn}, c.fs...) + } else { + c.fs = append(c.fs, fn) + } + c.mf.Unlock() + + // Signal + c.c.L.Lock() + c.c.Signal() + c.c.L.Unlock() + + // Wait + if wg != nil { + wg.Wait() + } +} + +// Reset resets the chan +func (c *Chan) Reset() { + c.mf.Lock() + defer c.mf.Unlock() + c.fs = []func(){} +} + +// Stats returns the chan stats +func (c *Chan) Stats() []StatOptions { + if c.statWorkRatio == nil { + c.statWorkRatio = NewDurationPercentageStat() + } + return []StatOptions{ + { + Handler: c.statWorkRatio, + Metadata: &StatMetadata{ + Description: "Percentage of time doing work", + Label: "Work ratio", + Name: StatNameWorkRatio, + Unit: "%", + }, + }, + } +} + +// BufferPool represents a *bytes.Buffer pool +type BufferPool struct { + bp *sync.Pool +} + +// NewBufferPool creates a new BufferPool +func NewBufferPool() *BufferPool { + return &BufferPool{bp: &sync.Pool{New: func() interface{} { return &bytes.Buffer{} }}} +} + +// New creates a new BufferPoolItem +func (p *BufferPool) New() *BufferPoolItem { + return newBufferPoolItem(p.bp.Get().(*bytes.Buffer), p.bp) +} + +// BufferPoolItem represents a BufferPool item +type BufferPoolItem struct { + *bytes.Buffer + bp *sync.Pool +} + +func newBufferPoolItem(b *bytes.Buffer, bp *sync.Pool) *BufferPoolItem { + return &BufferPoolItem{ + Buffer: b, + bp: bp, + } +} + +// Close implements the io.Closer interface +func (i *BufferPoolItem) Close() error { + i.Reset() + i.bp.Put(i.Buffer) + return nil +} + +// GoroutineLimiter is an object capable of doing several things in parallel while maintaining the +// max number of things running in 
parallel under a threshold +type GoroutineLimiter struct { + busy int + c *sync.Cond + ctx context.Context + cancel context.CancelFunc + o GoroutineLimiterOptions +} + +// GoroutineLimiterOptions represents GoroutineLimiter options +type GoroutineLimiterOptions struct { + Max int +} + +// NewGoroutineLimiter creates a new GoroutineLimiter +func NewGoroutineLimiter(o GoroutineLimiterOptions) (l *GoroutineLimiter) { + l = &GoroutineLimiter{ + c: sync.NewCond(&sync.Mutex{}), + o: o, + } + if l.o.Max <= 0 { + l.o.Max = 1 + } + l.ctx, l.cancel = context.WithCancel(context.Background()) + go l.handleCtx() + return +} + +// Close closes the limiter properly +func (l *GoroutineLimiter) Close() error { + l.cancel() + return nil +} + +func (l *GoroutineLimiter) handleCtx() { + <-l.ctx.Done() + l.c.L.Lock() + l.c.Broadcast() + l.c.L.Unlock() +} + +// GoroutineLimiterFunc is a GoroutineLimiter func +type GoroutineLimiterFunc func() + +// Do executes custom work in a goroutine +func (l *GoroutineLimiter) Do(fn GoroutineLimiterFunc) (err error) { + // Check context in case the limiter has already been closed + if err = l.ctx.Err(); err != nil { + return + } + + // Lock + l.c.L.Lock() + + // Wait for a goroutine to be available + for l.busy >= l.o.Max { + l.c.Wait() + } + + // Check context in case the limiter has been closed while waiting + if err = l.ctx.Err(); err != nil { + return + } + + // Increment + l.busy++ + + // Unlock + l.c.L.Unlock() + + // Execute in a goroutine + go func() { + // Decrement + defer func() { + l.c.L.Lock() + l.busy-- + l.c.Signal() + l.c.L.Unlock() + }() + + // Execute + fn() + }() + return +} + +// Eventer represents an object that can dispatch simple events (name + payload) +type Eventer struct { + c *Chan + hs map[string][]EventerHandler + mh *sync.Mutex +} + +// EventerOptions represents Eventer options +type EventerOptions struct { + Chan ChanOptions +} + +// EventerHandler represents a function that can handle the payload of an event +type 
EventerHandler func(payload interface{}) + +// NewEventer creates a new eventer +func NewEventer(o EventerOptions) *Eventer { + return &Eventer{ + c: NewChan(o.Chan), + hs: make(map[string][]EventerHandler), + mh: &sync.Mutex{}, + } +} + +// On adds an handler for a specific name +func (e *Eventer) On(name string, h EventerHandler) { + // Lock + e.mh.Lock() + defer e.mh.Unlock() + + // Add handler + e.hs[name] = append(e.hs[name], h) +} + +// Dispatch dispatches a payload for a specific name +func (e *Eventer) Dispatch(name string, payload interface{}) { + // Lock + e.mh.Lock() + defer e.mh.Unlock() + + // No handlers + hs, ok := e.hs[name] + if !ok { + return + } + + // Loop through handlers + for _, h := range hs { + func(h EventerHandler) { + // Add to chan + e.c.Add(func() { + h(payload) + }) + }(h) + } +} + +// Start starts the eventer. It is blocking +func (e *Eventer) Start(ctx context.Context) { + e.c.Start(ctx) +} + +// Stop stops the eventer +func (e *Eventer) Stop() { + e.c.Stop() +} + +// Reset resets the eventer +func (e *Eventer) Reset() { + e.c.Reset() +} + +// RWMutex represents a RWMutex capable of logging its actions to ease deadlock debugging +type RWMutex struct { + c string // Last successful caller + l SeverityLogger + m *sync.RWMutex + n string // Name +} + +// RWMutexOptions represents RWMutex options +type RWMutexOptions struct { + Logger StdLogger + Name string +} + +// NewRWMutex creates a new RWMutex +func NewRWMutex(o RWMutexOptions) *RWMutex { + return &RWMutex{ + l: AdaptStdLogger(o.Logger), + m: &sync.RWMutex{}, + n: o.Name, + } +} + +func (m *RWMutex) caller() (o string) { + if _, file, line, ok := runtime.Caller(2); ok { + o = fmt.Sprintf("%s:%d", file, line) + } + return +} + +// Lock write locks the mutex +func (m *RWMutex) Lock() { + c := m.caller() + m.l.Debugf("astikit: requesting lock for %s at %s", m.n, c) + m.m.Lock() + m.l.Debugf("astikit: lock acquired for %s at %s", m.n, c) + m.c = c +} + +// Unlock write unlocks the 
mutex +func (m *RWMutex) Unlock() { + m.m.Unlock() + m.l.Debugf("astikit: unlock executed for %s", m.n) +} + +// RLock read locks the mutex +func (m *RWMutex) RLock() { + c := m.caller() + m.l.Debugf("astikit: requesting rlock for %s at %s", m.n, c) + m.m.RLock() + m.l.Debugf("astikit: rlock acquired for %s at %s", m.n, c) + m.c = c +} + +// RUnlock read unlocks the mutex +func (m *RWMutex) RUnlock() { + m.m.RUnlock() + m.l.Debugf("astikit: unlock executed for %s", m.n) +} + +// IsDeadlocked checks whether the mutex is deadlocked with a given timeout +// and returns the last caller +func (m *RWMutex) IsDeadlocked(timeout time.Duration) (bool, string) { + ctx, cancel := context.WithTimeout(context.Background(), timeout) + defer cancel() + go func() { + m.m.Lock() + cancel() + m.m.Unlock() + }() + <-ctx.Done() + return errors.Is(ctx.Err(), context.DeadlineExceeded), m.c +} diff --git a/vendor/github.com/asticode/go-astikit/template.go b/vendor/github.com/asticode/go-astikit/template.go new file mode 100644 index 000000000..804ad77e1 --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/template.go @@ -0,0 +1,156 @@ +package astikit + +import ( + "fmt" + "io/ioutil" + "os" + "path/filepath" + "strings" + "sync" + "text/template" +) + +// Templater represents an object capable of storing and parsing templates +type Templater struct { + layouts []string + m sync.Mutex + templates map[string]*template.Template +} + +// NewTemplater creates a new templater +func NewTemplater() *Templater { + return &Templater{templates: make(map[string]*template.Template)} +} + +// AddLayoutsFromDir walks through a dir and add files as layouts +func (t *Templater) AddLayoutsFromDir(dirPath, ext string) (err error) { + // Get layouts + if err = filepath.Walk(dirPath, func(path string, info os.FileInfo, e error) (err error) { + // Check input error + if e != nil { + err = fmt.Errorf("astikit: walking layouts has an input error for path %s: %w", path, e) + return + } + + // Only process 
files + if info.IsDir() { + return + } + + // Check extension + if ext != "" && filepath.Ext(path) != ext { + return + } + + // Read layout + var b []byte + if b, err = ioutil.ReadFile(path); err != nil { + err = fmt.Errorf("astikit: reading %s failed: %w", path, err) + return + } + + // Add layout + t.AddLayout(string(b)) + return + }); err != nil { + err = fmt.Errorf("astikit: walking layouts in %s failed: %w", dirPath, err) + return + } + return +} + +// AddTemplatesFromDir walks through a dir and add files as templates +func (t *Templater) AddTemplatesFromDir(dirPath, ext string) (err error) { + // Loop through templates + if err = filepath.Walk(dirPath, func(path string, info os.FileInfo, e error) (err error) { + // Check input error + if e != nil { + err = fmt.Errorf("astikit: walking templates has an input error for path %s: %w", path, e) + return + } + + // Only process files + if info.IsDir() { + return + } + + // Check extension + if ext != "" && filepath.Ext(path) != ext { + return + } + + // Read file + var b []byte + if b, err = ioutil.ReadFile(path); err != nil { + err = fmt.Errorf("astikit: reading template content of %s failed: %w", path, err) + return + } + + // Add template + // We use ToSlash to homogenize Windows path + if err = t.AddTemplate(filepath.ToSlash(strings.TrimPrefix(path, dirPath)), string(b)); err != nil { + err = fmt.Errorf("astikit: adding template failed: %w", err) + return + } + return + }); err != nil { + err = fmt.Errorf("astikit: walking templates in %s failed: %w", dirPath, err) + return + } + return +} + +// AddLayout adds a new layout +func (t *Templater) AddLayout(c string) { + t.layouts = append(t.layouts, c) +} + +// AddTemplate adds a new template +func (t *Templater) AddTemplate(path, content string) (err error) { + // Parse + var tpl *template.Template + if tpl, err = t.Parse(content); err != nil { + err = fmt.Errorf("astikit: parsing template for path %s failed: %w", path, err) + return + } + + // Add template + 
t.m.Lock() + t.templates[path] = tpl + t.m.Unlock() + return +} + +// DelTemplate deletes a template +func (t *Templater) DelTemplate(path string) { + t.m.Lock() + defer t.m.Unlock() + delete(t.templates, path) +} + +// Template retrieves a templates +func (t *Templater) Template(path string) (tpl *template.Template, ok bool) { + t.m.Lock() + defer t.m.Unlock() + tpl, ok = t.templates[path] + return +} + +// Parse parses the content of a template +func (t *Templater) Parse(content string) (o *template.Template, err error) { + // Parse content + o = template.New("root") + if o, err = o.Parse(content); err != nil { + err = fmt.Errorf("astikit: parsing template content failed: %w", err) + return + } + + // Parse layouts + for idx, l := range t.layouts { + if o, err = o.Parse(l); err != nil { + err = fmt.Errorf("astikit: parsing layout #%d failed: %w", idx+1, err) + return + } + } + return +} diff --git a/vendor/github.com/asticode/go-astikit/time.go b/vendor/github.com/asticode/go-astikit/time.go new file mode 100644 index 000000000..c30fa0173 --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/time.go @@ -0,0 +1,58 @@ +package astikit + +import ( + "context" + "strconv" + "time" +) + +var now = func() time.Time { return time.Now() } + +// Sleep is a cancellable sleep +func Sleep(ctx context.Context, d time.Duration) (err error) { + for { + select { + case <-time.After(d): + return + case <-ctx.Done(): + err = ctx.Err() + return + } + } +} + +// Timestamp represents a timestamp you can marshal and umarshal +type Timestamp struct { + time.Time +} + +// NewTimestamp creates a new timestamp +func NewTimestamp(t time.Time) *Timestamp { + return &Timestamp{Time: t} +} + +// UnmarshalJSON implements the JSONUnmarshaler interface +func (t *Timestamp) UnmarshalJSON(text []byte) error { + return t.UnmarshalText(text) +} + +// UnmarshalText implements the TextUnmarshaler interface +func (t *Timestamp) UnmarshalText(text []byte) (err error) { + var i int + if i, err = 
strconv.Atoi(string(text)); err != nil { + return + } + t.Time = time.Unix(int64(i), 0) + return +} + +// MarshalJSON implements the JSONMarshaler interface +func (t Timestamp) MarshalJSON() ([]byte, error) { + return t.MarshalText() +} + +// MarshalText implements the TextMarshaler interface +func (t Timestamp) MarshalText() (text []byte, err error) { + text = []byte(strconv.Itoa(int(t.UTC().Unix()))) + return +} diff --git a/vendor/github.com/asticode/go-astikit/translator.go b/vendor/github.com/asticode/go-astikit/translator.go new file mode 100644 index 000000000..4e2c8310c --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/translator.go @@ -0,0 +1,184 @@ +package astikit + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "os" + "path/filepath" + "strings" + "sync" +) + +// Translator represents an object capable of translating stuff +type Translator struct { + m *sync.RWMutex // Lock p + o TranslatorOptions + p map[string]string +} + +// TranslatorOptions represents Translator options +type TranslatorOptions struct { + DefaultLanguage string +} + +// NewTranslator creates a new Translator +func NewTranslator(o TranslatorOptions) *Translator { + return &Translator{ + m: &sync.RWMutex{}, + o: o, + p: make(map[string]string), + } +} + +// ParseDir adds translations located in ".json" files in the specified dir +func (t *Translator) ParseDir(dirPath string) (err error) { + // Default dir path + if dirPath == "" { + if dirPath, err = os.Getwd(); err != nil { + err = fmt.Errorf("astikit: getwd failed: %w", err) + return + } + } + + // Walk through dir + if err = filepath.Walk(dirPath, func(path string, info os.FileInfo, e error) (err error) { + // Check input error + if e != nil { + err = fmt.Errorf("astikit: walking %s has an input error for path %s: %w", dirPath, path, e) + return + } + + // Only process first level files + if info.IsDir() { + if path != dirPath { + err = filepath.SkipDir + } + return + } + + // Only process ".json" files + if 
filepath.Ext(path) != ".json" { + return + } + + // Parse file + if err = t.ParseFile(path); err != nil { + err = fmt.Errorf("astikit: parsing %s failed: %w", path, err) + return + } + return + }); err != nil { + err = fmt.Errorf("astikit: walking %s failed: %w", dirPath, err) + return + } + return +} + +// ParseFile adds translation located in the provided path +func (t *Translator) ParseFile(path string) (err error) { + // Lock + t.m.Lock() + defer t.m.Unlock() + + // Open file + var f *os.File + if f, err = os.Open(path); err != nil { + err = fmt.Errorf("astikit: opening %s failed: %w", path, err) + return + } + defer f.Close() + + // Unmarshal + var p map[string]interface{} + if err = json.NewDecoder(f).Decode(&p); err != nil { + err = fmt.Errorf("astikit: unmarshaling %s failed: %w", path, err) + return + } + + // Parse + t.parse(p, strings.TrimSuffix(filepath.Base(path), filepath.Ext(path))) + return +} + +func (t *Translator) key(prefix, key string) string { + return prefix + "." + key +} + +func (t *Translator) parse(i map[string]interface{}, prefix string) { + for k, v := range i { + p := t.key(prefix, k) + switch a := v.(type) { + case string: + t.p[p] = a + case map[string]interface{}: + t.parse(a, p) + } + } +} + +// HTTPMiddleware is the Translator HTTP middleware +func (t *Translator) HTTPMiddleware(h http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + // Store language in context + if l := r.Header.Get("Accept-Language"); l != "" { + *r = *r.WithContext(contextWithTranslatorLanguage(r.Context(), l)) + } + + // Next handler + h.ServeHTTP(rw, r) + }) +} + +const contextKeyTranslatorLanguage = "astikit.translator.language" + +func contextWithTranslatorLanguage(ctx context.Context, language string) context.Context { + return context.WithValue(ctx, contextKeyTranslatorLanguage, language) +} + +func translatorLanguageFromContext(ctx context.Context) string { + v, ok := 
ctx.Value(contextKeyTranslatorLanguage).(string) + if !ok { + return "" + } + return v +} + +func (t *Translator) language(language string) string { + if language == "" { + return t.o.DefaultLanguage + } + return language +} + +// LanguageCtx returns the translator language from the context, or the default language if not in the context +func (t *Translator) LanguageCtx(ctx context.Context) string { + return t.language(translatorLanguageFromContext(ctx)) +} + +// Translate translates a key into a specific language +func (t *Translator) Translate(language, key string) string { + // Lock + t.m.RLock() + defer t.m.RUnlock() + + // Get translation + k1 := t.key(t.language(language), key) + v, ok := t.p[k1] + if ok { + return v + } + + // Default translation + k2 := t.key(t.o.DefaultLanguage, key) + if v, ok = t.p[k2]; ok { + return v + } + return k1 +} + +// TranslateCtx translates a key using the language specified in the context +func (t *Translator) TranslateCtx(ctx context.Context, key string) string { + return t.Translate(translatorLanguageFromContext(ctx), key) +} diff --git a/vendor/github.com/asticode/go-astikit/worker.go b/vendor/github.com/asticode/go-astikit/worker.go new file mode 100644 index 000000000..b9a95a3a9 --- /dev/null +++ b/vendor/github.com/asticode/go-astikit/worker.go @@ -0,0 +1,148 @@ +package astikit + +import ( + "context" + "os" + "os/signal" + "sync" +) + +// Worker represents an object capable of blocking, handling signals and stopping +type Worker struct { + cancel context.CancelFunc + ctx context.Context + l SeverityLogger + os, ow sync.Once + wg *sync.WaitGroup +} + +// WorkerOptions represents worker options +type WorkerOptions struct { + Logger StdLogger +} + +// NewWorker builds a new worker +func NewWorker(o WorkerOptions) (w *Worker) { + w = &Worker{ + l: AdaptStdLogger(o.Logger), + wg: &sync.WaitGroup{}, + } + w.ctx, w.cancel = context.WithCancel(context.Background()) + w.wg.Add(1) + w.l.Info("astikit: starting worker...") + 
return +} + +// HandleSignals handles signals +func (w *Worker) HandleSignals(hs ...SignalHandler) { + // Prepend mandatory handler + hs = append([]SignalHandler{TermSignalHandler(w.Stop)}, hs...) + + // Notify + ch := make(chan os.Signal, 1) + signal.Notify(ch) + + // Execute in a task + w.NewTask().Do(func() { + for { + select { + case s := <-ch: + // Loop through handlers + for _, h := range hs { + h(s) + } + + // Return + if isTermSignal(s) { + return + } + case <-w.Context().Done(): + return + } + } + }) +} + +// Stop stops the Worker +func (w *Worker) Stop() { + w.os.Do(func() { + w.l.Info("astikit: stopping worker...") + w.cancel() + w.wg.Done() + }) +} + +// Wait is a blocking pattern +func (w *Worker) Wait() { + w.ow.Do(func() { + w.l.Info("astikit: worker is now waiting...") + w.wg.Wait() + }) +} + +// NewTask creates a new task +func (w *Worker) NewTask() *Task { + return newTask(w.wg) +} + +// Context returns the worker's context +func (w *Worker) Context() context.Context { + return w.ctx +} + +// Logger returns the worker's logger +func (w *Worker) Logger() SeverityLogger { + return w.l +} + +// TaskFunc represents a function that can create a new task +type TaskFunc func() *Task + +// Task represents a task +type Task struct { + od, ow sync.Once + wg, pwg *sync.WaitGroup +} + +func newTask(parentWg *sync.WaitGroup) (t *Task) { + t = &Task{ + wg: &sync.WaitGroup{}, + pwg: parentWg, + } + t.pwg.Add(1) + return +} + +// NewSubTask creates a new sub task +func (t *Task) NewSubTask() *Task { + return newTask(t.wg) +} + +// Do executes the task +func (t *Task) Do(f func()) { + go func() { + // Make sure to mark the task as done + defer t.Done() + + // Custom + f() + + // Wait for first level subtasks to be done + // Wait() can also be called in f() if something needs to be executed just after Wait() + t.Wait() + }() +} + +// Done indicates the task is done +func (t *Task) Done() { + t.od.Do(func() { + t.pwg.Done() + }) +} + +// Wait waits for first level 
subtasks to be finished +func (t *Task) Wait() { + t.ow.Do(func() { + t.wg.Wait() + }) +} diff --git a/vendor/github.com/asticode/go-astisub/.gitignore b/vendor/github.com/asticode/go-astisub/.gitignore new file mode 100644 index 000000000..5be2b41d1 --- /dev/null +++ b/vendor/github.com/asticode/go-astisub/.gitignore @@ -0,0 +1,5 @@ +.DS_Store +Thumbs.db +.idea/ +cover* +test diff --git a/vendor/github.com/asticode/go-astisub/.travis.yml b/vendor/github.com/asticode/go-astisub/.travis.yml new file mode 100644 index 000000000..2295f8d4e --- /dev/null +++ b/vendor/github.com/asticode/go-astisub/.travis.yml @@ -0,0 +1,14 @@ +language: go +go: +- 1.x +- tip +install: +- go get -t ./... +- go get golang.org/x/tools/cmd/cover +- go get github.com/mattn/goveralls +matrix: + allow_failures: + - go: tip +script: +- go test -race -v -coverprofile=coverage.out +- $HOME/gopath/bin/goveralls -coverprofile=coverage.out -service=travis-ci \ No newline at end of file diff --git a/vendor/github.com/asticode/go-astisub/LICENSE b/vendor/github.com/asticode/go-astisub/LICENSE new file mode 100644 index 000000000..606a4160f --- /dev/null +++ b/vendor/github.com/asticode/go-astisub/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016 Quentin Renard + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/asticode/go-astisub/README.md b/vendor/github.com/asticode/go-astisub/README.md new file mode 100644 index 000000000..43e7bd62b --- /dev/null +++ b/vendor/github.com/asticode/go-astisub/README.md @@ -0,0 +1,95 @@ +[![GoReportCard](http://goreportcard.com/badge/github.com/asticode/go-astisub)](http://goreportcard.com/report/github.com/asticode/go-astisub) +[![GoDoc](https://godoc.org/github.com/asticode/go-astisub?status.svg)](https://godoc.org/github.com/asticode/go-astisub) +[![Travis](https://travis-ci.com/asticode/go-astisub.svg?branch=master)](https://travis-ci.com/asticode/go-astisub#) +[![Coveralls](https://coveralls.io/repos/github/asticode/go-astisub/badge.svg?branch=master)](https://coveralls.io/github/asticode/go-astisub) + +This is a Golang library to manipulate subtitles. + +It allows you to manipulate `srt`, `stl`, `ttml`, `ssa/ass`, `webvtt` and `teletext` files for now. + +Available operations are `parsing`, `writing`, `syncing`, `fragmenting`, `unfragmenting`, `merging` and `optimizing`. + +# Installation + +To install the library: + + go get github.com/asticode/go-astisub + +To install the CLI: + + go install github.com/asticode/go-astisub/astisub + +# Using the library in your code + +WARNING: the code below doesn't handle errors for readibility purposes. However you SHOULD! 
+ +```go +// Open subtitles +s1, _ := astisub.OpenFile("/path/to/example.ttml") +s2, _ := astisub.ReadFromSRT(bytes.NewReader([]byte("00:01:00.000 --> 00:02:00.000\nCredits"))) + +// Add a duration to every subtitles (syncing) +s1.Add(-2*time.Second) + +// Fragment the subtitles +s1.Fragment(2*time.Second) + +// Merge subtitles +s1.Merge(s2) + +// Optimize subtitles +s1.Optimize() + +// Unfragment the subtitles +s1.Unfragment() + +// Write subtitles +s1.Write("/path/to/example.srt") +var buf = &bytes.Buffer{} +s2.WriteToTTML(buf) +``` + +# Using the CLI + +If **astisub** has been installed properly you can: + +- convert any type of subtitle to any other type of subtitle: + + astisub convert -i example.srt -o example.ttml + +- fragment any type of subtitle: + + astisub fragment -i example.srt -f 2s -o example.out.srt + +- merge any type of subtitle into any other type of subtitle: + + astisub merge -i example.srt -i example.ttml -o example.out.srt + +- optimize any type of subtitle: + + astisub optimize -i example.srt -o example.out.srt + +- unfragment any type of subtitle: + + astisub unfragment -i example.srt -o example.out.srt + +- sync any type of subtitle: + + astisub sync -i example.srt -s "-2s" -o example.out.srt + +# Features and roadmap + +- [x] parsing +- [x] writing +- [x] syncing +- [x] fragmenting/unfragmenting +- [x] merging +- [x] ordering +- [x] optimizing +- [x] .srt +- [x] .ttml +- [x] .vtt +- [x] .stl +- [x] .ssa/.ass +- [x] .teletext +- [ ] .smi diff --git a/vendor/github.com/asticode/go-astisub/language.go b/vendor/github.com/asticode/go-astisub/language.go new file mode 100644 index 000000000..a7c762abb --- /dev/null +++ b/vendor/github.com/asticode/go-astisub/language.go @@ -0,0 +1,10 @@ +package astisub + +// Languages +const ( + LanguageChinese = "chinese" + LanguageEnglish = "english" + LanguageFrench = "french" + LanguageJapanese = "japanese" + LanguageNorwegian = "norwegian" +) diff --git a/vendor/github.com/asticode/go-astisub/srt.go 
b/vendor/github.com/asticode/go-astisub/srt.go new file mode 100644 index 000000000..8e854e558 --- /dev/null +++ b/vendor/github.com/asticode/go-astisub/srt.go @@ -0,0 +1,159 @@ +package astisub + +import ( + "bufio" + "fmt" + "io" + "strconv" + "strings" + "time" +) + +// Constants +const ( + srtTimeBoundariesSeparator = " --> " +) + +// Vars +var ( + bytesSRTTimeBoundariesSeparator = []byte(srtTimeBoundariesSeparator) +) + +// parseDurationSRT parses an .srt duration +func parseDurationSRT(i string) (time.Duration, error) { + return parseDuration(i, ",", 3) +} + +// ReadFromSRT parses an .srt content +func ReadFromSRT(i io.Reader) (o *Subtitles, err error) { + // Init + o = NewSubtitles() + var scanner = bufio.NewScanner(i) + + // Scan + var line string + var lineNum int + var s = &Item{} + for scanner.Scan() { + // Fetch line + line = strings.TrimSpace(scanner.Text()) + lineNum++ + + // Remove BOM header + if lineNum == 1 { + line = strings.TrimPrefix(line, string(BytesBOM)) + } + + // Line contains time boundaries + if strings.Contains(line, srtTimeBoundariesSeparator) { + // Return the wrong number of rows + if len(s.Lines) == 0 { + err = fmt.Errorf("astisub: line %d: no lines", lineNum) + return + } + + // Remove last item of previous subtitle since it's the index + index := s.Lines[len(s.Lines)-1] + s.Lines = s.Lines[:len(s.Lines)-1] + + // Remove trailing empty lines + if len(s.Lines) > 0 { + for i := len(s.Lines) - 1; i >= 0; i-- { + if len(s.Lines[i].Items) > 0 { + for j := len(s.Lines[i].Items) - 1; j >= 0; j-- { + if len(s.Lines[i].Items[j].Text) == 0 { + s.Lines[i].Items = s.Lines[i].Items[:j] + } else { + break + } + } + if len(s.Lines[i].Items) == 0 { + s.Lines = s.Lines[:i] + } + + } + } + } + + // Init subtitle + s = &Item{} + + // Fetch Index + s.Index, _ = strconv.Atoi(index.String()) + + // Extract time boundaries + s1 := strings.Split(line, srtTimeBoundariesSeparator) + if l := len(s1); l < 2 { + err = fmt.Errorf("astisub: line %d: time 
boundaries has only %d element(s)", lineNum, l) + return + } + // We do this to eliminate extra stuff like positions which are not documented anywhere + s2 := strings.Split(s1[1], " ") + + // Parse time boundaries + if s.StartAt, err = parseDurationSRT(s1[0]); err != nil { + err = fmt.Errorf("astisub: line %d: parsing srt duration %s failed: %w", lineNum, s1[0], err) + return + } + if s.EndAt, err = parseDurationSRT(s2[0]); err != nil { + err = fmt.Errorf("astisub: line %d: parsing srt duration %s failed: %w", lineNum, s2[0], err) + return + } + + // Append subtitle + o.Items = append(o.Items, s) + } else { + // Add text + s.Lines = append(s.Lines, Line{Items: []LineItem{{Text: strings.TrimSpace(line)}}}) + } + } + return +} + +// formatDurationSRT formats an .srt duration +func formatDurationSRT(i time.Duration) string { + return formatDuration(i, ",", 3) +} + +// WriteToSRT writes subtitles in .srt format +func (s Subtitles) WriteToSRT(o io.Writer) (err error) { + // Do not write anything if no subtitles + if len(s.Items) == 0 { + err = ErrNoSubtitlesToWrite + return + } + + // Add BOM header + var c []byte + c = append(c, BytesBOM...) + + // Loop through subtitles + for k, v := range s.Items { + // Add time boundaries + c = append(c, []byte(strconv.Itoa(k+1))...) + c = append(c, bytesLineSeparator...) + c = append(c, []byte(formatDurationSRT(v.StartAt))...) + c = append(c, bytesSRTTimeBoundariesSeparator...) + c = append(c, []byte(formatDurationSRT(v.EndAt))...) + c = append(c, bytesLineSeparator...) + + // Loop through lines + for _, l := range v.Lines { + c = append(c, []byte(l.String())...) + c = append(c, bytesLineSeparator...) + } + + // Add new line + c = append(c, bytesLineSeparator...) 
+ } + + // Remove last new line + c = c[:len(c)-1] + + // Write + if _, err = o.Write(c); err != nil { + err = fmt.Errorf("astisub: writing failed: %w", err) + return + } + return +} diff --git a/vendor/github.com/asticode/go-astisub/ssa.go b/vendor/github.com/asticode/go-astisub/ssa.go new file mode 100644 index 000000000..a3a00b000 --- /dev/null +++ b/vendor/github.com/asticode/go-astisub/ssa.go @@ -0,0 +1,1297 @@ +package astisub + +import ( + "bufio" + "fmt" + "io" + "log" + "regexp" + "sort" + "strconv" + "strings" + "time" + + "github.com/asticode/go-astikit" +) + +// https://www.matroska.org/technical/specs/subtitles/ssa.html +// http://moodub.free.fr/video/ass-specs.doc +// https://en.wikipedia.org/wiki/SubStation_Alpha + +// SSA alignment +const ( + ssaAlignmentCentered = 2 + ssaAlignmentLeft = 1 + ssaAlignmentLeftJustifiedTopTitle = 5 + ssaAlignmentMidTitle = 8 + ssaAlignmentRight = 3 + ssaAlignmentTopTitle = 4 +) + +// SSA border styles +const ( + ssaBorderStyleOpaqueBox = 3 + ssaBorderStyleOutlineAndDropShadow = 1 +) + +// SSA collisions +const ( + ssaCollisionsNormal = "Normal" + ssaCollisionsReverse = "Reverse" +) + +// SSA event categories +const ( + ssaEventCategoryCommand = "Command" + ssaEventCategoryComment = "Comment" + ssaEventCategoryDialogue = "Dialogue" + ssaEventCategoryMovie = "Movie" + ssaEventCategoryPicture = "Picture" + ssaEventCategorySound = "Sound" +) + +// SSA event format names +const ( + ssaEventFormatNameEffect = "Effect" + ssaEventFormatNameEnd = "End" + ssaEventFormatNameLayer = "Layer" + ssaEventFormatNameMarginL = "MarginL" + ssaEventFormatNameMarginR = "MarginR" + ssaEventFormatNameMarginV = "MarginV" + ssaEventFormatNameMarked = "Marked" + ssaEventFormatNameName = "Name" + ssaEventFormatNameStart = "Start" + ssaEventFormatNameStyle = "Style" + ssaEventFormatNameText = "Text" +) + +// SSA script info names +const ( + ssaScriptInfoNameCollisions = "Collisions" + ssaScriptInfoNameOriginalEditing = "Original Editing" + 
ssaScriptInfoNameOriginalScript = "Original Script" + ssaScriptInfoNameOriginalTiming = "Original Timing" + ssaScriptInfoNameOriginalTranslation = "Original Translation" + ssaScriptInfoNamePlayDepth = "PlayDepth" + ssaScriptInfoNamePlayResX = "PlayResX" + ssaScriptInfoNamePlayResY = "PlayResY" + ssaScriptInfoNameScriptType = "ScriptType" + ssaScriptInfoNameScriptUpdatedBy = "Script Updated By" + ssaScriptInfoNameSynchPoint = "Synch Point" + ssaScriptInfoNameTimer = "Timer" + ssaScriptInfoNameTitle = "Title" + ssaScriptInfoNameUpdateDetails = "Update Details" + ssaScriptInfoNameWrapStyle = "WrapStyle" +) + +// SSA section names +const ( + ssaSectionNameEvents = "events" + ssaSectionNameScriptInfo = "script.info" + ssaSectionNameStyles = "styles" + ssaSectionNameUnknown = "unknown" +) + +// SSA style format names +const ( + ssaStyleFormatNameAlignment = "Alignment" + ssaStyleFormatNameAlphaLevel = "AlphaLevel" + ssaStyleFormatNameAngle = "Angle" + ssaStyleFormatNameBackColour = "BackColour" + ssaStyleFormatNameBold = "Bold" + ssaStyleFormatNameBorderStyle = "BorderStyle" + ssaStyleFormatNameEncoding = "Encoding" + ssaStyleFormatNameFontName = "Fontname" + ssaStyleFormatNameFontSize = "Fontsize" + ssaStyleFormatNameItalic = "Italic" + ssaStyleFormatNameMarginL = "MarginL" + ssaStyleFormatNameMarginR = "MarginR" + ssaStyleFormatNameMarginV = "MarginV" + ssaStyleFormatNameName = "Name" + ssaStyleFormatNameOutline = "Outline" + ssaStyleFormatNameOutlineColour = "OutlineColour" + ssaStyleFormatNamePrimaryColour = "PrimaryColour" + ssaStyleFormatNameScaleX = "ScaleX" + ssaStyleFormatNameScaleY = "ScaleY" + ssaStyleFormatNameSecondaryColour = "SecondaryColour" + ssaStyleFormatNameShadow = "Shadow" + ssaStyleFormatNameSpacing = "Spacing" + ssaStyleFormatNameStrikeout = "Strikeout" + ssaStyleFormatNameTertiaryColour = "TertiaryColour" + ssaStyleFormatNameUnderline = "Underline" +) + +// SSA wrap style +const ( + ssaWrapStyleEndOfLineWordWrapping = "1" + 
ssaWrapStyleNoWordWrapping = "2" + ssaWrapStyleSmartWrapping = "0" + ssaWrapStyleSmartWrappingWithLowerLinesGettingWider = "3" +) + +// SSA regexp +var ssaRegexpEffect = regexp.MustCompile(`\{[^\{]+\}`) + +// ReadFromSSA parses an .ssa content +func ReadFromSSA(i io.Reader) (o *Subtitles, err error) { + o, err = ReadFromSSAWithOptions(i, defaultSSAOptions()) + return o, err +} + +// ReadFromSSAWithOptions parses an .ssa content +func ReadFromSSAWithOptions(i io.Reader, opts SSAOptions) (o *Subtitles, err error) { + // Init + o = NewSubtitles() + var scanner = bufio.NewScanner(i) + var si = &ssaScriptInfo{} + var ss = []*ssaStyle{} + var es = []*ssaEvent{} + + // Scan + var line, sectionName string + var format map[int]string + isFirstLine := true + for scanner.Scan() { + // Fetch line + line = strings.TrimSpace(scanner.Text()) + + // Remove BOM header + if isFirstLine { + line = strings.TrimPrefix(line, string(BytesBOM)) + isFirstLine = false + } + + // Empty line + if len(line) == 0 { + continue + } + + // Section name + if strings.HasPrefix(line, "[") && strings.HasSuffix(line, "]") { + switch strings.ToLower(line[1 : len(line)-1]) { + case "events": + sectionName = ssaSectionNameEvents + format = make(map[int]string) + continue + case "script info": + sectionName = ssaSectionNameScriptInfo + continue + case "v4 styles", "v4+ styles", "v4 styles+": + sectionName = ssaSectionNameStyles + format = make(map[int]string) + continue + default: + if opts.OnUnknownSectionName != nil { + opts.OnUnknownSectionName(line) + } + sectionName = ssaSectionNameUnknown + continue + } + } + + // Unknown section + if sectionName == ssaSectionNameUnknown { + continue + } + + // Comment + if len(line) > 0 && line[0] == ';' { + si.comments = append(si.comments, strings.TrimSpace(line[1:])) + continue + } + + // Split on ":" + var split = strings.Split(line, ":") + if len(split) < 2 || split[0] == "" { + if opts.OnInvalidLine != nil { + opts.OnInvalidLine(line) + } + continue + } + var 
header = strings.TrimSpace(split[0]) + var content = strings.TrimSpace(strings.Join(split[1:], ":")) + + // Switch on section name + switch sectionName { + case ssaSectionNameScriptInfo: + if err = si.parse(header, content); err != nil { + err = fmt.Errorf("astisub: parsing script info block failed: %w", err) + return + } + case ssaSectionNameEvents, ssaSectionNameStyles: + // Parse format + if header == "Format" { + for idx, item := range strings.Split(content, ",") { + format[idx] = strings.TrimSpace(item) + } + } else { + // No format provided + if len(format) == 0 { + err = fmt.Errorf("astisub: no %s format provided", sectionName) + return + } + + // Switch on section name + switch sectionName { + case ssaSectionNameEvents: + var e *ssaEvent + if e, err = newSSAEventFromString(header, content, format); err != nil { + err = fmt.Errorf("astisub: building new ssa event failed: %w", err) + return + } + es = append(es, e) + case ssaSectionNameStyles: + var s *ssaStyle + if s, err = newSSAStyleFromString(content, format); err != nil { + err = fmt.Errorf("astisub: building new ssa style failed: %w", err) + return + } + ss = append(ss, s) + } + } + } + } + + // Set metadata + o.Metadata = si.metadata() + + // Loop through styles + for _, s := range ss { + var st = s.style() + o.Styles[st.ID] = st + } + + // Loop through events + for _, e := range es { + // Only process dialogues + if e.category == ssaEventCategoryDialogue { + // Build item + var item *Item + if item, err = e.item(o.Styles); err != nil { + return + } + + // Append item + o.Items = append(o.Items, item) + } + } + return +} + +// newColorFromSSAColor builds a new color based on an SSA color +func newColorFromSSAColor(i string) (_ *Color, _ error) { + // Empty + if len(i) == 0 { + return + } + + // Check whether input is decimal or hexadecimal + var s = i + var base = 10 + if strings.HasPrefix(i, "&H") { + s = i[2:] + base = 16 + } + return newColorFromSSAString(s, base) +} + +// newSSAColorFromColor 
builds a new SSA color based on a color +func newSSAColorFromColor(i *Color) string { + return "&H" + i.SSAString() +} + +// ssaScriptInfo represents an SSA script info block +type ssaScriptInfo struct { + collisions string + comments []string + originalEditing string + originalScript string + originalTiming string + originalTranslation string + playDepth *int + playResX, playResY *int + scriptType string + scriptUpdatedBy string + synchPoint string + timer *float64 + title string + updateDetails string + wrapStyle string +} + +// newSSAScriptInfo builds an SSA script info block based on metadata +func newSSAScriptInfo(m *Metadata) (o *ssaScriptInfo) { + // Init + o = &ssaScriptInfo{} + + // Add metadata + if m != nil { + o.collisions = m.SSACollisions + o.comments = m.Comments + o.originalEditing = m.SSAOriginalEditing + o.originalScript = m.SSAOriginalScript + o.originalTiming = m.SSAOriginalTiming + o.originalTranslation = m.SSAOriginalTranslation + o.playDepth = m.SSAPlayDepth + o.playResX = m.SSAPlayResX + o.playResY = m.SSAPlayResY + o.scriptType = m.SSAScriptType + o.scriptUpdatedBy = m.SSAScriptUpdatedBy + o.synchPoint = m.SSASynchPoint + o.timer = m.SSATimer + o.title = m.Title + o.updateDetails = m.SSAUpdateDetails + o.wrapStyle = m.SSAWrapStyle + } + return +} + +// parse parses a script info header/content +func (b *ssaScriptInfo) parse(header, content string) (err error) { + switch header { + case ssaScriptInfoNameCollisions: + b.collisions = content + case ssaScriptInfoNameOriginalEditing: + b.originalEditing = content + case ssaScriptInfoNameOriginalScript: + b.originalScript = content + case ssaScriptInfoNameOriginalTiming: + b.originalTiming = content + case ssaScriptInfoNameOriginalTranslation: + b.originalTranslation = content + case ssaScriptInfoNameScriptType: + b.scriptType = content + case ssaScriptInfoNameScriptUpdatedBy: + b.scriptUpdatedBy = content + case ssaScriptInfoNameSynchPoint: + b.synchPoint = content + case ssaScriptInfoNameTitle: 
+ b.title = content + case ssaScriptInfoNameUpdateDetails: + b.updateDetails = content + case ssaScriptInfoNameWrapStyle: + b.wrapStyle = content + // Int + case ssaScriptInfoNamePlayResX, ssaScriptInfoNamePlayResY, ssaScriptInfoNamePlayDepth: + var v int + if v, err = strconv.Atoi(content); err != nil { + err = fmt.Errorf("astisub: atoi of %s failed: %w", content, err) + } + switch header { + case ssaScriptInfoNamePlayDepth: + b.playDepth = astikit.IntPtr(v) + case ssaScriptInfoNamePlayResX: + b.playResX = astikit.IntPtr(v) + case ssaScriptInfoNamePlayResY: + b.playResY = astikit.IntPtr(v) + } + // Float + case ssaScriptInfoNameTimer: + var v float64 + if v, err = strconv.ParseFloat(strings.Replace(content, ",", ".", -1), 64); err != nil { + err = fmt.Errorf("astisub: parseFloat of %s failed: %w", content, err) + } + b.timer = astikit.Float64Ptr(v) + } + return +} + +// metadata returns the block as Metadata +func (b *ssaScriptInfo) metadata() *Metadata { + return &Metadata{ + Comments: b.comments, + SSACollisions: b.collisions, + SSAOriginalEditing: b.originalEditing, + SSAOriginalScript: b.originalScript, + SSAOriginalTiming: b.originalTiming, + SSAOriginalTranslation: b.originalTranslation, + SSAPlayDepth: b.playDepth, + SSAPlayResX: b.playResX, + SSAPlayResY: b.playResY, + SSAScriptType: b.scriptType, + SSAScriptUpdatedBy: b.scriptUpdatedBy, + SSASynchPoint: b.synchPoint, + SSATimer: b.timer, + SSAUpdateDetails: b.updateDetails, + SSAWrapStyle: b.wrapStyle, + Title: b.title, + } +} + +// bytes returns the block as bytes +func (b *ssaScriptInfo) bytes() (o []byte) { + o = []byte("[Script Info]") + o = append(o, bytesLineSeparator...) 
+ for _, c := range b.comments { + o = appendStringToBytesWithNewLine(o, "; "+c) + } + if len(b.collisions) > 0 { + o = appendStringToBytesWithNewLine(o, ssaScriptInfoNameCollisions+": "+b.collisions) + } + if len(b.originalEditing) > 0 { + o = appendStringToBytesWithNewLine(o, ssaScriptInfoNameOriginalEditing+": "+b.originalEditing) + } + if len(b.originalScript) > 0 { + o = appendStringToBytesWithNewLine(o, ssaScriptInfoNameOriginalScript+": "+b.originalScript) + } + if len(b.originalTiming) > 0 { + o = appendStringToBytesWithNewLine(o, ssaScriptInfoNameOriginalTiming+": "+b.originalTiming) + } + if len(b.originalTranslation) > 0 { + o = appendStringToBytesWithNewLine(o, ssaScriptInfoNameOriginalTranslation+": "+b.originalTranslation) + } + if b.playDepth != nil { + o = appendStringToBytesWithNewLine(o, ssaScriptInfoNamePlayDepth+": "+strconv.Itoa(*b.playDepth)) + } + if b.playResX != nil { + o = appendStringToBytesWithNewLine(o, ssaScriptInfoNamePlayResX+": "+strconv.Itoa(*b.playResX)) + } + if b.playResY != nil { + o = appendStringToBytesWithNewLine(o, ssaScriptInfoNamePlayResY+": "+strconv.Itoa(*b.playResY)) + } + if len(b.scriptType) > 0 { + o = appendStringToBytesWithNewLine(o, ssaScriptInfoNameScriptType+": "+b.scriptType) + } + if len(b.scriptUpdatedBy) > 0 { + o = appendStringToBytesWithNewLine(o, ssaScriptInfoNameScriptUpdatedBy+": "+b.scriptUpdatedBy) + } + if len(b.synchPoint) > 0 { + o = appendStringToBytesWithNewLine(o, ssaScriptInfoNameSynchPoint+": "+b.synchPoint) + } + if b.timer != nil { + o = appendStringToBytesWithNewLine(o, ssaScriptInfoNameTimer+": "+strings.Replace(strconv.FormatFloat(*b.timer, 'f', -1, 64), ".", ",", -1)) + } + if len(b.title) > 0 { + o = appendStringToBytesWithNewLine(o, ssaScriptInfoNameTitle+": "+b.title) + } + if len(b.updateDetails) > 0 { + o = appendStringToBytesWithNewLine(o, ssaScriptInfoNameUpdateDetails+": "+b.updateDetails) + } + if len(b.wrapStyle) > 0 { + o = appendStringToBytesWithNewLine(o, 
ssaScriptInfoNameWrapStyle+": "+b.wrapStyle) + } + return +} + +// ssaStyle represents an SSA style +type ssaStyle struct { + alignment *int + alphaLevel *float64 + angle *float64 // degrees + backColour *Color + bold *bool + borderStyle *int + encoding *int + fontName string + fontSize *float64 + italic *bool + outline *float64 // pixels + outlineColour *Color + marginLeft *int // pixels + marginRight *int // pixels + marginVertical *int // pixels + name string + primaryColour *Color + scaleX *float64 // % + scaleY *float64 // % + secondaryColour *Color + shadow *float64 // pixels + spacing *float64 // pixels + strikeout *bool + underline *bool +} + +// newSSAStyleFromStyle returns an SSA style based on a Style +func newSSAStyleFromStyle(i Style) *ssaStyle { + return &ssaStyle{ + alignment: i.InlineStyle.SSAAlignment, + alphaLevel: i.InlineStyle.SSAAlphaLevel, + angle: i.InlineStyle.SSAAngle, + backColour: i.InlineStyle.SSABackColour, + bold: i.InlineStyle.SSABold, + borderStyle: i.InlineStyle.SSABorderStyle, + encoding: i.InlineStyle.SSAEncoding, + fontName: i.InlineStyle.SSAFontName, + fontSize: i.InlineStyle.SSAFontSize, + italic: i.InlineStyle.SSAItalic, + outline: i.InlineStyle.SSAOutline, + outlineColour: i.InlineStyle.SSAOutlineColour, + marginLeft: i.InlineStyle.SSAMarginLeft, + marginRight: i.InlineStyle.SSAMarginRight, + marginVertical: i.InlineStyle.SSAMarginVertical, + name: i.ID, + primaryColour: i.InlineStyle.SSAPrimaryColour, + scaleX: i.InlineStyle.SSAScaleX, + scaleY: i.InlineStyle.SSAScaleY, + secondaryColour: i.InlineStyle.SSASecondaryColour, + shadow: i.InlineStyle.SSAShadow, + spacing: i.InlineStyle.SSASpacing, + strikeout: i.InlineStyle.SSAStrikeout, + underline: i.InlineStyle.SSAUnderline, + } +} + +// newSSAStyleFromString returns an SSA style based on an input string and a format +func newSSAStyleFromString(content string, format map[int]string) (s *ssaStyle, err error) { + // Split content + var items = strings.Split(content, ",") + + // 
Not enough items + if len(items) < len(format) { + err = fmt.Errorf("astisub: content has %d items whereas style format has %d items", len(items), len(format)) + return + } + + // Loop through items + s = &ssaStyle{} + for idx, item := range items { + // Index not found in format + var attr string + var ok bool + if attr, ok = format[idx]; !ok { + err = fmt.Errorf("astisub: index %d not found in style format %+v", idx, format) + return + } + + // Switch on attribute name + switch attr { + // Bool + case ssaStyleFormatNameBold, ssaStyleFormatNameItalic, ssaStyleFormatNameStrikeout, + ssaStyleFormatNameUnderline: + var b = item == "-1" + switch attr { + case ssaStyleFormatNameBold: + s.bold = astikit.BoolPtr(b) + case ssaStyleFormatNameItalic: + s.italic = astikit.BoolPtr(b) + case ssaStyleFormatNameStrikeout: + s.strikeout = astikit.BoolPtr(b) + case ssaStyleFormatNameUnderline: + s.underline = astikit.BoolPtr(b) + } + // Color + case ssaStyleFormatNamePrimaryColour, ssaStyleFormatNameSecondaryColour, + ssaStyleFormatNameTertiaryColour, ssaStyleFormatNameOutlineColour, ssaStyleFormatNameBackColour: + // Build color + var c *Color + if c, err = newColorFromSSAColor(item); err != nil { + err = fmt.Errorf("astisub: building new %s from ssa color %s failed: %w", attr, item, err) + return + } + + // Set color + switch attr { + case ssaStyleFormatNameBackColour: + s.backColour = c + case ssaStyleFormatNamePrimaryColour: + s.primaryColour = c + case ssaStyleFormatNameSecondaryColour: + s.secondaryColour = c + case ssaStyleFormatNameTertiaryColour, ssaStyleFormatNameOutlineColour: + s.outlineColour = c + } + // Float + case ssaStyleFormatNameAlphaLevel, ssaStyleFormatNameAngle, ssaStyleFormatNameFontSize, + ssaStyleFormatNameScaleX, ssaStyleFormatNameScaleY, + ssaStyleFormatNameOutline, ssaStyleFormatNameShadow, ssaStyleFormatNameSpacing: + // Parse float + var f float64 + if f, err = strconv.ParseFloat(item, 64); err != nil { + err = fmt.Errorf("astisub: parsing float %s 
failed: %w", item, err) + return + } + + // Set float + switch attr { + case ssaStyleFormatNameAlphaLevel: + s.alphaLevel = astikit.Float64Ptr(f) + case ssaStyleFormatNameAngle: + s.angle = astikit.Float64Ptr(f) + case ssaStyleFormatNameFontSize: + s.fontSize = astikit.Float64Ptr(f) + case ssaStyleFormatNameScaleX: + s.scaleX = astikit.Float64Ptr(f) + case ssaStyleFormatNameScaleY: + s.scaleY = astikit.Float64Ptr(f) + case ssaStyleFormatNameOutline: + s.outline = astikit.Float64Ptr(f) + case ssaStyleFormatNameShadow: + s.shadow = astikit.Float64Ptr(f) + case ssaStyleFormatNameSpacing: + s.spacing = astikit.Float64Ptr(f) + } + // Int + case ssaStyleFormatNameAlignment, ssaStyleFormatNameBorderStyle, ssaStyleFormatNameEncoding, + ssaStyleFormatNameMarginL, ssaStyleFormatNameMarginR, ssaStyleFormatNameMarginV: + // Parse int + var i int + if i, err = strconv.Atoi(item); err != nil { + err = fmt.Errorf("astisub: atoi of %s failed: %w", item, err) + return + } + + // Set int + switch attr { + case ssaStyleFormatNameAlignment: + s.alignment = astikit.IntPtr(i) + case ssaStyleFormatNameBorderStyle: + s.borderStyle = astikit.IntPtr(i) + case ssaStyleFormatNameEncoding: + s.encoding = astikit.IntPtr(i) + case ssaStyleFormatNameMarginL: + s.marginLeft = astikit.IntPtr(i) + case ssaStyleFormatNameMarginR: + s.marginRight = astikit.IntPtr(i) + case ssaStyleFormatNameMarginV: + s.marginVertical = astikit.IntPtr(i) + } + // String + case ssaStyleFormatNameFontName, ssaStyleFormatNameName: + switch attr { + case ssaStyleFormatNameFontName: + s.fontName = item + case ssaStyleFormatNameName: + s.name = item + } + } + } + return +} + +// ssaUpdateFormat updates an SSA format +func ssaUpdateFormat(n string, formatMap map[string]bool, format []string) []string { + if _, ok := formatMap[n]; !ok { + formatMap[n] = true + format = append(format, n) + } + return format +} + +// updateFormat updates the format based on the non empty fields +func (s ssaStyle) updateFormat(formatMap 
map[string]bool, format []string) []string { + if s.alignment != nil { + format = ssaUpdateFormat(ssaStyleFormatNameAlignment, formatMap, format) + } + if s.alphaLevel != nil { + format = ssaUpdateFormat(ssaStyleFormatNameAlphaLevel, formatMap, format) + } + if s.angle != nil { + format = ssaUpdateFormat(ssaStyleFormatNameAngle, formatMap, format) + } + if s.backColour != nil { + format = ssaUpdateFormat(ssaStyleFormatNameBackColour, formatMap, format) + } + if s.bold != nil { + format = ssaUpdateFormat(ssaStyleFormatNameBold, formatMap, format) + } + if s.borderStyle != nil { + format = ssaUpdateFormat(ssaStyleFormatNameBorderStyle, formatMap, format) + } + if s.encoding != nil { + format = ssaUpdateFormat(ssaStyleFormatNameEncoding, formatMap, format) + } + if len(s.fontName) > 0 { + format = ssaUpdateFormat(ssaStyleFormatNameFontName, formatMap, format) + } + if s.fontSize != nil { + format = ssaUpdateFormat(ssaStyleFormatNameFontSize, formatMap, format) + } + if s.italic != nil { + format = ssaUpdateFormat(ssaStyleFormatNameItalic, formatMap, format) + } + if s.marginLeft != nil { + format = ssaUpdateFormat(ssaStyleFormatNameMarginL, formatMap, format) + } + if s.marginRight != nil { + format = ssaUpdateFormat(ssaStyleFormatNameMarginR, formatMap, format) + } + if s.marginVertical != nil { + format = ssaUpdateFormat(ssaStyleFormatNameMarginV, formatMap, format) + } + if s.outline != nil { + format = ssaUpdateFormat(ssaStyleFormatNameOutline, formatMap, format) + } + if s.outlineColour != nil { + format = ssaUpdateFormat(ssaStyleFormatNameOutlineColour, formatMap, format) + } + if s.primaryColour != nil { + format = ssaUpdateFormat(ssaStyleFormatNamePrimaryColour, formatMap, format) + } + if s.scaleX != nil { + format = ssaUpdateFormat(ssaStyleFormatNameScaleX, formatMap, format) + } + if s.scaleY != nil { + format = ssaUpdateFormat(ssaStyleFormatNameScaleY, formatMap, format) + } + if s.secondaryColour != nil { + format = 
ssaUpdateFormat(ssaStyleFormatNameSecondaryColour, formatMap, format) + } + if s.shadow != nil { + format = ssaUpdateFormat(ssaStyleFormatNameShadow, formatMap, format) + } + if s.spacing != nil { + format = ssaUpdateFormat(ssaStyleFormatNameSpacing, formatMap, format) + } + if s.strikeout != nil { + format = ssaUpdateFormat(ssaStyleFormatNameStrikeout, formatMap, format) + } + if s.underline != nil { + format = ssaUpdateFormat(ssaStyleFormatNameUnderline, formatMap, format) + } + return format +} + +// string returns the block as a string +func (s ssaStyle) string(format []string) string { + var ss = []string{s.name} + for _, attr := range format { + var v string + var found = true + switch attr { + // Bool + case ssaStyleFormatNameBold, ssaStyleFormatNameItalic, ssaStyleFormatNameStrikeout, + ssaStyleFormatNameUnderline: + var b *bool + switch attr { + case ssaStyleFormatNameBold: + b = s.bold + case ssaStyleFormatNameItalic: + b = s.italic + case ssaStyleFormatNameStrikeout: + b = s.strikeout + case ssaStyleFormatNameUnderline: + b = s.underline + } + if b != nil { + v = "0" + if *b { + v = "1" + } + } + // Color + case ssaStyleFormatNamePrimaryColour, ssaStyleFormatNameSecondaryColour, + ssaStyleFormatNameOutlineColour, ssaStyleFormatNameBackColour: + var c *Color + switch attr { + case ssaStyleFormatNameBackColour: + c = s.backColour + case ssaStyleFormatNamePrimaryColour: + c = s.primaryColour + case ssaStyleFormatNameSecondaryColour: + c = s.secondaryColour + case ssaStyleFormatNameOutlineColour: + c = s.outlineColour + } + if c != nil { + v = newSSAColorFromColor(c) + } + // Float + case ssaStyleFormatNameAlphaLevel, ssaStyleFormatNameAngle, ssaStyleFormatNameFontSize, + ssaStyleFormatNameScaleX, ssaStyleFormatNameScaleY, + ssaStyleFormatNameOutline, ssaStyleFormatNameShadow, ssaStyleFormatNameSpacing: + var f *float64 + switch attr { + case ssaStyleFormatNameAlphaLevel: + f = s.alphaLevel + case ssaStyleFormatNameAngle: + f = s.angle + case 
ssaStyleFormatNameFontSize: + f = s.fontSize + case ssaStyleFormatNameScaleX: + f = s.scaleX + case ssaStyleFormatNameScaleY: + f = s.scaleY + case ssaStyleFormatNameOutline: + f = s.outline + case ssaStyleFormatNameShadow: + f = s.shadow + case ssaStyleFormatNameSpacing: + f = s.spacing + } + if f != nil { + v = strconv.FormatFloat(*f, 'f', 3, 64) + } + // Int + case ssaStyleFormatNameAlignment, ssaStyleFormatNameBorderStyle, ssaStyleFormatNameEncoding, + ssaStyleFormatNameMarginL, ssaStyleFormatNameMarginR, ssaStyleFormatNameMarginV: + var i *int + switch attr { + case ssaStyleFormatNameAlignment: + i = s.alignment + case ssaStyleFormatNameBorderStyle: + i = s.borderStyle + case ssaStyleFormatNameEncoding: + i = s.encoding + case ssaStyleFormatNameMarginL: + i = s.marginLeft + case ssaStyleFormatNameMarginR: + i = s.marginRight + case ssaStyleFormatNameMarginV: + i = s.marginVertical + } + if i != nil { + v = strconv.Itoa(*i) + } + // String + case ssaStyleFormatNameFontName: + switch attr { + case ssaStyleFormatNameFontName: + v = s.fontName + } + default: + found = false + } + if found { + ss = append(ss, v) + } + } + return strings.Join(ss, ",") +} + +// style converts ssaStyle to Style +func (s ssaStyle) style() (o *Style) { + o = &Style{ + ID: s.name, + InlineStyle: &StyleAttributes{ + SSAAlignment: s.alignment, + SSAAlphaLevel: s.alphaLevel, + SSAAngle: s.angle, + SSABackColour: s.backColour, + SSABold: s.bold, + SSABorderStyle: s.borderStyle, + SSAEncoding: s.encoding, + SSAFontName: s.fontName, + SSAFontSize: s.fontSize, + SSAItalic: s.italic, + SSAOutline: s.outline, + SSAOutlineColour: s.outlineColour, + SSAMarginLeft: s.marginLeft, + SSAMarginRight: s.marginRight, + SSAMarginVertical: s.marginVertical, + SSAPrimaryColour: s.primaryColour, + SSAScaleX: s.scaleX, + SSAScaleY: s.scaleY, + SSASecondaryColour: s.secondaryColour, + SSAShadow: s.shadow, + SSASpacing: s.spacing, + SSAStrikeout: s.strikeout, + SSAUnderline: s.underline, + }, + } + 
o.InlineStyle.propagateSSAAttributes() + return +} + +// ssaEvent represents an SSA event +type ssaEvent struct { + category string + effect string + end time.Duration + layer *int + marked *bool + marginLeft *int // pixels + marginRight *int // pixels + marginVertical *int // pixels + name string + start time.Duration + style string + text string +} + +// newSSAEventFromItem returns an SSA Event based on an input item +func newSSAEventFromItem(i Item) (e *ssaEvent) { + // Init + e = &ssaEvent{ + category: ssaEventCategoryDialogue, + end: i.EndAt, + start: i.StartAt, + } + + // Style + if i.Style != nil { + e.style = i.Style.ID + } + + // Inline style + if i.InlineStyle != nil { + e.effect = i.InlineStyle.SSAEffect + e.layer = i.InlineStyle.SSALayer + e.marginLeft = i.InlineStyle.SSAMarginLeft + e.marginRight = i.InlineStyle.SSAMarginRight + e.marginVertical = i.InlineStyle.SSAMarginVertical + e.marked = i.InlineStyle.SSAMarked + } + + // Text + var lines []string + for _, l := range i.Lines { + var items []string + for _, item := range l.Items { + var s string + if item.InlineStyle != nil && len(item.InlineStyle.SSAEffect) > 0 { + s += item.InlineStyle.SSAEffect + } + s += item.Text + items = append(items, s) + } + if len(l.VoiceName) > 0 { + e.name = l.VoiceName + } + lines = append(lines, strings.Join(items, " ")) + } + e.text = strings.Join(lines, "\\n") + return +} + +// newSSAEventFromString returns an SSA event based on an input string and a format +func newSSAEventFromString(header, content string, format map[int]string) (e *ssaEvent, err error) { + // Split content + var items = strings.Split(content, ",") + + // Not enough items + if len(items) < len(format) { + err = fmt.Errorf("astisub: content has %d items whereas style format has %d items", len(items), len(format)) + return + } + + // Last item may contain commas, therefore we need to fix it + items[len(format)-1] = strings.Join(items[len(format)-1:], ",") + items = items[:len(format)] + + // Loop 
through items + e = &ssaEvent{category: header} + for idx, item := range items { + // Index not found in format + var attr string + var ok bool + if attr, ok = format[idx]; !ok { + err = fmt.Errorf("astisub: index %d not found in event format %+v", idx, format) + return + } + + // Switch on attribute name + switch attr { + // Duration + case ssaEventFormatNameStart, ssaEventFormatNameEnd: + // Parse duration + var d time.Duration + if d, err = parseDurationSSA(item); err != nil { + err = fmt.Errorf("astisub: parsing ssa duration %s failed: %w", item, err) + return + } + + // Set duration + switch attr { + case ssaEventFormatNameEnd: + e.end = d + case ssaEventFormatNameStart: + e.start = d + } + // Int + case ssaEventFormatNameLayer, ssaEventFormatNameMarginL, ssaEventFormatNameMarginR, + ssaEventFormatNameMarginV: + // Parse int + var i int + if i, err = strconv.Atoi(item); err != nil { + err = fmt.Errorf("astisub: atoi of %s failed: %w", item, err) + return + } + + // Set int + switch attr { + case ssaEventFormatNameLayer: + e.layer = astikit.IntPtr(i) + case ssaEventFormatNameMarginL: + e.marginLeft = astikit.IntPtr(i) + case ssaEventFormatNameMarginR: + e.marginRight = astikit.IntPtr(i) + case ssaEventFormatNameMarginV: + e.marginVertical = astikit.IntPtr(i) + } + // String + case ssaEventFormatNameEffect, ssaEventFormatNameName, ssaEventFormatNameStyle, ssaEventFormatNameText: + switch attr { + case ssaEventFormatNameEffect: + e.effect = item + case ssaEventFormatNameName: + e.name = item + case ssaEventFormatNameStyle: + // *Default is reserved + // http://www.tcax.org/docs/ass-specs.htm + if item == "*Default" { + e.style = "Default" + } else { + e.style = item + } + case ssaEventFormatNameText: + e.text = strings.TrimSpace(item) + } + // Marked + case ssaEventFormatNameMarked: + if item == "Marked=1" { + e.marked = astikit.BoolPtr(true) + } else { + e.marked = astikit.BoolPtr(false) + } + } + } + return +} + +// item converts an SSA event to an Item +func 
(e *ssaEvent) item(styles map[string]*Style) (i *Item, err error) { + // Init item + i = &Item{ + EndAt: e.end, + InlineStyle: &StyleAttributes{ + SSAEffect: e.effect, + SSALayer: e.layer, + SSAMarginLeft: e.marginLeft, + SSAMarginRight: e.marginRight, + SSAMarginVertical: e.marginVertical, + SSAMarked: e.marked, + }, + StartAt: e.start, + } + + // Set style + if len(e.style) > 0 { + var ok bool + if i.Style, ok = styles[e.style]; !ok { + err = fmt.Errorf("astisub: style %s not found", e.style) + return + } + } + + // Loop through lines + for _, s := range strings.Split(e.text, "\\n") { + // Init + s = strings.TrimSpace(s) + var l = Line{VoiceName: e.name} + + // Extract effects + var matches = ssaRegexpEffect.FindAllStringIndex(s, -1) + if len(matches) > 0 { + // Loop through matches + var lineItem *LineItem + var previousEffectEndOffset int + for _, idxs := range matches { + if lineItem != nil { + lineItem.Text = s[previousEffectEndOffset:idxs[0]] + l.Items = append(l.Items, *lineItem) + } else if idxs[0] > 0 { + l.Items = append(l.Items, LineItem{Text: s[previousEffectEndOffset:idxs[0]]}) + } + previousEffectEndOffset = idxs[1] + lineItem = &LineItem{InlineStyle: &StyleAttributes{SSAEffect: s[idxs[0]:idxs[1]]}} + } + lineItem.Text = s[previousEffectEndOffset:] + l.Items = append(l.Items, *lineItem) + } else { + l.Items = append(l.Items, LineItem{Text: s}) + } + + // Add line + i.Lines = append(i.Lines, l) + } + return +} + +// updateFormat updates the format based on the non empty fields +func (e ssaEvent) updateFormat(formatMap map[string]bool, format []string) []string { + if len(e.effect) > 0 { + format = ssaUpdateFormat(ssaEventFormatNameEffect, formatMap, format) + } + if e.layer != nil { + format = ssaUpdateFormat(ssaEventFormatNameLayer, formatMap, format) + } + if e.marginLeft != nil { + format = ssaUpdateFormat(ssaEventFormatNameMarginL, formatMap, format) + } + if e.marginRight != nil { + format = ssaUpdateFormat(ssaEventFormatNameMarginR, formatMap, 
format) + } + if e.marginVertical != nil { + format = ssaUpdateFormat(ssaEventFormatNameMarginV, formatMap, format) + } + if e.marked != nil { + format = ssaUpdateFormat(ssaEventFormatNameMarked, formatMap, format) + } + if len(e.name) > 0 { + format = ssaUpdateFormat(ssaEventFormatNameName, formatMap, format) + } + if len(e.style) > 0 { + format = ssaUpdateFormat(ssaEventFormatNameStyle, formatMap, format) + } + return format +} + +// formatDurationSSA formats an .ssa duration +func formatDurationSSA(i time.Duration) string { + return formatDuration(i, ".", 2) +} + +// string returns the block as a string +func (e *ssaEvent) string(format []string) string { + var ss []string + for _, attr := range format { + var v string + var found = true + switch attr { + // Duration + case ssaEventFormatNameEnd, ssaEventFormatNameStart: + switch attr { + case ssaEventFormatNameEnd: + v = formatDurationSSA(e.end) + case ssaEventFormatNameStart: + v = formatDurationSSA(e.start) + } + // Marked + case ssaEventFormatNameMarked: + if e.marked != nil { + if *e.marked { + v = "Marked=1" + } else { + v = "Marked=0" + } + } + // Int + case ssaEventFormatNameLayer, ssaEventFormatNameMarginL, ssaEventFormatNameMarginR, + ssaEventFormatNameMarginV: + var i *int + switch attr { + case ssaEventFormatNameLayer: + i = e.layer + case ssaEventFormatNameMarginL: + i = e.marginLeft + case ssaEventFormatNameMarginR: + i = e.marginRight + case ssaEventFormatNameMarginV: + i = e.marginVertical + } + if i != nil { + v = strconv.Itoa(*i) + } + // String + case ssaEventFormatNameEffect, ssaEventFormatNameName, ssaEventFormatNameStyle, ssaEventFormatNameText: + switch attr { + case ssaEventFormatNameEffect: + v = e.effect + case ssaEventFormatNameName: + v = e.name + case ssaEventFormatNameStyle: + v = e.style + case ssaEventFormatNameText: + v = e.text + } + default: + found = false + } + if found { + ss = append(ss, v) + } + } + return strings.Join(ss, ",") +} + +// parseDurationSSA parses an .ssa 
duration +func parseDurationSSA(i string) (time.Duration, error) { + return parseDuration(i, ".", 3) +} + +// WriteToSSA writes subtitles in .ssa format +func (s Subtitles) WriteToSSA(o io.Writer) (err error) { + // Do not write anything if no subtitles + if len(s.Items) == 0 { + err = ErrNoSubtitlesToWrite + return + } + + // Write Script Info block + var si = newSSAScriptInfo(s.Metadata) + if _, err = o.Write(si.bytes()); err != nil { + err = fmt.Errorf("astisub: writing script info block failed: %w", err) + return + } + + // Write Styles block + if len(s.Styles) > 0 { + // Header + var b = []byte("\n[V4 Styles]\n") + + // Format + var formatMap = make(map[string]bool) + var format = []string{ssaStyleFormatNameName} + var styles = make(map[string]*ssaStyle) + var styleNames []string + for _, s := range s.Styles { + var ss = newSSAStyleFromStyle(*s) + format = ss.updateFormat(formatMap, format) + styles[ss.name] = ss + styleNames = append(styleNames, ss.name) + } + b = append(b, []byte("Format: "+strings.Join(format, ", ")+"\n")...) + + // Styles + sort.Strings(styleNames) + for _, n := range styleNames { + b = append(b, []byte("Style: "+styles[n].string(format)+"\n")...) + } + + // Write + if _, err = o.Write(b); err != nil { + err = fmt.Errorf("astisub: writing styles block failed: %w", err) + return + } + } + + // Write Events block + if len(s.Items) > 0 { + // Header + var b = []byte("\n[Events]\n") + + // Format + var formatMap = make(map[string]bool) + var format = []string{ + ssaEventFormatNameStart, + ssaEventFormatNameEnd, + } + var events []*ssaEvent + for _, i := range s.Items { + var e = newSSAEventFromItem(*i) + format = e.updateFormat(formatMap, format) + events = append(events, e) + } + format = append(format, ssaEventFormatNameText) + b = append(b, []byte("Format: "+strings.Join(format, ", ")+"\n")...) + + // Styles + for _, e := range events { + b = append(b, []byte(ssaEventCategoryDialogue+": "+e.string(format)+"\n")...) 
+ } + + // Write + if _, err = o.Write(b); err != nil { + err = fmt.Errorf("astisub: writing events block failed: %w", err) + return + } + } + return +} + +// SSAOptions +type SSAOptions struct { + OnUnknownSectionName func(name string) + OnInvalidLine func(line string) +} + +func defaultSSAOptions() SSAOptions { + return SSAOptions{ + OnUnknownSectionName: func(name string) { + log.Printf("astisub: unknown section: %s", name) + }, + OnInvalidLine: func(line string) { + log.Printf("astisub: not understood: '%s', ignoring", line) + }, + } +} diff --git a/vendor/github.com/asticode/go-astisub/stl.go b/vendor/github.com/asticode/go-astisub/stl.go new file mode 100644 index 000000000..81a5d5938 --- /dev/null +++ b/vendor/github.com/asticode/go-astisub/stl.go @@ -0,0 +1,1085 @@ +package astisub + +import ( + "bytes" + "encoding/binary" + "errors" + "fmt" + "io" + "math" + "strconv" + "strings" + "time" + + "github.com/asticode/go-astikit" + "golang.org/x/text/unicode/norm" +) + +// https://tech.ebu.ch/docs/tech/tech3264.pdf +// https://github.com/yanncoupin/stl2srt/blob/master/to_srt.py + +// STL block sizes +const ( + stlBlockSizeGSI = 1024 + stlBlockSizeTTI = 128 +) + +// STL character code table number +const ( + stlCharacterCodeTableNumberLatin uint16 = 12336 + stlCharacterCodeTableNumberLatinCyrillic uint16 = 12337 + stlCharacterCodeTableNumberLatinArabic uint16 = 12338 + stlCharacterCodeTableNumberLatinGreek uint16 = 12339 + stlCharacterCodeTableNumberLatinHebrew uint16 = 12340 +) + +// STL character code tables +// TODO Add missing tables +var ( + stlCharacterCodeTables = map[uint16]*astikit.BiMap{ + stlCharacterCodeTableNumberLatin: astikit.NewBiMap(). + Set(0x20, " ").Set(0x21, "!").Set(0x22, "\"").Set(0x23, "#"). + Set(0x24, "¤").Set(0x25, "%").Set(0x26, "&").Set(0x27, "'"). + Set(0x28, "(").Set(0x29, ")").Set(0x2a, "*").Set(0x2b, "+"). + Set(0x2c, ",").Set(0x2d, "-").Set(0x2e, ".").Set(0x2f, "/"). + Set(0x30, "0").Set(0x31, "1").Set(0x32, "2").Set(0x33, "3"). 
+ Set(0x34, "4").Set(0x35, "5").Set(0x36, "6").Set(0x37, "7"). + Set(0x38, "8").Set(0x39, "9").Set(0x3a, ":").Set(0x3b, ";"). + Set(0x3c, "<").Set(0x3d, "=").Set(0x3e, ">").Set(0x3f, "?"). + Set(0x40, "@").Set(0x41, "A").Set(0x42, "B").Set(0x43, "C"). + Set(0x44, "D").Set(0x45, "E").Set(0x46, "F").Set(0x47, "G"). + Set(0x48, "H").Set(0x49, "I").Set(0x4a, "J").Set(0x4b, "K"). + Set(0x4c, "L").Set(0x4d, "M").Set(0x4e, "N").Set(0x4f, "O"). + Set(0x50, "P").Set(0x51, "Q").Set(0x52, "R").Set(0x53, "S"). + Set(0x54, "T").Set(0x55, "U").Set(0x56, "V").Set(0x57, "W"). + Set(0x58, "X").Set(0x59, "Y").Set(0x5a, "Z").Set(0x5b, "["). + Set(0x5c, "\\").Set(0x5d, "]").Set(0x5e, "^").Set(0x5f, "_"). + Set(0x60, "`").Set(0x61, "a").Set(0x62, "b").Set(0x63, "c"). + Set(0x64, "d").Set(0x65, "e").Set(0x66, "f").Set(0x67, "g"). + Set(0x68, "h").Set(0x69, "i").Set(0x6a, "j").Set(0x6b, "k"). + Set(0x6c, "l").Set(0x6d, "m").Set(0x6e, "n").Set(0x6f, "o"). + Set(0x70, "p").Set(0x71, "q").Set(0x72, "r").Set(0x73, "s"). + Set(0x74, "t").Set(0x75, "u").Set(0x76, "v").Set(0x77, "w"). + Set(0x78, "x").Set(0x79, "y").Set(0x7a, "z").Set(0x7b, "{"). + Set(0x7c, "|").Set(0x7d, "}").Set(0x7e, "~"). + Set(0xa0, string([]byte{0xC2, 0xA0})).Set(0xa1, "¡").Set(0xa2, "¢"). + Set(0xa3, "£").Set(0xa4, "$").Set(0xa5, "¥").Set(0xa7, "§"). + Set(0xa9, "‘").Set(0xaa, "“").Set(0xab, "«").Set(0xac, "←"). + Set(0xad, "↑").Set(0xae, "→").Set(0xaf, "↓"). + Set(0xb0, "°").Set(0xb1, "±").Set(0xb2, "²").Set(0xb3, "³"). + Set(0xb4, "×").Set(0xb5, "µ").Set(0xb6, "¶").Set(0xb7, "·"). + Set(0xb8, "÷").Set(0xb9, "’").Set(0xba, "”").Set(0xbb, "»"). + Set(0xbc, "¼").Set(0xbd, "½").Set(0xbe, "¾").Set(0xbf, "¿"). + Set(0xc1, string([]byte{0xCC, 0x80})).Set(0xc2, string([]byte{0xCC, 0x81})). + Set(0xc3, string([]byte{0xCC, 0x82})).Set(0xc4, string([]byte{0xCC, 0x83})). + Set(0xc5, string([]byte{0xCC, 0x84})).Set(0xc6, string([]byte{0xCC, 0x86})). + Set(0xc7, string([]byte{0xCC, 0x87})).Set(0xc8, string([]byte{0xCC, 0x88})). 
+ Set(0xca, string([]byte{0xCC, 0x8A})).Set(0xcb, string([]byte{0xCC, 0xA7})). + Set(0xcd, string([]byte{0xCC, 0x8B})).Set(0xce, string([]byte{0xCC, 0xA8})). + Set(0xcf, string([]byte{0xCC, 0x8C})). + Set(0xd0, "―").Set(0xd1, "¹").Set(0xd2, "®").Set(0xd3, "©"). + Set(0xd4, "™").Set(0xd5, "♪").Set(0xd6, "¬").Set(0xd7, "¦"). + Set(0xdc, "⅛").Set(0xdd, "⅜").Set(0xde, "⅝").Set(0xdf, "⅞"). + Set(0xe0, "Ω").Set(0xe1, "Æ").Set(0xe2, "Đ").Set(0xe3, "ª"). + Set(0xe4, "Ħ").Set(0xe6, "IJ").Set(0xe7, "Ŀ").Set(0xe8, "Ł"). + Set(0xe9, "Ø").Set(0xea, "Œ").Set(0xeb, "º").Set(0xec, "Þ"). + Set(0xed, "Ŧ").Set(0xee, "Ŋ").Set(0xef, "ʼn"). + Set(0xf0, "ĸ").Set(0xf1, "æ").Set(0xf2, "đ").Set(0xf3, "ð"). + Set(0xf4, "ħ").Set(0xf5, "ı").Set(0xf6, "ij").Set(0xf7, "ŀ"). + Set(0xf8, "ł").Set(0xf9, "ø").Set(0xfa, "œ").Set(0xfb, "ß"). + Set(0xfc, "þ").Set(0xfd, "ŧ").Set(0xfe, "ŋ").Set(0xff, string([]byte{0xC2, 0xAD})), + } +) + +// STL code page numbers +const ( + stlCodePageNumberCanadaFrench uint32 = 3683891 + stlCodePageNumberMultilingual uint32 = 3683632 + stlCodePageNumberNordic uint32 = 3683893 + stlCodePageNumberPortugal uint32 = 3683888 + stlCodePageNumberUnitedStates uint32 = 3420983 +) + +// STL comment flag +const ( + stlCommentFlagTextContainsSubtitleData = '\x00' + stlCommentFlagTextContainsCommentsNotIntendedForTransmission = '\x01' +) + +// STL country codes +const ( + stlCountryCodeChinese = "CHN" + stlCountryCodeFrance = "FRA" + stlCountryCodeJapan = "JPN" + stlCountryCodeNorway = "NOR" +) + +// STL cumulative status +const ( + stlCumulativeStatusFirstSubtitleOfACumulativeSet = '\x01' + stlCumulativeStatusIntermediateSubtitleOfACumulativeSet = '\x02' + stlCumulativeStatusLastSubtitleOfACumulativeSet = '\x03' + stlCumulativeStatusSubtitleNotPartOfACumulativeSet = '\x00' +) + +// STL display standard code +const ( + stlDisplayStandardCodeOpenSubtitling = "0" + stlDisplayStandardCodeLevel1Teletext = "1" + stlDisplayStandardCodeLevel2Teletext = "2" +) + +// STL framerate mapping 
+var stlFramerateMapping = astikit.NewBiMap(). + Set("STL25.01", 25). + Set("STL30.01", 30) + +// STL justification code +const ( + stlJustificationCodeCentredText = '\x02' + stlJustificationCodeLeftJustifiedText = '\x01' + stlJustificationCodeRightJustifiedText = '\x03' + stlJustificationCodeUnchangedPresentation = '\x00' +) + +// STL language codes +const ( + stlLanguageCodeChinese = "75" + stlLanguageCodeEnglish = "09" + stlLanguageCodeFrench = "0F" + stllanguageCodeJapanese = "69" + stlLanguageCodeNorwegian = "1E" +) + +// STL language mapping +var stlLanguageMapping = astikit.NewBiMap(). + Set(stlLanguageCodeChinese, LanguageChinese). + Set(stlLanguageCodeEnglish, LanguageEnglish). + Set(stlLanguageCodeFrench, LanguageFrench). + Set(stllanguageCodeJapanese, LanguageJapanese). + Set(stlLanguageCodeNorwegian, LanguageNorwegian) + + // STL timecode status +const ( + stlTimecodeStatusNotIntendedForUse = "0" + stlTimecodeStatusIntendedForUse = "1" +) + +// TTI Special Extension Block Number +const extensionBlockNumberReservedUserData = 0xfe + +const stlLineSeparator = 0x8a + +type STLPosition struct { + VerticalPosition int + MaxRows int + Rows int +} + +// STLOptions represents STL parsing options +type STLOptions struct { + // IgnoreTimecodeStartOfProgramme - set STLTimecodeStartOfProgramme to zero before parsing + IgnoreTimecodeStartOfProgramme bool +} + +// ReadFromSTL parses an .stl content +func ReadFromSTL(i io.Reader, opts STLOptions) (o *Subtitles, err error) { + // Init + o = NewSubtitles() + + // Read GSI block + var b []byte + if b, err = readNBytes(i, stlBlockSizeGSI); err != nil { + return + } + + // Parse GSI block + var g *gsiBlock + if g, err = parseGSIBlock(b); err != nil { + err = fmt.Errorf("astisub: building gsi block failed: %w", err) + return + } + + // Create character handler + var ch *stlCharacterHandler + if ch, err = newSTLCharacterHandler(g.characterCodeTableNumber); err != nil { + err = fmt.Errorf("astisub: creating stl character 
handler failed: %w", err) + return + } + + // Update metadata + // TODO Add more STL fields to metadata + o.Metadata = &Metadata{ + Framerate: g.framerate, + STLCountryOfOrigin: g.countryOfOrigin, + STLCreationDate: &g.creationDate, + STLDisplayStandardCode: g.displayStandardCode, + STLMaximumNumberOfDisplayableCharactersInAnyTextRow: astikit.IntPtr(g.maximumNumberOfDisplayableCharactersInAnyTextRow), + STLMaximumNumberOfDisplayableRows: astikit.IntPtr(g.maximumNumberOfDisplayableRows), + STLPublisher: g.publisher, + STLRevisionDate: &g.revisionDate, + STLSubtitleListReferenceCode: g.subtitleListReferenceCode, + Title: g.originalProgramTitle, + } + if !opts.IgnoreTimecodeStartOfProgramme { + o.Metadata.STLTimecodeStartOfProgramme = g.timecodeStartOfProgramme + } + if v, ok := stlLanguageMapping.Get(g.languageCode); ok { + o.Metadata.Language = v.(string) + } + + // Parse Text and Timing Information (TTI) blocks. + for { + // Read TTI block + if b, err = readNBytes(i, stlBlockSizeTTI); err != nil { + if err == io.EOF { + err = nil + break + } + return + } + + // Parse TTI block + var t = parseTTIBlock(b, g.framerate) + + // Do not process reserved user data + if t.extensionBlockNumber == extensionBlockNumberReservedUserData { + continue + } + + justification := parseSTLJustificationCode(t.justificationCode) + rows := bytes.Split(t.text, []byte{stlLineSeparator}) + + position := STLPosition{ + MaxRows: g.maximumNumberOfDisplayableRows, + Rows: len(rows), + VerticalPosition: t.verticalPosition, + } + + styleAttributes := StyleAttributes{ + STLJustification: &justification, + STLPosition: &position, + } + styleAttributes.propagateSTLAttributes() + + // Create item + var i = &Item{ + EndAt: t.timecodeOut - o.Metadata.STLTimecodeStartOfProgramme, + InlineStyle: &styleAttributes, + StartAt: t.timecodeIn - o.Metadata.STLTimecodeStartOfProgramme, + } + + // Loop through rows + for _, text := range bytes.Split(t.text, []byte{stlLineSeparator}) { + if g.displayStandardCode == 
stlDisplayStandardCodeOpenSubtitling { + err = parseOpenSubtitleRow(i, ch, func() styler { return newSTLStyler() }, text) + if err != nil { + return nil, err + } + } else { + parseTeletextRow(i, ch, func() styler { return newSTLStyler() }, text) + } + } + + // Append item + o.Items = append(o.Items, i) + + } + return +} + +// readNBytes reads n bytes +func readNBytes(i io.Reader, c int) (o []byte, err error) { + o = make([]byte, c) + var n int + if n, err = i.Read(o); err != nil || n != len(o) { + if err != nil { + if err == io.EOF { + return + } + err = fmt.Errorf("astisub: reading %d bytes failed: %w", c, err) + return + } + err = fmt.Errorf("astisub: read %d bytes, should have read %d", n, c) + return + } + return +} + +// gsiBlock represents a GSI block +type gsiBlock struct { + characterCodeTableNumber uint16 + codePageNumber uint32 + countryOfOrigin string + creationDate time.Time + diskSequenceNumber int + displayStandardCode string + editorContactDetails string + editorName string + framerate int + languageCode string + maximumNumberOfDisplayableCharactersInAnyTextRow int + maximumNumberOfDisplayableRows int + originalEpisodeTitle string + originalProgramTitle string + publisher string + revisionDate time.Time + revisionNumber int + subtitleListReferenceCode string + timecodeFirstInCue time.Duration + timecodeStartOfProgramme time.Duration + timecodeStatus string + totalNumberOfDisks int + totalNumberOfSubtitleGroups int + totalNumberOfSubtitles int + totalNumberOfTTIBlocks int + translatedEpisodeTitle string + translatedProgramTitle string + translatorContactDetails string + translatorName string + userDefinedArea string +} + +// newGSIBlock builds the subtitles GSI block +func newGSIBlock(s Subtitles) (g *gsiBlock) { + // Init + g = &gsiBlock{ + characterCodeTableNumber: stlCharacterCodeTableNumberLatin, + codePageNumber: stlCodePageNumberMultilingual, + countryOfOrigin: stlCountryCodeFrance, + creationDate: Now(), + diskSequenceNumber: 1, + 
displayStandardCode: stlDisplayStandardCodeLevel1Teletext, + framerate: 25, + languageCode: stlLanguageCodeFrench, + maximumNumberOfDisplayableCharactersInAnyTextRow: 40, + maximumNumberOfDisplayableRows: 23, + revisionDate: Now(), + subtitleListReferenceCode: "", + timecodeStatus: stlTimecodeStatusIntendedForUse, + timecodeStartOfProgramme: 0, + totalNumberOfDisks: 1, + totalNumberOfSubtitleGroups: 1, + totalNumberOfSubtitles: len(s.Items), + totalNumberOfTTIBlocks: len(s.Items), + } + + // Add metadata + if s.Metadata != nil { + if s.Metadata.STLCreationDate != nil { + g.creationDate = *s.Metadata.STLCreationDate + } + g.countryOfOrigin = s.Metadata.STLCountryOfOrigin + g.displayStandardCode = s.Metadata.STLDisplayStandardCode + g.framerate = s.Metadata.Framerate + if v, ok := stlLanguageMapping.GetInverse(s.Metadata.Language); ok { + g.languageCode = v.(string) + } + g.originalProgramTitle = s.Metadata.Title + if s.Metadata.STLMaximumNumberOfDisplayableCharactersInAnyTextRow != nil { + g.maximumNumberOfDisplayableCharactersInAnyTextRow = *s.Metadata.STLMaximumNumberOfDisplayableCharactersInAnyTextRow + } + if s.Metadata.STLMaximumNumberOfDisplayableRows != nil { + g.maximumNumberOfDisplayableRows = *s.Metadata.STLMaximumNumberOfDisplayableRows + } + g.publisher = s.Metadata.STLPublisher + if s.Metadata.STLRevisionDate != nil { + g.revisionDate = *s.Metadata.STLRevisionDate + } + g.subtitleListReferenceCode = s.Metadata.STLSubtitleListReferenceCode + g.timecodeStartOfProgramme = s.Metadata.STLTimecodeStartOfProgramme + } + + // Timecode first in cue + if len(s.Items) > 0 { + g.timecodeFirstInCue = s.Items[0].StartAt + } + return +} + +// parseGSIBlock parses a GSI block +func parseGSIBlock(b []byte) (g *gsiBlock, err error) { + // Init + g = &gsiBlock{ + characterCodeTableNumber: binary.BigEndian.Uint16(b[12:14]), + countryOfOrigin: string(bytes.TrimSpace(b[274:277])), + codePageNumber: binary.BigEndian.Uint32(append([]byte{0x0}, b[0:3]...)), + 
displayStandardCode: string(bytes.TrimSpace([]byte{b[11]})), + editorName: string(bytes.TrimSpace(b[309:341])), + editorContactDetails: string(bytes.TrimSpace(b[341:373])), + languageCode: string(bytes.TrimSpace(b[14:16])), + originalEpisodeTitle: string(bytes.TrimSpace(b[48:80])), + originalProgramTitle: string(bytes.TrimSpace(b[16:48])), + publisher: string(bytes.TrimSpace(b[277:309])), + subtitleListReferenceCode: string(bytes.TrimSpace(b[208:224])), + timecodeStatus: string(bytes.TrimSpace([]byte{b[255]})), + translatedEpisodeTitle: string(bytes.TrimSpace(b[80:112])), + translatedProgramTitle: string(bytes.TrimSpace(b[112:144])), + translatorContactDetails: string(bytes.TrimSpace(b[176:208])), + translatorName: string(bytes.TrimSpace(b[144:176])), + userDefinedArea: string(bytes.TrimSpace(b[448:])), + } + + // Framerate + if v, ok := stlFramerateMapping.Get(string(b[3:11])); ok { + g.framerate = v.(int) + } + + // Creation date + if v := strings.TrimSpace(string(b[224:230])); len(v) > 0 { + if g.creationDate, err = time.Parse("060102", v); err != nil { + err = fmt.Errorf("astisub: parsing date %s failed: %w", v, err) + return + } + } + + // Revision date + if v := strings.TrimSpace(string(b[230:236])); len(v) > 0 { + if g.revisionDate, err = time.Parse("060102", v); err != nil { + err = fmt.Errorf("astisub: parsing date %s failed: %w", v, err) + return + } + } + + // Revision number + if v := strings.TrimSpace(string(b[236:238])); len(v) > 0 { + if g.revisionNumber, err = strconv.Atoi(v); err != nil { + err = fmt.Errorf("astisub: atoi of %s failed: %w", v, err) + return + } + } + + // Total number of TTI blocks + if v := strings.TrimSpace(string(b[238:243])); len(v) > 0 { + if g.totalNumberOfTTIBlocks, err = strconv.Atoi(v); err != nil { + err = fmt.Errorf("astisub: atoi of %s failed: %w", v, err) + return + } + } + + // Total number of subtitles + if v := strings.TrimSpace(string(b[243:248])); len(v) > 0 { + if g.totalNumberOfSubtitles, err = strconv.Atoi(v); 
err != nil { + err = fmt.Errorf("astisub: atoi of %s failed: %w", v, err) + return + } + } + + // Total number of subtitle groups + if v := strings.TrimSpace(string(b[248:251])); len(v) > 0 { + if g.totalNumberOfSubtitleGroups, err = strconv.Atoi(v); err != nil { + err = fmt.Errorf("astisub: atoi of %s failed: %w", v, err) + return + } + } + + // Maximum number of displayable characters in any text row + if v := strings.TrimSpace(string(b[251:253])); len(v) > 0 { + if g.maximumNumberOfDisplayableCharactersInAnyTextRow, err = strconv.Atoi(v); err != nil { + err = fmt.Errorf("astisub: atoi of %s failed: %w", v, err) + return + } + } + + // Maximum number of displayable rows + if v := strings.TrimSpace(string(b[253:255])); len(v) > 0 { + if g.maximumNumberOfDisplayableRows, err = strconv.Atoi(v); err != nil { + err = fmt.Errorf("astisub: atoi of %s failed: %w", v, err) + return + } + } + + // Timecode start of programme + if v := strings.TrimSpace(string(b[256:264])); len(v) > 0 { + if g.timecodeStartOfProgramme, err = parseDurationSTL(v, g.framerate); err != nil { + err = fmt.Errorf("astisub: parsing of stl duration %s failed: %w", v, err) + return + } + } + + // Timecode first in cue + if v := strings.TrimSpace(string(b[264:272])); len(v) > 0 { + if g.timecodeFirstInCue, err = parseDurationSTL(v, g.framerate); err != nil { + err = fmt.Errorf("astisub: parsing of stl duration %s failed: %w", v, err) + return + } + } + + // Total number of disks + if v := strings.TrimSpace(string(b[272])); len(v) > 0 { + if g.totalNumberOfDisks, err = strconv.Atoi(v); err != nil { + err = fmt.Errorf("astisub: atoi of %s failed: %w", v, err) + return + } + } + + // Disk sequence number + if v := strings.TrimSpace(string(b[273])); len(v) > 0 { + if g.diskSequenceNumber, err = strconv.Atoi(v); err != nil { + err = fmt.Errorf("astisub: atoi of %s failed: %w", v, err) + return + } + } + return +} + +// bytes transforms the GSI block into []byte +func (b gsiBlock) bytes() (o []byte) { + bs 
:= make([]byte, 4) + binary.BigEndian.PutUint32(bs, b.codePageNumber) + o = append(o, astikit.BytesPad(bs[1:], ' ', 3, astikit.PadRight, astikit.PadCut)...) // Code page number + // Disk format code + var f string + if v, ok := stlFramerateMapping.GetInverse(b.framerate); ok { + f = v.(string) + } + o = append(o, astikit.BytesPad([]byte(f), ' ', 8, astikit.PadRight, astikit.PadCut)...) + o = append(o, astikit.BytesPad([]byte(b.displayStandardCode), ' ', 1, astikit.PadRight, astikit.PadCut)...) // Display standard code + binary.BigEndian.PutUint16(bs, b.characterCodeTableNumber) + o = append(o, astikit.BytesPad(bs[:2], ' ', 2, astikit.PadRight, astikit.PadCut)...) // Character code table number + o = append(o, astikit.BytesPad([]byte(b.languageCode), ' ', 2, astikit.PadRight, astikit.PadCut)...) // Language code + o = append(o, astikit.BytesPad([]byte(b.originalProgramTitle), ' ', 32, astikit.PadRight, astikit.PadCut)...) // Original program title + o = append(o, astikit.BytesPad([]byte(b.originalEpisodeTitle), ' ', 32, astikit.PadRight, astikit.PadCut)...) // Original episode title + o = append(o, astikit.BytesPad([]byte(b.translatedProgramTitle), ' ', 32, astikit.PadRight, astikit.PadCut)...) // Translated program title + o = append(o, astikit.BytesPad([]byte(b.translatedEpisodeTitle), ' ', 32, astikit.PadRight, astikit.PadCut)...) // Translated episode title + o = append(o, astikit.BytesPad([]byte(b.translatorName), ' ', 32, astikit.PadRight, astikit.PadCut)...) // Translator's name + o = append(o, astikit.BytesPad([]byte(b.translatorContactDetails), ' ', 32, astikit.PadRight, astikit.PadCut)...) // Translator's contact details + o = append(o, astikit.BytesPad([]byte(b.subtitleListReferenceCode), ' ', 16, astikit.PadRight, astikit.PadCut)...) // Subtitle list reference code + o = append(o, astikit.BytesPad([]byte(b.creationDate.Format("060102")), ' ', 6, astikit.PadRight, astikit.PadCut)...) 
// Creation date + o = append(o, astikit.BytesPad([]byte(b.revisionDate.Format("060102")), ' ', 6, astikit.PadRight, astikit.PadCut)...) // Revision date + o = append(o, astikit.BytesPad([]byte(strconv.Itoa(b.revisionNumber)), '0', 2, astikit.PadCut)...) // Revision number + o = append(o, astikit.BytesPad([]byte(strconv.Itoa(b.totalNumberOfTTIBlocks)), '0', 5, astikit.PadCut)...) // Total number of TTI blocks + o = append(o, astikit.BytesPad([]byte(strconv.Itoa(b.totalNumberOfSubtitles)), '0', 5, astikit.PadCut)...) // Total number of subtitles + o = append(o, astikit.BytesPad([]byte(strconv.Itoa(b.totalNumberOfSubtitleGroups)), '0', 3, astikit.PadCut)...) // Total number of subtitle groups + o = append(o, astikit.BytesPad([]byte(strconv.Itoa(b.maximumNumberOfDisplayableCharactersInAnyTextRow)), '0', 2, astikit.PadCut)...) // Maximum number of displayable characters in any text row + o = append(o, astikit.BytesPad([]byte(strconv.Itoa(b.maximumNumberOfDisplayableRows)), '0', 2, astikit.PadCut)...) // Maximum number of displayable rows + o = append(o, astikit.BytesPad([]byte(b.timecodeStatus), ' ', 1, astikit.PadRight, astikit.PadCut)...) // Timecode status + o = append(o, astikit.BytesPad([]byte(formatDurationSTL(b.timecodeStartOfProgramme, b.framerate)), ' ', 8, astikit.PadRight, astikit.PadCut)...) // Timecode start of a programme + o = append(o, astikit.BytesPad([]byte(formatDurationSTL(b.timecodeFirstInCue, b.framerate)), ' ', 8, astikit.PadRight, astikit.PadCut)...) // Timecode first in cue + o = append(o, astikit.BytesPad([]byte(strconv.Itoa(b.totalNumberOfDisks)), ' ', 1, astikit.PadRight, astikit.PadCut)...) // Total number of disks + o = append(o, astikit.BytesPad([]byte(strconv.Itoa(b.diskSequenceNumber)), ' ', 1, astikit.PadRight, astikit.PadCut)...) // Disk sequence number + o = append(o, astikit.BytesPad([]byte(b.countryOfOrigin), ' ', 3, astikit.PadRight, astikit.PadCut)...) 
// Country of origin + o = append(o, astikit.BytesPad([]byte(b.publisher), ' ', 32, astikit.PadRight, astikit.PadCut)...) // Publisher + o = append(o, astikit.BytesPad([]byte(b.editorName), ' ', 32, astikit.PadRight, astikit.PadCut)...) // Editor's name + o = append(o, astikit.BytesPad([]byte(b.editorContactDetails), ' ', 32, astikit.PadRight, astikit.PadCut)...) // Editor's contact details + o = append(o, astikit.BytesPad([]byte{}, ' ', 75+576, astikit.PadRight, astikit.PadCut)...) // Spare bytes + user defined area // // Editor's contact details + return +} + +// parseDurationSTL parses a STL duration +func parseDurationSTL(i string, framerate int) (d time.Duration, err error) { + // Parse hours + var hours, hoursString = 0, i[0:2] + if hours, err = strconv.Atoi(hoursString); err != nil { + err = fmt.Errorf("astisub: atoi of %s failed: %w", hoursString, err) + return + } + + // Parse minutes + var minutes, minutesString = 0, i[2:4] + if minutes, err = strconv.Atoi(minutesString); err != nil { + err = fmt.Errorf("astisub: atoi of %s failed: %w", minutesString, err) + return + } + + // Parse seconds + var seconds, secondsString = 0, i[4:6] + if seconds, err = strconv.Atoi(secondsString); err != nil { + err = fmt.Errorf("astisub: atoi of %s failed: %w", secondsString, err) + return + } + + // Parse frames + var frames, framesString = 0, i[6:8] + if frames, err = strconv.Atoi(framesString); err != nil { + err = fmt.Errorf("astisub: atoi of %s failed: %w", framesString, err) + return + } + + // Set duration + d = time.Duration(hours)*time.Hour + time.Duration(minutes)*time.Minute + time.Duration(seconds)*time.Second + time.Duration(1e9*frames/framerate)*time.Nanosecond + return +} + +// formatDurationSTL formats a STL duration +func formatDurationSTL(d time.Duration, framerate int) (o string) { + // Add hours + if d.Hours() < 10 { + o += "0" + } + var delta = int(math.Floor(d.Hours())) + o += strconv.Itoa(delta) + d -= time.Duration(delta) * time.Hour + + // Add 
minutes + if d.Minutes() < 10 { + o += "0" + } + delta = int(math.Floor(d.Minutes())) + o += strconv.Itoa(delta) + d -= time.Duration(delta) * time.Minute + + // Add seconds + if d.Seconds() < 10 { + o += "0" + } + delta = int(math.Floor(d.Seconds())) + o += strconv.Itoa(delta) + d -= time.Duration(delta) * time.Second + + // Add frames + var frames = int(int(d.Nanoseconds()) * framerate / 1e9) + if frames < 10 { + o += "0" + } + o += strconv.Itoa(frames) + return +} + +// ttiBlock represents a TTI block +type ttiBlock struct { + commentFlag byte + cumulativeStatus byte + extensionBlockNumber int + justificationCode byte + subtitleGroupNumber int + subtitleNumber int + text []byte + timecodeIn time.Duration + timecodeOut time.Duration + verticalPosition int +} + +// newTTIBlock builds an item TTI block +func newTTIBlock(i *Item, idx int) (t *ttiBlock) { + // Init + t = &ttiBlock{ + commentFlag: stlCommentFlagTextContainsSubtitleData, + cumulativeStatus: stlCumulativeStatusSubtitleNotPartOfACumulativeSet, + extensionBlockNumber: 255, + justificationCode: stlJustificationCodeLeftJustifiedText, + subtitleGroupNumber: 0, + subtitleNumber: idx, + timecodeIn: i.StartAt, + timecodeOut: i.EndAt, + verticalPosition: stlVerticalPositionFromStyle(i.InlineStyle), + } + + // Add text + var lines []string + for _, l := range i.Lines { + var lineItems []string + for _, li := range l.Items { + lineItems = append(lineItems, li.STLString()) + } + lines = append(lines, strings.Join(lineItems, " ")) + } + t.text = []byte(strings.Join(lines, string(rune(stlLineSeparator)))) + return +} + +func stlVerticalPositionFromStyle(sa *StyleAttributes) int { + if sa != nil && sa.STLPosition != nil { + return sa.STLPosition.VerticalPosition + } else { + return 20 + } +} + +func (li LineItem) STLString() string { + rs := li.Text + if li.InlineStyle != nil { + if li.InlineStyle.STLItalics != nil && *li.InlineStyle.STLItalics { + rs = string(rune(0x80)) + rs + string(rune(0x81)) + } + if 
li.InlineStyle.STLUnderline != nil && *li.InlineStyle.STLUnderline { + rs = string(rune(0x82)) + rs + string(rune(0x83)) + } + if li.InlineStyle.STLBoxing != nil && *li.InlineStyle.STLBoxing { + rs = string(rune(0x84)) + rs + string(rune(0x85)) + } + } + return rs +} + +// parseTTIBlock parses a TTI block +func parseTTIBlock(p []byte, framerate int) *ttiBlock { + return &ttiBlock{ + commentFlag: p[15], + cumulativeStatus: p[4], + extensionBlockNumber: int(uint8(p[3])), + justificationCode: p[14], + subtitleGroupNumber: int(uint8(p[0])), + subtitleNumber: int(binary.LittleEndian.Uint16(p[1:3])), + text: p[16:128], + timecodeIn: parseDurationSTLBytes(p[5:9], framerate), + timecodeOut: parseDurationSTLBytes(p[9:13], framerate), + verticalPosition: int(uint8(p[13])), + } +} + +// bytes transforms the TTI block into []byte +func (t *ttiBlock) bytes(g *gsiBlock) (o []byte) { + o = append(o, byte(uint8(t.subtitleGroupNumber))) // Subtitle group number + var b = make([]byte, 2) + binary.LittleEndian.PutUint16(b, uint16(t.subtitleNumber)) + o = append(o, b...) // Subtitle number + o = append(o, byte(uint8(t.extensionBlockNumber))) // Extension block number + o = append(o, t.cumulativeStatus) // Cumulative status + o = append(o, formatDurationSTLBytes(t.timecodeIn, g.framerate)...) // Timecode in + o = append(o, formatDurationSTLBytes(t.timecodeOut, g.framerate)...) // Timecode out + o = append(o, validateVerticalPosition(t.verticalPosition, g.displayStandardCode)) // Vertical position + o = append(o, t.justificationCode) // Justification code + o = append(o, t.commentFlag) // Comment flag + o = append(o, astikit.BytesPad(encodeTextSTL(string(t.text)), '\x8f', 112, astikit.PadRight, astikit.PadCut)...) // Text field + return +} + +// According to EBU 3264 (https://tech.ebu.ch/docs/tech/tech3264.pdf): +// page 12: +// for teletext subtitles, VP contains a value in the range 1-23 decimal (01h-17h) +// corresponding to theteletext row number of the first subtitle row. 
+// page 6: +// Teletext ("closed") subtitles are indicated via the Display Standard Code +// in the GSI block. +func validateVerticalPosition(vp int, dsc string) byte { + closed := false + switch dsc { + case stlDisplayStandardCodeLevel1Teletext, stlDisplayStandardCodeLevel2Teletext: + closed = true + } + if vp < 1 && closed { + vp = 1 + } + if vp > 23 && closed { + vp = 23 + } + return byte(uint8(vp)) +} + +// formatDurationSTLBytes formats a STL duration in bytes +func formatDurationSTLBytes(d time.Duration, framerate int) (o []byte) { + // Add hours + var hours = int(math.Floor(d.Hours())) + o = append(o, byte(uint8(hours))) + d -= time.Duration(hours) * time.Hour + + // Add minutes + var minutes = int(math.Floor(d.Minutes())) + o = append(o, byte(uint8(minutes))) + d -= time.Duration(minutes) * time.Minute + + // Add seconds + var seconds = int(math.Floor(d.Seconds())) + o = append(o, byte(uint8(seconds))) + d -= time.Duration(seconds) * time.Second + + // Add frames + var frames = int(int(d.Nanoseconds()) * framerate / 1e9) + o = append(o, byte(uint8(frames))) + return +} + +// parseDurationSTLBytes parses a STL duration in bytes +func parseDurationSTLBytes(b []byte, framerate int) time.Duration { + return time.Duration(uint8(b[0]))*time.Hour + time.Duration(uint8(b[1]))*time.Minute + time.Duration(uint8(b[2]))*time.Second + time.Duration(1e9*int(uint8(b[3]))/framerate)*time.Nanosecond +} + +type stlCharacterHandler struct { + accent string + c uint16 + m *astikit.BiMap +} + +func newSTLCharacterHandler(characterCodeTable uint16) (*stlCharacterHandler, error) { + if v, ok := stlCharacterCodeTables[characterCodeTable]; ok { + return &stlCharacterHandler{ + c: characterCodeTable, + m: v, + }, nil + } + return nil, fmt.Errorf("astisub: table doesn't exist for character code table %d", characterCodeTable) +} + +// TODO Use this instead of encodeTextSTL => use in teletext process like for decode +// TODO Test +func (h *stlCharacterHandler) encode(i []byte) byte { 
+ return ' ' +} + +func (h *stlCharacterHandler) decode(i byte) (o []byte) { + k := int(i) + vi, ok := h.m.Get(k) + if !ok { + return + } + v := vi.(string) + if len(h.accent) > 0 { + o = norm.NFC.Bytes([]byte(v + h.accent)) + h.accent = "" + return + } else if h.c == stlCharacterCodeTableNumberLatin && k >= 0xc0 && k <= 0xcf { + h.accent = v + return + } + return []byte(v) +} + +type stlStyler struct { + boxing *bool + italics *bool + underline *bool +} + +func newSTLStyler() *stlStyler { + return &stlStyler{} +} + +func (s *stlStyler) parseSpacingAttribute(i byte) { + switch i { + case 0x80: + s.italics = astikit.BoolPtr(true) + case 0x81: + s.italics = astikit.BoolPtr(false) + case 0x82: + s.underline = astikit.BoolPtr(true) + case 0x83: + s.underline = astikit.BoolPtr(false) + case 0x84: + s.boxing = astikit.BoolPtr(true) + case 0x85: + s.boxing = astikit.BoolPtr(false) + } +} + +func (s *stlStyler) hasBeenSet() bool { + return s.italics != nil || s.boxing != nil || s.underline != nil +} + +func (s *stlStyler) hasChanged(sa *StyleAttributes) bool { + return s.boxing != sa.STLBoxing || s.italics != sa.STLItalics || s.underline != sa.STLUnderline +} + +func (s *stlStyler) propagateStyleAttributes(sa *StyleAttributes) { + sa.propagateSTLAttributes() +} + +func (s *stlStyler) update(sa *StyleAttributes) { + if s.boxing != nil && s.boxing != sa.STLBoxing { + sa.STLBoxing = s.boxing + } + if s.italics != nil && s.italics != sa.STLItalics { + sa.STLItalics = s.italics + } + if s.underline != nil && s.underline != sa.STLUnderline { + sa.STLUnderline = s.underline + } +} + +// WriteToSTL writes subtitles in .stl format +func (s Subtitles) WriteToSTL(o io.Writer) (err error) { + // Do not write anything if no subtitles + if len(s.Items) == 0 { + err = ErrNoSubtitlesToWrite + return + } + + // Write GSI block + var g = newGSIBlock(s) + if _, err = o.Write(g.bytes()); err != nil { + err = fmt.Errorf("astisub: writing gsi block failed: %w", err) + return + } + + // Loop 
through items + for idx, item := range s.Items { + // Write tti block + if _, err = o.Write(newTTIBlock(item, idx+1).bytes(g)); err != nil { + err = fmt.Errorf("astisub: writing tti block #%d failed: %w", idx+1, err) + return + } + } + return +} + +// TODO Remove below + +// STL unicode diacritic +var stlUnicodeDiacritic = astikit.NewBiMap(). + Set(byte('\xc1'), "\u0300"). // Grave accent + Set(byte('\xc2'), "\u0301"). // Acute accent + Set(byte('\xc3'), "\u0302"). // Circumflex + Set(byte('\xc4'), "\u0303"). // Tilde + Set(byte('\xc5'), "\u0304"). // Macron + Set(byte('\xc6'), "\u0306"). // Breve + Set(byte('\xc7'), "\u0307"). // Dot + Set(byte('\xc8'), "\u0308"). // Umlaut + Set(byte('\xca'), "\u030a"). // Ring + Set(byte('\xcb'), "\u0327"). // Cedilla + Set(byte('\xcd'), "\u030B"). // Double acute accent + Set(byte('\xce'), "\u0328"). // Ogonek + Set(byte('\xcf'), "\u030c") // Caron + +// STL unicode mapping +var stlUnicodeMapping = astikit.NewBiMap(). + Set(byte('\x8a'), "\u000a"). // Line break + Set(byte('\xa8'), "\u00a4"). // ¤ + Set(byte('\xa9'), "\u2018"). // ‘ + Set(byte('\xaa'), "\u201C"). // “ + Set(byte('\xab'), "\u00AB"). // « + Set(byte('\xac'), "\u2190"). // ← + Set(byte('\xad'), "\u2191"). // ↑ + Set(byte('\xae'), "\u2192"). // → + Set(byte('\xaf'), "\u2193"). // ↓ + Set(byte('\xb4'), "\u00D7"). // × + Set(byte('\xb8'), "\u00F7"). // ÷ + Set(byte('\xb9'), "\u2019"). // ’ + Set(byte('\xba'), "\u201D"). // ” + Set(byte('\xbc'), "\u00BC"). // ¼ + Set(byte('\xbd'), "\u00BD"). // ½ + Set(byte('\xbe'), "\u00BE"). // ¾ + Set(byte('\xbf'), "\u00BF"). // ¿ + Set(byte('\xd0'), "\u2015"). // ― + Set(byte('\xd1'), "\u00B9"). // ¹ + Set(byte('\xd2'), "\u00AE"). // ® + Set(byte('\xd3'), "\u00A9"). // © + Set(byte('\xd4'), "\u2122"). // ™ + Set(byte('\xd5'), "\u266A"). // ♪ + Set(byte('\xd6'), "\u00AC"). // ¬ + Set(byte('\xd7'), "\u00A6"). // ¦ + Set(byte('\xdc'), "\u215B"). // ⅛ + Set(byte('\xdd'), "\u215C"). // ⅜ + Set(byte('\xde'), "\u215D"). 
// ⅝ + Set(byte('\xdf'), "\u215E"). // ⅞ + Set(byte('\xe0'), "\u2126"). // Ohm Ω + Set(byte('\xe1'), "\u00C6"). // Æ + Set(byte('\xe2'), "\u0110"). // Đ + Set(byte('\xe3'), "\u00AA"). // ª + Set(byte('\xe4'), "\u0126"). // Ħ + Set(byte('\xe6'), "\u0132"). // IJ + Set(byte('\xe7'), "\u013F"). // Ŀ + Set(byte('\xe8'), "\u0141"). // Ł + Set(byte('\xe9'), "\u00D8"). // Ø + Set(byte('\xea'), "\u0152"). // Œ + Set(byte('\xeb'), "\u00BA"). // º + Set(byte('\xec'), "\u00DE"). // Þ + Set(byte('\xed'), "\u0166"). // Ŧ + Set(byte('\xee'), "\u014A"). // Ŋ + Set(byte('\xef'), "\u0149"). // ʼn + Set(byte('\xf0'), "\u0138"). // ĸ + Set(byte('\xf1'), "\u00E6"). // æ + Set(byte('\xf2'), "\u0111"). // đ + Set(byte('\xf3'), "\u00F0"). // ð + Set(byte('\xf4'), "\u0127"). // ħ + Set(byte('\xf5'), "\u0131"). // ı + Set(byte('\xf6'), "\u0133"). // ij + Set(byte('\xf7'), "\u0140"). // ŀ + Set(byte('\xf8'), "\u0142"). // ł + Set(byte('\xf9'), "\u00F8"). // ø + Set(byte('\xfa'), "\u0153"). // œ + Set(byte('\xfb'), "\u00DF"). // ß + Set(byte('\xfc'), "\u00FE"). // þ + Set(byte('\xfd'), "\u0167"). // ŧ + Set(byte('\xfe'), "\u014B"). 
// ŋ + Set(byte('\xff'), "\u00AD") // Soft hyphen + +// encodeTextSTL encodes the STL text +func encodeTextSTL(i string) (o []byte) { + i = string(norm.NFD.Bytes([]byte(i))) + for _, c := range i { + if v, ok := stlUnicodeMapping.GetInverse(string(c)); ok { + o = append(o, v.(byte)) + } else if v, ok := stlUnicodeDiacritic.GetInverse(string(c)); ok { + o = append(o[:len(o)-1], v.(byte), o[len(o)-1]) + } else { + o = append(o, byte(c)) + } + } + return +} + +func parseSTLJustificationCode(i byte) Justification { + switch i { + case 0x00: + return JustificationUnchanged + case 0x01: + return JustificationLeft + case 0x02: + return JustificationCentered + case 0x03: + return JustificationRight + default: + return JustificationUnchanged + } +} + +func isTeletextControlCode(i byte) (b bool) { + return i <= 0x1f +} + +func parseOpenSubtitleRow(i *Item, d decoder, fs func() styler, row []byte) error { + // Loop through columns + var l = Line{} + var li = LineItem{InlineStyle: &StyleAttributes{}} + var s styler + for _, v := range row { + // Create specific styler + if fs != nil { + s = fs() + } + + if isTeletextControlCode(v) { + return errors.New("teletext control code in open text") + } + if s != nil { + s.parseSpacingAttribute(v) + } + + // Style has been set + if s != nil && s.hasBeenSet() { + // Style has changed + if s.hasChanged(li.InlineStyle) { + if len(li.Text) > 0 { + // Append line item + appendOpenSubtitleLineItem(&l, li, s) + + // Create new line item + sa := &StyleAttributes{} + *sa = *li.InlineStyle + li = LineItem{InlineStyle: sa} + } + s.update(li.InlineStyle) + } + } else { + // Append text + li.Text += string(d.decode(v)) + } + } + + appendOpenSubtitleLineItem(&l, li, s) + + // Append line + if len(l.Items) > 0 { + i.Lines = append(i.Lines, l) + } + return nil +} + +func appendOpenSubtitleLineItem(l *Line, li LineItem, s styler) { + // There's some text + if len(strings.TrimSpace(li.Text)) > 0 { + // Make sure inline style exists + if li.InlineStyle == 
nil { + li.InlineStyle = &StyleAttributes{} + } + + // Propagate style attributes + if s != nil { + s.propagateStyleAttributes(li.InlineStyle) + } + + // Append line item + li.Text = strings.TrimSpace(li.Text) + l.Items = append(l.Items, li) + } +} diff --git a/vendor/github.com/asticode/go-astisub/subtitles.go b/vendor/github.com/asticode/go-astisub/subtitles.go new file mode 100644 index 000000000..e6617998e --- /dev/null +++ b/vendor/github.com/asticode/go-astisub/subtitles.go @@ -0,0 +1,779 @@ +package astisub + +import ( + "errors" + "fmt" + "math" + "os" + "path/filepath" + "strconv" + "strings" + "time" + + "github.com/asticode/go-astikit" +) + +// Bytes +var ( + BytesBOM = []byte{239, 187, 191} + bytesLineSeparator = []byte("\n") + bytesSpace = []byte(" ") +) + +// Colors +var ( + ColorBlack = &Color{} + ColorBlue = &Color{Blue: 255} + ColorCyan = &Color{Blue: 255, Green: 255} + ColorGray = &Color{Blue: 128, Green: 128, Red: 128} + ColorGreen = &Color{Green: 128} + ColorLime = &Color{Green: 255} + ColorMagenta = &Color{Blue: 255, Red: 255} + ColorMaroon = &Color{Red: 128} + ColorNavy = &Color{Blue: 128} + ColorOlive = &Color{Green: 128, Red: 128} + ColorPurple = &Color{Blue: 128, Red: 128} + ColorRed = &Color{Red: 255} + ColorSilver = &Color{Blue: 192, Green: 192, Red: 192} + ColorTeal = &Color{Blue: 128, Green: 128} + ColorYellow = &Color{Green: 255, Red: 255} + ColorWhite = &Color{Blue: 255, Green: 255, Red: 255} +) + +// Errors +var ( + ErrInvalidExtension = errors.New("astisub: invalid extension") + ErrNoSubtitlesToWrite = errors.New("astisub: no subtitles to write") +) + +// Now allows testing functions using it +var Now = func() time.Time { + return time.Now() +} + +// Options represents open or write options +type Options struct { + Filename string + Teletext TeletextOptions + STL STLOptions +} + +// Open opens a subtitle reader based on options +func Open(o Options) (s *Subtitles, err error) { + // Open the file + var f *os.File + if f, err = 
os.Open(o.Filename); err != nil { + err = fmt.Errorf("astisub: opening %s failed: %w", o.Filename, err) + return + } + defer f.Close() + + // Parse the content + switch filepath.Ext(strings.ToLower(o.Filename)) { + case ".srt": + s, err = ReadFromSRT(f) + case ".ssa", ".ass": + s, err = ReadFromSSA(f) + case ".stl": + s, err = ReadFromSTL(f, o.STL) + case ".ts": + s, err = ReadFromTeletext(f, o.Teletext) + case ".ttml": + s, err = ReadFromTTML(f) + case ".vtt": + s, err = ReadFromWebVTT(f) + default: + err = ErrInvalidExtension + } + return +} + +// OpenFile opens a file regardless of other options +func OpenFile(filename string) (*Subtitles, error) { + return Open(Options{Filename: filename}) +} + +// Subtitles represents an ordered list of items with formatting +type Subtitles struct { + Items []*Item + Metadata *Metadata + Regions map[string]*Region + Styles map[string]*Style +} + +// NewSubtitles creates new subtitles +func NewSubtitles() *Subtitles { + return &Subtitles{ + Regions: make(map[string]*Region), + Styles: make(map[string]*Style), + } +} + +// Item represents a text to show between 2 time boundaries with formatting +type Item struct { + Comments []string + Index int + EndAt time.Duration + InlineStyle *StyleAttributes + Lines []Line + Region *Region + StartAt time.Duration + Style *Style +} + +// String implements the Stringer interface +func (i Item) String() string { + var os []string + for _, l := range i.Lines { + os = append(os, l.String()) + } + return strings.Join(os, " - ") +} + +// Color represents a color +type Color struct { + Alpha, Blue, Green, Red uint8 +} + +// newColorFromSSAString builds a new color based on an SSA string +func newColorFromSSAString(s string, base int) (c *Color, err error) { + var i int64 + if i, err = strconv.ParseInt(s, base, 64); err != nil { + err = fmt.Errorf("parsing int %s with base %d failed: %w", s, base, err) + return + } + c = &Color{ + Alpha: uint8(i>>24) & 0xff, + Blue: uint8(i>>16) & 0xff, + Green: 
uint8(i>>8) & 0xff, + Red: uint8(i) & 0xff, + } + return +} + +// SSAString expresses the color as an SSA string +func (c *Color) SSAString() string { + return fmt.Sprintf("%.8x", uint32(c.Alpha)<<24|uint32(c.Blue)<<16|uint32(c.Green)<<8|uint32(c.Red)) +} + +// TTMLString expresses the color as a TTML string +func (c *Color) TTMLString() string { + return fmt.Sprintf("%.6x", uint32(c.Red)<<16|uint32(c.Green)<<8|uint32(c.Blue)) +} + +type Justification int + +var ( + JustificationUnchanged = Justification(1) + JustificationLeft = Justification(2) + JustificationCentered = Justification(3) + JustificationRight = Justification(4) +) + +// StyleAttributes represents style attributes +type StyleAttributes struct { + SSAAlignment *int + SSAAlphaLevel *float64 + SSAAngle *float64 // degrees + SSABackColour *Color + SSABold *bool + SSABorderStyle *int + SSAEffect string + SSAEncoding *int + SSAFontName string + SSAFontSize *float64 + SSAItalic *bool + SSALayer *int + SSAMarginLeft *int // pixels + SSAMarginRight *int // pixels + SSAMarginVertical *int // pixels + SSAMarked *bool + SSAOutline *float64 // pixels + SSAOutlineColour *Color + SSAPrimaryColour *Color + SSAScaleX *float64 // % + SSAScaleY *float64 // % + SSASecondaryColour *Color + SSAShadow *float64 // pixels + SSASpacing *float64 // pixels + SSAStrikeout *bool + SSAUnderline *bool + STLBoxing *bool + STLItalics *bool + STLJustification *Justification + STLPosition *STLPosition + STLUnderline *bool + TeletextColor *Color + TeletextDoubleHeight *bool + TeletextDoubleSize *bool + TeletextDoubleWidth *bool + TeletextSpacesAfter *int + TeletextSpacesBefore *int + // TODO Use pointers with real types below + TTMLBackgroundColor *string // https://htmlcolorcodes.com/fr/ + TTMLColor *string + TTMLDirection *string + TTMLDisplay *string + TTMLDisplayAlign *string + TTMLExtent *string + TTMLFontFamily *string + TTMLFontSize *string + TTMLFontStyle *string + TTMLFontWeight *string + TTMLLineHeight *string + TTMLOpacity 
*string + TTMLOrigin *string + TTMLOverflow *string + TTMLPadding *string + TTMLShowBackground *string + TTMLTextAlign *string + TTMLTextDecoration *string + TTMLTextOutline *string + TTMLUnicodeBidi *string + TTMLVisibility *string + TTMLWrapOption *string + TTMLWritingMode *string + TTMLZIndex *int + WebVTTAlign string + WebVTTItalics bool + WebVTTLine string + WebVTTLines int + WebVTTPosition string + WebVTTRegionAnchor string + WebVTTScroll string + WebVTTSize string + WebVTTVertical string + WebVTTViewportAnchor string + WebVTTWidth string +} + +func (sa *StyleAttributes) propagateSSAAttributes() {} + +func (sa *StyleAttributes) propagateSTLAttributes() { + if sa.STLJustification != nil { + switch *sa.STLJustification { + case JustificationCentered: + // default to middle anyway? + case JustificationRight: + sa.WebVTTAlign = "right" + case JustificationLeft: + sa.WebVTTAlign = "left" + } + } +} + +func (sa *StyleAttributes) propagateTeletextAttributes() { + if sa.TeletextColor != nil { + sa.TTMLColor = astikit.StrPtr("#" + sa.TeletextColor.TTMLString()) + } +} + +//reference for migration: https://w3c.github.io/ttml-webvtt-mapping/ +func (sa *StyleAttributes) propagateTTMLAttributes() { + if sa.TTMLTextAlign != nil { + sa.WebVTTAlign = *sa.TTMLTextAlign + } + if sa.TTMLExtent != nil { + //region settings + lineHeight := 5 //assuming height of line as 5.33vh + dimensions := strings.Split(*sa.TTMLExtent, " ") + if len(dimensions) > 1 { + sa.WebVTTWidth = dimensions[0] + if height, err := strconv.Atoi(strings.ReplaceAll(dimensions[1], "%", "")); err == nil { + sa.WebVTTLines = height / lineHeight + } + //cue settings + //default TTML WritingMode is lrtb i.e. 
left to right, top to bottom + sa.WebVTTSize = dimensions[1] + if sa.TTMLWritingMode != nil && strings.HasPrefix(*sa.TTMLWritingMode, "tb") { + sa.WebVTTSize = dimensions[0] + } + } + } + if sa.TTMLOrigin != nil { + //region settings + sa.WebVTTRegionAnchor = "0%,0%" + sa.WebVTTViewportAnchor = strings.ReplaceAll(strings.TrimSpace(*sa.TTMLOrigin), " ", ",") + sa.WebVTTScroll = "up" + //cue settings + coordinates := strings.Split(*sa.TTMLOrigin, " ") + if len(coordinates) > 1 { + sa.WebVTTLine = coordinates[0] + sa.WebVTTPosition = coordinates[1] + if sa.TTMLWritingMode != nil && strings.HasPrefix(*sa.TTMLWritingMode, "tb") { + sa.WebVTTLine = coordinates[1] + sa.WebVTTPosition = coordinates[0] + } + } + } +} + +func (sa *StyleAttributes) propagateWebVTTAttributes() {} + +// Metadata represents metadata +// TODO Merge attributes +type Metadata struct { + Comments []string + Framerate int + Language string + SSACollisions string + SSAOriginalEditing string + SSAOriginalScript string + SSAOriginalTiming string + SSAOriginalTranslation string + SSAPlayDepth *int + SSAPlayResX, SSAPlayResY *int + SSAScriptType string + SSAScriptUpdatedBy string + SSASynchPoint string + SSATimer *float64 + SSAUpdateDetails string + SSAWrapStyle string + STLCountryOfOrigin string + STLCreationDate *time.Time + STLDisplayStandardCode string + STLMaximumNumberOfDisplayableCharactersInAnyTextRow *int + STLMaximumNumberOfDisplayableRows *int + STLPublisher string + STLRevisionDate *time.Time + STLSubtitleListReferenceCode string + STLTimecodeStartOfProgramme time.Duration + Title string + TTMLCopyright string +} + +// Region represents a subtitle's region +type Region struct { + ID string + InlineStyle *StyleAttributes + Style *Style +} + +// Style represents a subtitle's style +type Style struct { + ID string + InlineStyle *StyleAttributes + Style *Style +} + +// Line represents a set of formatted line items +type Line struct { + Items []LineItem + VoiceName string +} + +// String implement 
the Stringer interface +func (l Line) String() string { + var texts []string + for _, i := range l.Items { + texts = append(texts, i.Text) + } + return strings.Join(texts, " ") +} + +// LineItem represents a formatted line item +type LineItem struct { + InlineStyle *StyleAttributes + Style *Style + Text string +} + +// Add adds a duration to each time boundaries. As in the time package, duration can be negative. +func (s *Subtitles) Add(d time.Duration) { + for idx := 0; idx < len(s.Items); idx++ { + s.Items[idx].EndAt += d + s.Items[idx].StartAt += d + if s.Items[idx].EndAt <= 0 && s.Items[idx].StartAt <= 0 { + s.Items = append(s.Items[:idx], s.Items[idx+1:]...) + idx-- + } else if s.Items[idx].StartAt <= 0 { + s.Items[idx].StartAt = time.Duration(0) + } + } +} + +// Duration returns the subtitles duration +func (s Subtitles) Duration() time.Duration { + if len(s.Items) == 0 { + return time.Duration(0) + } + return s.Items[len(s.Items)-1].EndAt +} + +// ForceDuration updates the subtitles duration. +// If requested duration is bigger, then we create a dummy item. +// If requested duration is smaller, then we remove useless items and we cut the last item or add a dummy item. 
+func (s *Subtitles) ForceDuration(d time.Duration, addDummyItem bool) { + // Requested duration is the same as the subtitles'one + if s.Duration() == d { + return + } + + // Requested duration is bigger than subtitles'one + if s.Duration() > d { + // Find last item before input duration and update end at + var lastIndex = -1 + for index, i := range s.Items { + // Start at is bigger than input duration, we've found the last item + if i.StartAt >= d { + lastIndex = index + break + } else if i.EndAt > d { + s.Items[index].EndAt = d + } + } + + // Last index has been found + if lastIndex != -1 { + s.Items = s.Items[:lastIndex] + } + } + + // Add dummy item with the minimum duration possible + if addDummyItem && s.Duration() < d { + s.Items = append(s.Items, &Item{EndAt: d, Lines: []Line{{Items: []LineItem{{Text: "..."}}}}, StartAt: d - time.Millisecond}) + } +} + +// Fragment fragments subtitles with a specific fragment duration +func (s *Subtitles) Fragment(f time.Duration) { + // Nothing to fragment + if len(s.Items) == 0 { + return + } + + // Here we want to simulate fragments of duration f until there are no subtitles left in that period of time + var fragmentStartAt, fragmentEndAt = time.Duration(0), f + for fragmentStartAt < s.Items[len(s.Items)-1].EndAt { + // We loop through subtitles and process the ones that either contain the fragment start at, + // or contain the fragment end at + // + // It's useless processing subtitles contained between fragment start at and end at + // |____________________| <- subtitle + // | | + // fragment start at fragment end at + for i, sub := range s.Items { + // Init + var newSub = &Item{} + *newSub = *sub + + // A switch is more readable here + switch { + // Subtitle contains fragment start at + // |____________________| <- subtitle + // | | + // fragment start at fragment end at + case sub.StartAt < fragmentStartAt && sub.EndAt > fragmentStartAt: + sub.StartAt = fragmentStartAt + newSub.EndAt = fragmentStartAt + // Subtitle 
contains fragment end at + // |____________________| <- subtitle + // | | + // fragment start at fragment end at + case sub.StartAt < fragmentEndAt && sub.EndAt > fragmentEndAt: + sub.StartAt = fragmentEndAt + newSub.EndAt = fragmentEndAt + default: + continue + } + + // Insert new sub + s.Items = append(s.Items[:i], append([]*Item{newSub}, s.Items[i:]...)...) + } + + // Update fragments boundaries + fragmentStartAt += f + fragmentEndAt += f + } + + // Order + s.Order() +} + +// IsEmpty returns whether the subtitles are empty +func (s Subtitles) IsEmpty() bool { + return len(s.Items) == 0 +} + +// Merge merges subtitles i into subtitles +func (s *Subtitles) Merge(i *Subtitles) { + // Append items + s.Items = append(s.Items, i.Items...) + s.Order() + + // Add regions + for _, region := range i.Regions { + if _, ok := s.Regions[region.ID]; !ok { + s.Regions[region.ID] = region + } + } + + // Add styles + for _, style := range i.Styles { + if _, ok := s.Styles[style.ID]; !ok { + s.Styles[style.ID] = style + } + } +} + +// Optimize optimizes subtitles +func (s *Subtitles) Optimize() { + // Nothing to optimize + if len(s.Items) == 0 { + return + } + + // Remove unused regions and style + s.removeUnusedRegionsAndStyles() +} + +// removeUnusedRegionsAndStyles removes unused regions and styles +func (s *Subtitles) removeUnusedRegionsAndStyles() { + // Loop through items + var usedRegions, usedStyles = make(map[string]bool), make(map[string]bool) + for _, item := range s.Items { + // Add region + if item.Region != nil { + usedRegions[item.Region.ID] = true + } + + // Add style + if item.Style != nil { + usedStyles[item.Style.ID] = true + } + + // Loop through lines + for _, line := range item.Lines { + // Loop through line items + for _, lineItem := range line.Items { + // Add style + if lineItem.Style != nil { + usedStyles[lineItem.Style.ID] = true + } + } + } + } + + // Loop through regions + for id, region := range s.Regions { + if _, ok := usedRegions[region.ID]; ok { + 
if region.Style != nil { + usedStyles[region.Style.ID] = true + } + } else { + delete(s.Regions, id) + } + } + + // Loop through style + for id, style := range s.Styles { + if _, ok := usedStyles[style.ID]; !ok { + delete(s.Styles, id) + } + } +} + +// Order orders items +func (s *Subtitles) Order() { + // Nothing to do if less than 1 element + if len(s.Items) <= 1 { + return + } + + // Order + var swapped = true + for swapped { + swapped = false + for index := 1; index < len(s.Items); index++ { + if s.Items[index-1].StartAt > s.Items[index].StartAt { + var tmp = s.Items[index-1] + s.Items[index-1] = s.Items[index] + s.Items[index] = tmp + swapped = true + } + } + } +} + +// RemoveStyling removes the styling from the subtitles +func (s *Subtitles) RemoveStyling() { + s.Regions = map[string]*Region{} + s.Styles = map[string]*Style{} + for _, i := range s.Items { + i.Region = nil + i.Style = nil + i.InlineStyle = nil + for idxLine, l := range i.Lines { + for idxLineItem := range l.Items { + i.Lines[idxLine].Items[idxLineItem].InlineStyle = nil + i.Lines[idxLine].Items[idxLineItem].Style = nil + } + } + } +} + +// Unfragment unfragments subtitles +func (s *Subtitles) Unfragment() { + // Nothing to do if less than 1 element + if len(s.Items) <= 1 { + return + } + + // Order + s.Order() + + // Loop through items + for i := 0; i < len(s.Items)-1; i++ { + for j := i + 1; j < len(s.Items); j++ { + // Items are the same + if s.Items[i].String() == s.Items[j].String() && s.Items[i].EndAt >= s.Items[j].StartAt { + // Only override end time if longer + if s.Items[i].EndAt < s.Items[j].EndAt { + s.Items[i].EndAt = s.Items[j].EndAt + } + s.Items = append(s.Items[:j], s.Items[j+1:]...) 
+ j-- + } else if s.Items[i].EndAt < s.Items[j].StartAt { + break + } + } + } +} + +// Write writes subtitles to a file +func (s Subtitles) Write(dst string) (err error) { + // Create the file + var f *os.File + if f, err = os.Create(dst); err != nil { + err = fmt.Errorf("astisub: creating %s failed: %w", dst, err) + return + } + defer f.Close() + + // Write the content + switch filepath.Ext(strings.ToLower(dst)) { + case ".srt": + err = s.WriteToSRT(f) + case ".ssa", ".ass": + err = s.WriteToSSA(f) + case ".stl": + err = s.WriteToSTL(f) + case ".ttml": + err = s.WriteToTTML(f) + case ".vtt": + err = s.WriteToWebVTT(f) + default: + err = ErrInvalidExtension + } + return +} + +// parseDuration parses a duration in "00:00:00.000", "00:00:00,000" or "0:00:00:00" format +func parseDuration(i, millisecondSep string, numberOfMillisecondDigits int) (o time.Duration, err error) { + // Split milliseconds + var parts = strings.Split(i, millisecondSep) + var milliseconds int + var s string + if len(parts) >= 2 { + // Invalid number of millisecond digits + s = strings.TrimSpace(parts[len(parts)-1]) + if len(s) > 3 { + err = fmt.Errorf("astisub: Invalid number of millisecond digits detected in %s", i) + return + } + + // Parse milliseconds + if milliseconds, err = strconv.Atoi(s); err != nil { + err = fmt.Errorf("astisub: atoi of %s failed: %w", s, err) + return + } + milliseconds *= int(math.Pow10(numberOfMillisecondDigits - len(s))) + s = strings.Join(parts[:len(parts)-1], millisecondSep) + } else { + s = i + } + + // Split hours, minutes and seconds + parts = strings.Split(strings.TrimSpace(s), ":") + var partSeconds, partMinutes, partHours string + if len(parts) == 2 { + partSeconds = parts[1] + partMinutes = parts[0] + } else if len(parts) == 3 { + partSeconds = parts[2] + partMinutes = parts[1] + partHours = parts[0] + } else { + err = fmt.Errorf("astisub: No hours, minutes or seconds detected in %s", i) + return + } + + // Parse seconds + var seconds int + s = 
strings.TrimSpace(partSeconds) + if seconds, err = strconv.Atoi(s); err != nil { + err = fmt.Errorf("astisub: atoi of %s failed: %w", s, err) + return + } + + // Parse minutes + var minutes int + s = strings.TrimSpace(partMinutes) + if minutes, err = strconv.Atoi(s); err != nil { + err = fmt.Errorf("astisub: atoi of %s failed: %w", s, err) + return + } + + // Parse hours + var hours int + if len(partHours) > 0 { + s = strings.TrimSpace(partHours) + if hours, err = strconv.Atoi(s); err != nil { + err = fmt.Errorf("astisub: atoi of %s failed: %w", s, err) + return + } + } + + // Generate output + o = time.Duration(milliseconds)*time.Millisecond + time.Duration(seconds)*time.Second + time.Duration(minutes)*time.Minute + time.Duration(hours)*time.Hour + return +} + +// formatDuration formats a duration +func formatDuration(i time.Duration, millisecondSep string, numberOfMillisecondDigits int) (s string) { + // Parse hours + var hours = int(i / time.Hour) + var n = i % time.Hour + if hours < 10 { + s += "0" + } + s += strconv.Itoa(hours) + ":" + + // Parse minutes + var minutes = int(n / time.Minute) + n = i % time.Minute + if minutes < 10 { + s += "0" + } + s += strconv.Itoa(minutes) + ":" + + // Parse seconds + var seconds = int(n / time.Second) + n = i % time.Second + if seconds < 10 { + s += "0" + } + s += strconv.Itoa(seconds) + millisecondSep + + // Parse milliseconds + var milliseconds = float64(n/time.Millisecond) / float64(1000) + s += fmt.Sprintf("%."+strconv.Itoa(numberOfMillisecondDigits)+"f", milliseconds)[2:] + return +} + +// appendStringToBytesWithNewLine adds a string to bytes then adds a new line +func appendStringToBytesWithNewLine(i []byte, s string) (o []byte) { + o = append(i, []byte(s)...) + o = append(o, bytesLineSeparator...) 
+ return +} diff --git a/vendor/github.com/asticode/go-astisub/teletext.go b/vendor/github.com/asticode/go-astisub/teletext.go new file mode 100644 index 000000000..3223f778c --- /dev/null +++ b/vendor/github.com/asticode/go-astisub/teletext.go @@ -0,0 +1,997 @@ +package astisub + +import ( + "context" + "errors" + "fmt" + "io" + "log" + "math/bits" + "sort" + "strings" + "time" + + "github.com/asticode/go-astikit" + "github.com/asticode/go-astits" +) + +// Errors +var ( + ErrNoValidTeletextPID = errors.New("astisub: no valid teletext PID") +) + +type teletextCharset [96][]byte + +type teletextNationalSubset [13][]byte + +// Chapter: 15.2 | Page: 109 | Link: http://www.etsi.org/deliver/etsi_i_ets/300700_300799/300706/01_60/ets_300706e01p.pdf +// It is indexed by triplet1 then by national option subset code +var teletextCharsets = map[uint8]map[uint8]struct { + g0 *teletextCharset + g2 *teletextCharset + national *teletextNationalSubset +}{ + 0: { + 0: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetEnglish}, + 1: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetFrench}, + 2: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetSwedishFinnishHungarian}, + 3: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetCzechSlovak}, + 4: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetGerman}, + 5: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetPortugueseSpanish}, + 6: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetItalian}, + 7: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin}, + }, + 1: { + 0: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetPolish}, + 1: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetFrench}, + 
2: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetSwedishFinnishHungarian}, + 3: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetCzechSlovak}, + 4: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetGerman}, + 5: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin}, + 6: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetItalian}, + 7: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin}, + }, + 2: { + 0: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetEnglish}, + 1: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetFrench}, + 2: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetSwedishFinnishHungarian}, + 3: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetCzechSlovak}, + 4: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetGerman}, + 5: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetPortugueseSpanish}, + 6: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetItalian}, + 7: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin}, + }, + 3: { + 0: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin}, + 1: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin}, + 2: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin}, + 3: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin}, + 4: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin}, + 5: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetSerbianCroatianSlovenian}, + 6: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin}, + 7: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: 
teletextNationalSubsetRomanian}, + }, + 4: { + 0: {g0: teletextCharsetG0CyrillicOption1, g2: teletextCharsetG2Cyrillic}, + 1: {g0: teletextCharsetG0CyrillicOption2, g2: teletextCharsetG2Cyrillic}, + 2: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetEstonian}, + 3: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetCzechSlovak}, + 4: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetGerman}, + 5: {g0: teletextCharsetG0CyrillicOption3, g2: teletextCharsetG2Cyrillic}, + 6: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetLettishLithuanian}, + }, + 6: { + 3: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Latin, national: teletextNationalSubsetTurkish}, + 7: {g0: teletextCharsetG0Greek, g2: teletextCharsetG2Greek}, + }, + 8: { + 0: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Arabic, national: teletextNationalSubsetEnglish}, + 1: {g0: teletextCharsetG0Latin, g2: teletextCharsetG2Arabic, national: teletextNationalSubsetFrench}, + 7: {g0: teletextCharsetG0Arabic, g2: teletextCharsetG2Arabic}, + }, + 10: { + 5: {g0: teletextCharsetG0Hebrew, g2: teletextCharsetG2Arabic}, + 7: {g0: teletextCharsetG0Arabic, g2: teletextCharsetG2Arabic}, + }, +} + +// Teletext G0 charsets +var ( + teletextCharsetG0CyrillicOption1 = &teletextCharset{ + []byte{0x20}, []byte{0x21}, []byte{0x22}, []byte{0x23}, []byte{0x24}, []byte{0x25}, []byte{0xd1, 0x8b}, + []byte{0x27}, []byte{0x28}, []byte{0x29}, []byte{0x2a}, []byte{0x2b}, []byte{0x2c}, []byte{0x2d}, + []byte{0x2e}, []byte{0x2f}, []byte{0x30}, []byte{0x31}, []byte{0xe3, 0x88, 0x80}, []byte{0x33}, []byte{0x34}, + []byte{0x35}, []byte{0x36}, []byte{0x37}, []byte{0x38}, []byte{0x39}, []byte{0x3a}, []byte{0x3b}, + []byte{0x3c}, []byte{0x3d}, []byte{0x3e}, []byte{0x3f}, []byte{0xd0, 0xa7}, []byte{0xd0, 0x90}, + []byte{0xd0, 0x91}, []byte{0xd0, 0xa6}, []byte{0xd0, 0x94}, []byte{0xd0, 
0x95}, []byte{0xd0, 0xa4}, + []byte{0xd0, 0x93}, []byte{0xd0, 0xa5}, []byte{0xd0, 0x98}, []byte{0xd0, 0x88}, []byte{0xd0, 0x9a}, + []byte{0xd0, 0x9b}, []byte{0xd0, 0x9c}, []byte{0xd0, 0x9d}, []byte{0xd0, 0x9e}, []byte{0xd0, 0x9f}, + []byte{0xd0, 0x8c}, []byte{0xd0, 0xa0}, []byte{0xd0, 0xa1}, []byte{0xd0, 0xa2}, []byte{0xd0, 0xa3}, + []byte{0xd0, 0x92}, []byte{0xd0, 0x83}, []byte{0xd0, 0x89}, []byte{0xd0, 0x8a}, []byte{0xd0, 0x97}, + []byte{0xd0, 0x8b}, []byte{0xd0, 0x96}, []byte{0xd0, 0x82}, []byte{0xd0, 0xa8}, []byte{0xd0, 0x8f}, + []byte{0xd1, 0x87}, []byte{0xd0, 0xb0}, []byte{0xd0, 0xb1}, []byte{0xd1, 0x86}, []byte{0xd0, 0xb4}, + []byte{0xd0, 0xb5}, []byte{0xd1, 0x84}, []byte{0xd0, 0xb3}, []byte{0xd1, 0x85}, []byte{0xd0, 0xb8}, + []byte{0xd0, 0xa8}, []byte{0xd0, 0xba}, []byte{0xd0, 0xbb}, []byte{0xd0, 0xbc}, []byte{0xd0, 0xbd}, + []byte{0xd0, 0xbe}, []byte{0xd0, 0xbf}, []byte{0xd0, 0xac}, []byte{0xd1, 0x80}, []byte{0xd1, 0x81}, + []byte{0xd1, 0x82}, []byte{0xd1, 0x83}, []byte{0xd0, 0xb2}, []byte{0xd0, 0xa3}, []byte{0xd0, 0xa9}, + []byte{0xd0, 0xaa}, []byte{0xd0, 0xb7}, []byte{0xd0, 0xab}, []byte{0xd0, 0xb6}, []byte{0xd0, 0xa2}, + []byte{0xd1, 0x88}, []byte{0xd0, 0xaf}, + } + teletextCharsetG0CyrillicOption2 = &teletextCharset{ + []byte{0x20}, []byte{0x21}, []byte{0x22}, []byte{0x23}, []byte{0x24}, []byte{0x25}, []byte{0xd1, 0x8b}, + []byte{0x27}, []byte{0x28}, []byte{0x29}, []byte{0x2a}, []byte{0x2b}, []byte{0x2c}, []byte{0x2d}, + []byte{0x2e}, []byte{0x2f}, []byte{0x30}, []byte{0x31}, []byte{0x32}, []byte{0x33}, []byte{0x34}, + []byte{0x35}, []byte{0x36}, []byte{0x37}, []byte{0x38}, []byte{0x39}, []byte{0x3a}, []byte{0x3b}, + []byte{0x3c}, []byte{0x3d}, []byte{0x3e}, []byte{0x3f}, []byte{0xd0, 0xae}, []byte{0xd0, 0x90}, + []byte{0xd0, 0x91}, []byte{0xd0, 0xa6}, []byte{0xd0, 0x94}, []byte{0xd0, 0x95}, []byte{0xd0, 0xa4}, + []byte{0xd0, 0x93}, []byte{0xd0, 0xa5}, []byte{0xd0, 0x98}, []byte{0xd0, 0x99}, []byte{0xd0, 0x9a}, + []byte{0xd0, 0x9b}, []byte{0xd0, 0x9c}, 
[]byte{0xd0, 0x9d}, []byte{0xd0, 0x9e}, []byte{0xd0, 0x9f}, + []byte{0xd0, 0xaf}, []byte{0xd0, 0xa0}, []byte{0xd0, 0xa1}, []byte{0xd0, 0xa2}, []byte{0xd0, 0xa3}, + []byte{0xd0, 0x96}, []byte{0xd0, 0x92}, []byte{0xd0, 0xac}, []byte{0xd0, 0xaa}, []byte{0xd0, 0x97}, + []byte{0xd0, 0xa8}, []byte{0xd0, 0xad}, []byte{0xd0, 0xa9}, []byte{0xd0, 0xa7}, []byte{0xd0, 0xab}, + []byte{0xd1, 0x8e}, []byte{0xd0, 0xb0}, []byte{0xd0, 0xb1}, []byte{0xd1, 0x86}, []byte{0xd0, 0xb4}, + []byte{0xd0, 0xb5}, []byte{0xd1, 0x84}, []byte{0xd0, 0xb3}, []byte{0xd1, 0x85}, []byte{0xd0, 0xb8}, + []byte{0xd0, 0xb9}, []byte{0xd0, 0xba}, []byte{0xd0, 0xbb}, []byte{0xd0, 0xbc}, []byte{0xd0, 0xbd}, + []byte{0xd0, 0xbe}, []byte{0xd0, 0xbf}, []byte{0xd1, 0x8f}, []byte{0xd1, 0x80}, []byte{0xd1, 0x81}, + []byte{0xd1, 0x82}, []byte{0xd1, 0x83}, []byte{0xd0, 0xb6}, []byte{0xd0, 0xb2}, []byte{0xd1, 0x8c}, + []byte{0xd1, 0x8a}, []byte{0xd0, 0xb7}, []byte{0xd1, 0x88}, []byte{0xd1, 0x8d}, []byte{0xd1, 0x89}, + []byte{0xd1, 0x87}, []byte{0xd1, 0x8b}, + } + teletextCharsetG0CyrillicOption3 = &teletextCharset{ + []byte{0x20}, []byte{0x21}, []byte{0x22}, []byte{0x23}, []byte{0x24}, []byte{0x25}, []byte{0xc3, 0xaf}, + []byte{0x27}, []byte{0x28}, []byte{0x29}, []byte{0x2a}, []byte{0x2b}, []byte{0x2c}, []byte{0x2d}, + []byte{0x2e}, []byte{0x2f}, []byte{0x30}, []byte{0x31}, []byte{0x32}, []byte{0x33}, []byte{0x34}, + []byte{0x35}, []byte{0x36}, []byte{0x37}, []byte{0x38}, []byte{0x39}, []byte{0x3a}, []byte{0x3b}, + []byte{0x3c}, []byte{0x3d}, []byte{0x3e}, []byte{0x3f}, []byte{0xd0, 0xae}, []byte{0xd0, 0x90}, + []byte{0xd0, 0x91}, []byte{0xd0, 0xa6}, []byte{0xd0, 0x94}, []byte{0xd0, 0x95}, []byte{0xd0, 0xa4}, + []byte{0xd0, 0x93}, []byte{0xd0, 0xa5}, []byte{0xd0, 0x98}, []byte{0xd0, 0x99}, []byte{0xd0, 0x9a}, + []byte{0xd0, 0x9b}, []byte{0xd0, 0x9c}, []byte{0xd0, 0x9d}, []byte{0xd0, 0x9e}, []byte{0xd0, 0x9f}, + []byte{0xd0, 0xaf}, []byte{0xd0, 0xa0}, []byte{0xd0, 0xa1}, []byte{0xd0, 0xa2}, []byte{0xd0, 0xa3}, + 
[]byte{0xd0, 0x96}, []byte{0xd0, 0x92}, []byte{0xd0, 0xac}, []byte{0x49}, []byte{0xd0, 0x97}, + []byte{0xd0, 0xa8}, []byte{0xd0, 0xad}, []byte{0xd0, 0xa9}, []byte{0xd0, 0xa7}, []byte{0xc3, 0x8f}, + []byte{0xd1, 0x8e}, []byte{0xd0, 0xb0}, []byte{0xd0, 0xb1}, []byte{0xd1, 0x86}, []byte{0xd0, 0xb4}, + []byte{0xd0, 0xb5}, []byte{0xd1, 0x84}, []byte{0xd0, 0xb3}, []byte{0xd1, 0x85}, []byte{0xd0, 0xb8}, + []byte{0xd0, 0xb9}, []byte{0xd0, 0xba}, []byte{0xd0, 0xbb}, []byte{0xd0, 0xbc}, []byte{0xd0, 0xbd}, + []byte{0xd0, 0xbe}, []byte{0xd0, 0xbf}, []byte{0xd1, 0x8f}, []byte{0xd1, 0x80}, []byte{0xd1, 0x81}, + []byte{0xd1, 0x82}, []byte{0xd1, 0x83}, []byte{0xd0, 0xb6}, []byte{0xd0, 0xb2}, []byte{0xd1, 0x8c}, + []byte{0x69}, []byte{0xd0, 0xb7}, []byte{0xd1, 0x88}, []byte{0xd1, 0x8d}, []byte{0xd1, 0x89}, + []byte{0xd1, 0x87}, []byte{0xc3, 0xbf}, + } + teletextCharsetG0Greek = &teletextCharset{ + []byte{0x20}, []byte{0x21}, []byte{0x22}, []byte{0x23}, []byte{0x24}, []byte{0x25}, []byte{0x26}, + []byte{0x27}, []byte{0x28}, []byte{0x29}, []byte{0x2a}, []byte{0x2b}, []byte{0x2c}, []byte{0x2d}, + []byte{0x2e}, []byte{0x2f}, []byte{0x30}, []byte{0x31}, []byte{0x32}, []byte{0x33}, []byte{0x34}, + []byte{0x35}, []byte{0x36}, []byte{0x37}, []byte{0x38}, []byte{0x39}, []byte{0x3a}, []byte{0x3b}, + []byte{0x3c}, []byte{0x3d}, []byte{0x3e}, []byte{0x3f}, []byte{0xce, 0x90}, []byte{0xce, 0x91}, + []byte{0xce, 0x92}, []byte{0xce, 0x93}, []byte{0xce, 0x94}, []byte{0xce, 0x95}, []byte{0xce, 0x96}, + []byte{0xce, 0x97}, []byte{0xce, 0x98}, []byte{0xce, 0x99}, []byte{0xce, 0x9a}, []byte{0xce, 0x9b}, + []byte{0xce, 0x9c}, []byte{0xce, 0x9d}, []byte{0xce, 0x9e}, []byte{0xce, 0x9f}, []byte{0xce, 0xa0}, + []byte{0xce, 0xa1}, []byte{0xce, 0xa2}, []byte{0xce, 0xa3}, []byte{0xce, 0xa4}, []byte{0xce, 0xa5}, + []byte{0xce, 0xa6}, []byte{0xce, 0xa7}, []byte{0xce, 0xa8}, []byte{0xce, 0xa9}, []byte{0xce, 0xaa}, + []byte{0xce, 0xab}, []byte{0xce, 0xac}, []byte{0xce, 0xad}, []byte{0xce, 0xae}, []byte{0xce, 
0xaf}, + []byte{0xce, 0xb0}, []byte{0xce, 0xb1}, []byte{0xce, 0xb2}, []byte{0xce, 0xb3}, []byte{0xce, 0xb4}, + []byte{0xce, 0xb5}, []byte{0xce, 0xb6}, []byte{0xce, 0xb7}, []byte{0xce, 0xb8}, []byte{0xce, 0xb9}, + []byte{0xce, 0xba}, []byte{0xce, 0xbb}, []byte{0xce, 0xbc}, []byte{0xce, 0xbd}, []byte{0xce, 0xbe}, + []byte{0xce, 0xbf}, []byte{0xcf, 0x80}, []byte{0xcf, 0x81}, []byte{0xcf, 0x82}, []byte{0xcf, 0x83}, + []byte{0xcf, 0x84}, []byte{0xcf, 0x85}, []byte{0xcf, 0x86}, []byte{0xcf, 0x87}, []byte{0xcf, 0x88}, + []byte{0xcf, 0x89}, []byte{0xcf, 0x8a}, []byte{0xcf, 0x8b}, []byte{0xcf, 0x8c}, []byte{0xcf, 0x8d}, + []byte{0xcf, 0x8e}, []byte{0xcf, 0x8f}, + } + teletextCharsetG0Latin = &teletextCharset{ + []byte{0x20}, []byte{0x21}, []byte{0x22}, []byte{0xc2, 0xa3}, []byte{0x24}, []byte{0x25}, []byte{0x26}, + []byte{0x27}, []byte{0x28}, []byte{0x29}, []byte{0x2a}, []byte{0x2b}, []byte{0x2c}, []byte{0x2d}, + []byte{0x2e}, []byte{0x2f}, []byte{0x30}, []byte{0x31}, []byte{0x32}, []byte{0x33}, []byte{0x34}, + []byte{0x35}, []byte{0x36}, []byte{0x37}, []byte{0x38}, []byte{0x39}, []byte{0x3a}, []byte{0x3b}, + []byte{0x3c}, []byte{0x3d}, []byte{0x3e}, []byte{0x3f}, []byte{0x40}, []byte{0x41}, []byte{0x42}, + []byte{0x43}, []byte{0x44}, []byte{0x45}, []byte{0x46}, []byte{0x47}, []byte{0x48}, []byte{0x49}, + []byte{0x4a}, []byte{0x4b}, []byte{0x4c}, []byte{0x4d}, []byte{0x4e}, []byte{0x4f}, []byte{0x50}, + []byte{0x51}, []byte{0x52}, []byte{0x53}, []byte{0x54}, []byte{0x55}, []byte{0x56}, []byte{0x57}, + []byte{0x58}, []byte{0x59}, []byte{0x5a}, []byte{0xc2, 0xab}, []byte{0xc2, 0xbd}, []byte{0xc2, 0xbb}, + []byte{0x5e}, []byte{0x23}, []byte{0x2d}, []byte{0x61}, []byte{0x62}, []byte{0x63}, []byte{0x64}, + []byte{0x65}, []byte{0x66}, []byte{0x67}, []byte{0x68}, []byte{0x69}, []byte{0x6a}, []byte{0x6b}, + []byte{0x6c}, []byte{0x6d}, []byte{0x6e}, []byte{0x6f}, []byte{0x70}, []byte{0x71}, []byte{0x72}, + []byte{0x73}, []byte{0x74}, []byte{0x75}, []byte{0x76}, []byte{0x77}, 
[]byte{0x78}, []byte{0x79}, + []byte{0x7a}, []byte{0xc2, 0xbc}, []byte{0xc2, 0xa6}, []byte{0xc2, 0xbe}, []byte{0xc3, 0xb7}, []byte{0x7f}, + } + // TODO Add + teletextCharsetG0Arabic = teletextCharsetG0Latin + teletextCharsetG0Hebrew = teletextCharsetG0Latin +) + +// Teletext G2 charsets +var ( + teletextCharsetG2Latin = &teletextCharset{ + []byte{0x20}, []byte{0xc2, 0xa1}, []byte{0xc2, 0xa2}, []byte{0xc2, 0xa3}, []byte{0x24}, + []byte{0xc2, 0xa5}, []byte{0x23}, []byte{0xc2, 0xa7}, []byte{0xc2, 0xa4}, []byte{0xe2, 0x80, 0x98}, + []byte{0xe2, 0x80, 0x9c}, []byte{0xc2, 0xab}, []byte{0xe2, 0x86, 0x90}, []byte{0xe2, 0x86, 0x91}, + []byte{0xe2, 0x86, 0x92}, []byte{0xe2, 0x86, 0x93}, []byte{0xc2, 0xb0}, []byte{0xc2, 0xb1}, + []byte{0xc2, 0xb2}, []byte{0xc2, 0xb3}, []byte{0xc3, 0x97}, []byte{0xc2, 0xb5}, []byte{0xc2, 0xb6}, + []byte{0xc2, 0xb7}, []byte{0xc3, 0xb7}, []byte{0xe2, 0x80, 0x99}, []byte{0xe2, 0x80, 0x9d}, + []byte{0xc2, 0xbb}, []byte{0xc2, 0xbc}, []byte{0xc2, 0xbd}, []byte{0xc2, 0xbe}, []byte{0xc2, 0xbf}, + []byte{0x20}, []byte{0xcc, 0x80}, []byte{0xcc, 0x81}, []byte{0xcc, 0x82}, []byte{0xcc, 0x83}, + []byte{0xcc, 0x84}, []byte{0xcc, 0x86}, []byte{0xcc, 0x87}, []byte{0xcc, 0x88}, []byte{0x00}, + []byte{0xcc, 0x8a}, []byte{0xcc, 0xa7}, []byte{0x5f}, []byte{0xcc, 0x8b}, []byte{0xcc, 0xa8}, + []byte{0xcc, 0x8c}, []byte{0xe2, 0x80, 0x95}, []byte{0xc2, 0xb9}, []byte{0xc2, 0xae}, []byte{0xc2, 0xa9}, + []byte{0xe2, 0x84, 0xa2}, []byte{0xe2, 0x99, 0xaa}, []byte{0xe2, 0x82, 0xac}, []byte{0xe2, 0x80, 0xb0}, + []byte{0xce, 0xb1}, []byte{0x00}, []byte{0x00}, []byte{0x00}, []byte{0xe2, 0x85, 0x9b}, + []byte{0xe2, 0x85, 0x9c}, []byte{0xe2, 0x85, 0x9d}, []byte{0xe2, 0x85, 0x9e}, []byte{0xce, 0xa9}, + []byte{0xc3, 0x86}, []byte{0xc4, 0x90}, []byte{0xc2, 0xaa}, []byte{0xc4, 0xa6}, []byte{0x00}, + []byte{0xc4, 0xb2}, []byte{0xc4, 0xbf}, []byte{0xc5, 0x81}, []byte{0xc3, 0x98}, []byte{0xc5, 0x92}, + []byte{0xc2, 0xba}, []byte{0xc3, 0x9e}, []byte{0xc5, 0xa6}, []byte{0xc5, 0x8a}, 
[]byte{0xc5, 0x89}, + []byte{0xc4, 0xb8}, []byte{0xc3, 0xa6}, []byte{0xc4, 0x91}, []byte{0xc3, 0xb0}, []byte{0xc4, 0xa7}, + []byte{0xc4, 0xb1}, []byte{0xc4, 0xb3}, []byte{0xc5, 0x80}, []byte{0xc5, 0x82}, []byte{0xc3, 0xb8}, + []byte{0xc5, 0x93}, []byte{0xc3, 0x9f}, []byte{0xc3, 0xbe}, []byte{0xc5, 0xa7}, []byte{0xc5, 0x8b}, + []byte{0x20}, + } + // TODO Add + teletextCharsetG2Arabic = teletextCharsetG2Latin + teletextCharsetG2Cyrillic = teletextCharsetG2Latin + teletextCharsetG2Greek = teletextCharsetG2Latin +) + +var teletextNationalSubsetCharactersPositionInG0 = [13]uint8{0x03, 0x04, 0x20, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, 0x40, 0x5b, 0x5c, 0x5d, 0x5e} + +// Teletext national subsets +var ( + teletextNationalSubsetCzechSlovak = &teletextNationalSubset{ + []byte{0x23}, []byte{0xc5, 0xaf}, []byte{0xc4, 0x8d}, []byte{0xc5, 0xa5}, []byte{0xc5, 0xbe}, + []byte{0xc3, 0xbd}, []byte{0xc3, 0xad}, []byte{0xc5, 0x99}, []byte{0xc3, 0xa9}, []byte{0xc3, 0xa1}, + []byte{0xc4, 0x9b}, []byte{0xc3, 0xba}, []byte{0xc5, 0xa1}, + } + teletextNationalSubsetEnglish = &teletextNationalSubset{ + []byte{0xc2, 0xa3}, []byte{0x24}, []byte{0x40}, []byte{0xc2, 0xab}, []byte{0xc2, 0xbd}, []byte{0xc2, 0xbb}, + []byte{0x5e}, []byte{0x23}, []byte{0x2d}, []byte{0xc2, 0xbc}, []byte{0xc2, 0xa6}, []byte{0xc2, 0xbe}, + []byte{0xc3, 0xb7}, + } + teletextNationalSubsetEstonian = &teletextNationalSubset{ + []byte{0x23}, []byte{0xc3, 0xb5}, []byte{0xc5, 0xa0}, []byte{0xc3, 0x84}, []byte{0xc3, 0x96}, + []byte{0xc5, 0xbe}, []byte{0xc3, 0x9c}, []byte{0xc3, 0x95}, []byte{0xc5, 0xa1}, []byte{0xc3, 0xa4}, + []byte{0xc3, 0xb6}, []byte{0xc5, 0xbe}, []byte{0xc3, 0xbc}, + } + teletextNationalSubsetFrench = &teletextNationalSubset{ + []byte{0xc3, 0xa9}, []byte{0xc3, 0xaf}, []byte{0xc3, 0xa0}, []byte{0xc3, 0xab}, []byte{0xc3, 0xaa}, + []byte{0xc3, 0xb9}, []byte{0xc3, 0xae}, []byte{0x23}, []byte{0xc3, 0xa8}, []byte{0xc3, 0xa2}, + []byte{0xc3, 0xb4}, []byte{0xc3, 0xbb}, []byte{0xc3, 0xa7}, + } + 
teletextNationalSubsetGerman = &teletextNationalSubset{ + []byte{0x23}, []byte{0x24}, []byte{0xc2, 0xa7}, []byte{0xc3, 0x84}, []byte{0xc3, 0x96}, []byte{0xc3, 0x9c}, + []byte{0x5e}, []byte{0x5f}, []byte{0xc2, 0xb0}, []byte{0xc3, 0xa4}, []byte{0xc3, 0xb6}, []byte{0xc3, 0xbc}, + []byte{0xc3, 0x9f}, + } + teletextNationalSubsetItalian = &teletextNationalSubset{ + []byte{0xc2, 0xa3}, []byte{0x24}, []byte{0xc3, 0xa9}, []byte{0xc2, 0xb0}, []byte{0xc3, 0xa7}, + []byte{0xc2, 0xbb}, []byte{0x5e}, []byte{0x23}, []byte{0xc3, 0xb9}, []byte{0xc3, 0xa0}, []byte{0xc3, 0xb2}, + []byte{0xc3, 0xa8}, []byte{0xc3, 0xac}, + } + teletextNationalSubsetLettishLithuanian = &teletextNationalSubset{ + []byte{0x23}, []byte{0x24}, []byte{0xc5, 0xa0}, []byte{0xc4, 0x97}, []byte{0xc4, 0x99}, []byte{0xc5, 0xbd}, + []byte{0xc4, 0x8d}, []byte{0xc5, 0xab}, []byte{0xc5, 0xa1}, []byte{0xc4, 0x85}, []byte{0xc5, 0xb3}, + []byte{0xc5, 0xbe}, []byte{0xc4, 0xaf}, + } + teletextNationalSubsetPolish = &teletextNationalSubset{ + []byte{0x23}, []byte{0xc5, 0x84}, []byte{0xc4, 0x85}, []byte{0xc5, 0xbb}, []byte{0xc5, 0x9a}, + []byte{0xc5, 0x81}, []byte{0xc4, 0x87}, []byte{0xc3, 0xb3}, []byte{0xc4, 0x99}, []byte{0xc5, 0xbc}, + []byte{0xc5, 0x9b}, []byte{0xc5, 0x82}, []byte{0xc5, 0xba}, + } + teletextNationalSubsetPortugueseSpanish = &teletextNationalSubset{ + []byte{0xc3, 0xa7}, []byte{0x24}, []byte{0xc2, 0xa1}, []byte{0xc3, 0xa1}, []byte{0xc3, 0xa9}, + []byte{0xc3, 0xad}, []byte{0xc3, 0xb3}, []byte{0xc3, 0xba}, []byte{0xc2, 0xbf}, []byte{0xc3, 0xbc}, + []byte{0xc3, 0xb1}, []byte{0xc3, 0xa8}, []byte{0xc3, 0xa0}, + } + teletextNationalSubsetRomanian = &teletextNationalSubset{ + []byte{0x23}, []byte{0xc2, 0xa4}, []byte{0xc5, 0xa2}, []byte{0xc3, 0x82}, []byte{0xc5, 0x9e}, + []byte{0xc4, 0x82}, []byte{0xc3, 0x8e}, []byte{0xc4, 0xb1}, []byte{0xc5, 0xa3}, []byte{0xc3, 0xa2}, + []byte{0xc5, 0x9f}, []byte{0xc4, 0x83}, []byte{0xc3, 0xae}, + } + teletextNationalSubsetSerbianCroatianSlovenian = &teletextNationalSubset{ + 
[]byte{0x23}, []byte{0xc3, 0x8b}, []byte{0xc4, 0x8c}, []byte{0xc4, 0x86}, []byte{0xc5, 0xbd}, + []byte{0xc4, 0x90}, []byte{0xc5, 0xa0}, []byte{0xc3, 0xab}, []byte{0xc4, 0x8d}, []byte{0xc4, 0x87}, + []byte{0xc5, 0xbe}, []byte{0xc4, 0x91}, []byte{0xc5, 0xa1}, + } + teletextNationalSubsetSwedishFinnishHungarian = &teletextNationalSubset{ + []byte{0x23}, []byte{0xc2, 0xa4}, []byte{0xc3, 0x89}, []byte{0xc3, 0x84}, []byte{0xc3, 0x96}, + []byte{0xc3, 0x85}, []byte{0xc3, 0x9c}, []byte{0x5f}, []byte{0xc3, 0xa9}, []byte{0xc3, 0xa4}, + []byte{0xc3, 0xb6}, []byte{0xc3, 0xa5}, []byte{0xc3, 0xbc}, + } + teletextNationalSubsetTurkish = &teletextNationalSubset{ + []byte{0x54}, []byte{0xc4, 0x9f}, []byte{0xc4, 0xb0}, []byte{0xc5, 0x9e}, []byte{0xc3, 0x96}, + []byte{0xc3, 0x87}, []byte{0xc3, 0x9c}, []byte{0xc4, 0x9e}, []byte{0xc4, 0xb1}, []byte{0xc5, 0x9f}, + []byte{0xc3, 0xb6}, []byte{0xc3, 0xa7}, []byte{0xc3, 0xbc}, + } +) + +// Teletext PES data types +const ( + teletextPESDataTypeEBU = "EBU" + teletextPESDataTypeUnknown = "unknown" +) + +func teletextPESDataType(dataIdentifier uint8) string { + switch { + case dataIdentifier >= 0x10 && dataIdentifier <= 0x1f: + return teletextPESDataTypeEBU + } + return teletextPESDataTypeUnknown +} + +// Teletext PES data unit ids +const ( + teletextPESDataUnitIDEBUNonSubtitleData = 0x2 + teletextPESDataUnitIDEBUSubtitleData = 0x3 + teletextPESDataUnitIDStuffing = 0xff +) + +// TeletextOptions represents teletext options +type TeletextOptions struct { + Page int + PID int +} + +// ReadFromTeletext parses a teletext content +// http://www.etsi.org/deliver/etsi_en/300400_300499/300472/01.03.01_60/en_300472v010301p.pdf +// http://www.etsi.org/deliver/etsi_i_ets/300700_300799/300706/01_60/ets_300706e01p.pdf +// TODO Update README +// TODO Add tests +func ReadFromTeletext(r io.Reader, o TeletextOptions) (s *Subtitles, err error) { + // Init + s = &Subtitles{} + var dmx = astits.NewDemuxer(context.Background(), r) + + // Get the teletext PID + var 
pid uint16 + if pid, err = teletextPID(dmx, o); err != nil { + if err != ErrNoValidTeletextPID { + err = fmt.Errorf("astisub: getting teletext PID failed: %w", err) + } + return + } + + // Create character decoder + cd := newTeletextCharacterDecoder() + + // Create page buffer + b := newTeletextPageBuffer(o.Page, cd) + + // Loop in data + var firstTime, lastTime time.Time + var d *astits.DemuxerData + var ps []*teletextPage + for { + // Fetch next data + if d, err = dmx.NextData(); err != nil { + if err == astits.ErrNoMorePackets { + err = nil + break + } + err = fmt.Errorf("astisub: fetching next data failed: %w", err) + return + } + + // We only parse PES data + if d.PES == nil { + continue + } + + // This data is not of interest to us + if d.PID != pid || d.PES.Header.StreamID != astits.StreamIDPrivateStream1 { + continue + } + + // Get time + t := teletextDataTime(d) + if t.IsZero() { + continue + } + + // First and last time + if firstTime.IsZero() || firstTime.After(t) { + firstTime = t + } + if lastTime.IsZero() || lastTime.Before(t) { + lastTime = t + } + + // Append pages + ps = append(ps, b.process(d.PES, t)...) + } + + // Dump buffer + ps = append(ps, b.dump(lastTime)...) 
+ + // Parse pages + for _, p := range ps { + p.parse(s, cd, firstTime) + } + return +} + +// TODO Add tests +func teletextDataTime(d *astits.DemuxerData) time.Time { + if d.PES.Header != nil && d.PES.Header.OptionalHeader != nil && d.PES.Header.OptionalHeader.PTS != nil { + return d.PES.Header.OptionalHeader.PTS.Time() + } else if d.FirstPacket != nil && d.FirstPacket.AdaptationField != nil && d.FirstPacket.AdaptationField.PCR != nil { + return d.FirstPacket.AdaptationField.PCR.Time() + } + return time.Time{} +} + +// If the PID teletext option is not indicated, it will walk through the ts data until it reaches a PMT packet to +// detect the first valid teletext PID +// TODO Add tests +func teletextPID(dmx *astits.Demuxer, o TeletextOptions) (pid uint16, err error) { + // PID is in the options + if o.PID > 0 { + pid = uint16(o.PID) + return + } + + // Loop in data + var d *astits.DemuxerData + for { + // Fetch next data + if d, err = dmx.NextData(); err != nil { + if err == astits.ErrNoMorePackets { + err = ErrNoValidTeletextPID + return + } + err = fmt.Errorf("astisub: fetching next data failed: %w", err) + return + } + + // PMT data + if d.PMT != nil { + // Retrieve valid teletext PIDs + var pids []uint16 + for _, s := range d.PMT.ElementaryStreams { + for _, dsc := range s.ElementaryStreamDescriptors { + if dsc.Tag == astits.DescriptorTagTeletext || dsc.Tag == astits.DescriptorTagVBITeletext { + pids = append(pids, s.ElementaryPID) + } + } + } + + // No valid teletext PIDs + if len(pids) == 0 { + err = ErrNoValidTeletextPID + return + } + + // Set pid + pid = pids[0] + log.Printf("astisub: no teletext pid specified, using pid %d", pid) + + // Rewind + if _, err = dmx.Rewind(); err != nil { + err = fmt.Errorf("astisub: rewinding failed: %w", err) + return + } + return + } + } +} + +type teletextPageBuffer struct { + cd *teletextCharacterDecoder + currentPage *teletextPage + donePages []*teletextPage + magazineNumber uint8 + pageNumber int + receiving bool +} + 
+func newTeletextPageBuffer(page int, cd *teletextCharacterDecoder) *teletextPageBuffer { + return &teletextPageBuffer{ + cd: cd, + magazineNumber: uint8(page / 100), + pageNumber: page % 100, + } +} + +// TODO Add tests +func (b *teletextPageBuffer) dump(lastTime time.Time) (ps []*teletextPage) { + if b.currentPage != nil { + b.currentPage.end = lastTime + ps = []*teletextPage{b.currentPage} + } + return +} + +// TODO Add tests +func (b *teletextPageBuffer) process(d *astits.PESData, t time.Time) (ps []*teletextPage) { + // Data identifier + var offset int + dataIdentifier := uint8(d.Data[offset]) + offset += 1 + + // Check data type + if teletextPESDataType(dataIdentifier) != teletextPESDataTypeEBU { + return + } + + // Loop through data units + for offset < len(d.Data) { + // ID + id := uint8(d.Data[offset]) + offset += 1 + + // Length + length := uint8(d.Data[offset]) + offset += 1 + + // Offset end + offsetEnd := offset + int(length) + if offsetEnd > len(d.Data) { + break + } + + // Parse data unit + b.parseDataUnit(d.Data[offset:offsetEnd], id, t) + + // Seek to end of data unit + offset = offsetEnd + } + + // Dump buffer + ps = b.donePages + b.donePages = []*teletextPage(nil) + return ps +} + +// TODO Add tests +func (b *teletextPageBuffer) parseDataUnit(i []byte, id uint8, t time.Time) { + // Check id + if id != teletextPESDataUnitIDEBUSubtitleData { + return + } + + // Field parity: i[0]&0x20 > 0 + // Line offset: uint8(i[0] & 0x1f) + // Framing code + framingCode := uint8(i[1]) + + // Check framing code + if framingCode != 0xe4 { + return + } + + // Magazine number and packet number + h1, ok := astikit.ByteHamming84Decode(i[2]) + if !ok { + return + } + h2, ok := astikit.ByteHamming84Decode(i[3]) + if !ok { + return + } + h := h2<<4 | h1 + magazineNumber := h & 0x7 + if magazineNumber == 0 { + magazineNumber = 8 + } + packetNumber := h >> 3 + + // Parse packet + b.parsePacket(i[4:], magazineNumber, packetNumber, t) +} + +// TODO Add tests +func (b 
*teletextPageBuffer) parsePacket(i []byte, magazineNumber, packetNumber uint8, t time.Time) { + if packetNumber == 0 { + b.parsePacketHeader(i, magazineNumber, t) + } else if b.receiving && magazineNumber == b.magazineNumber && (packetNumber >= 1 && packetNumber <= 25) { + b.parsePacketData(i, packetNumber) + } else { + // Designation code + designationCode, ok := astikit.ByteHamming84Decode(i[0]) + if !ok { + return + } + + // Parse packet + if b.receiving && magazineNumber == b.magazineNumber && packetNumber == 26 { + // TODO Implement + } else if b.receiving && magazineNumber == b.magazineNumber && packetNumber == 28 { + b.parsePacket28And29(i[1:], packetNumber, designationCode) + } else if magazineNumber == b.magazineNumber && packetNumber == 29 { + b.parsePacket28And29(i[1:], packetNumber, designationCode) + } else if magazineNumber == 8 && packetNumber == 30 { + b.parsePacket30(i, designationCode) + } + } +} + +// TODO Add tests +func (b *teletextPageBuffer) parsePacketHeader(i []byte, magazineNumber uint8, t time.Time) (transmissionDone bool) { + // Page number units + pageNumberUnits, ok := astikit.ByteHamming84Decode(i[0]) + if !ok { + return + } + + // Page number tens + pageNumberTens, ok := astikit.ByteHamming84Decode(i[1]) + if !ok { + return + } + pageNumber := int(pageNumberTens)*10 + int(pageNumberUnits) + + // 0xff is a reserved page number value + if pageNumberTens == 0xf && pageNumberUnits == 0xf { + return + } + + // Update magazine and page number + if b.magazineNumber == 0 && b.pageNumber == 0 { + // C6 + controlBits, ok := astikit.ByteHamming84Decode(i[5]) + if !ok { + return + } + subtitleFlag := controlBits&0x8 > 0 + + // This is a subtitle page + if subtitleFlag { + b.magazineNumber = magazineNumber + b.pageNumber = pageNumber + log.Printf("astisub: no teletext page specified, using page %d%.2d", b.magazineNumber, b.pageNumber) + } + } + + // C11 --> C14 + controlBits, ok := astikit.ByteHamming84Decode(i[7]) + if !ok { + return + } + 
magazineSerial := controlBits&0x1 > 0 + charsetCode := controlBits >> 1 + + // Page transmission is done + if b.receiving && ((magazineSerial && pageNumber != b.pageNumber) || + (!magazineSerial && pageNumber != b.pageNumber && magazineNumber == b.magazineNumber)) { + b.receiving = false + return + } + + // Invalid magazine or page number + if pageNumber != b.pageNumber || magazineNumber != b.magazineNumber { + return + } + + // Now that we know when the previous page ends we can add it to the done slice + if b.currentPage != nil { + b.currentPage.end = t + b.donePages = append(b.donePages, b.currentPage) + } + + // Reset + b.receiving = true + b.currentPage = newTeletextPage(charsetCode, t) + return +} + +// TODO Add tests +func (b *teletextPageBuffer) parsePacketData(i []byte, packetNumber uint8) { + // Make sure the map is initialized + if _, ok := b.currentPage.data[packetNumber]; !ok { + b.currentPage.data[packetNumber] = make([]byte, 40) + } + + // Loop through input + b.currentPage.rows = append(b.currentPage.rows, int(packetNumber)) + for idx := uint8(0); idx < 40; idx++ { + v, ok := astikit.ByteParity(bits.Reverse8(i[idx])) + if !ok { + v = 0 + } + b.currentPage.data[packetNumber][idx] = v + } +} + +// TODO Add tests +func (b *teletextPageBuffer) parsePacket28And29(i []byte, packetNumber, designationCode uint8) { + // Invalid designation code + if designationCode != 0 && designationCode != 4 { + return + } + + // Triplet 1 + // TODO triplet1 should be the results of hamming 24/18 decoding + triplet1 := uint32(i[2])<<16 | uint32(i[1])<<8 | uint32(i[0]) + + // We only process x/28 format 1 + if packetNumber == 28 && triplet1&0xf > 0 { + return + } + + // Update character decoder + if packetNumber == 28 { + b.cd.setTripletX28(triplet1) + } else { + b.cd.setTripletM29(triplet1) + } +} + +// TODO Add tests +func (b *teletextPageBuffer) parsePacket30(i []byte, designationCode uint8) { + // Switch on designation code to determine format + switch designationCode { 
+ case 0, 1: + b.parsePacket30Format1(i) + case 2, 3: + b.parsePacket30Format2(i) + } +} + +func (b *teletextPageBuffer) parsePacket30Format1(i []byte) { + // TODO Implement + +} + +func (b *teletextPageBuffer) parsePacket30Format2(i []byte) { + // TODO Implement +} + +type teletextCharacterDecoder struct { + c teletextCharset + lastPageCharsetCode *uint8 + tripletM29 *uint32 + tripletX28 *uint32 +} + +func newTeletextCharacterDecoder() *teletextCharacterDecoder { + return &teletextCharacterDecoder{} +} + +// TODO Add tests +func (d *teletextCharacterDecoder) setTripletM29(i uint32) { + if *d.tripletM29 != i { + d.tripletM29 = astikit.UInt32Ptr(i) + d.updateCharset(d.lastPageCharsetCode, true) + } +} + +// TODO Add tests +func (d *teletextCharacterDecoder) setTripletX28(i uint32) { + if *d.tripletX28 != i { + d.tripletX28 = astikit.UInt32Ptr(i) + d.updateCharset(d.lastPageCharsetCode, true) + } +} + +// TODO Add tests +func (d *teletextCharacterDecoder) decode(i byte) []byte { + if i < 0x20 { + return []byte{} + } + return d.c[i-0x20] +} + +// TODO Add tests +func (d *teletextCharacterDecoder) updateCharset(pageCharsetCode *uint8, force bool) { + // Charset is up to date + if d.lastPageCharsetCode != nil && *pageCharsetCode == *d.lastPageCharsetCode && !force { + return + } + d.lastPageCharsetCode = pageCharsetCode + + // Get triplet + var triplet uint32 + if d.tripletX28 != nil { + triplet = *d.tripletX28 + } else if d.tripletM29 != nil { + triplet = *d.tripletM29 + } + + // Get charsets + d.c = *teletextCharsetG0Latin + var nationalOptionSubset *teletextNationalSubset + if v1, ok := teletextCharsets[uint8((triplet&0x3f80)>>10)]; ok { + if v2, ok := v1[*pageCharsetCode]; ok { + d.c = *v2.g0 + nationalOptionSubset = v2.national + } + } + + // Update g0 with national option subset + if nationalOptionSubset != nil { + for k, v := range nationalOptionSubset { + d.c[teletextNationalSubsetCharactersPositionInG0[k]] = v + } + } +} + +type teletextPage struct { + 
charsetCode uint8 + data map[uint8][]byte + end time.Time + rows []int + start time.Time +} + +func newTeletextPage(charsetCode uint8, start time.Time) *teletextPage { + return &teletextPage{ + charsetCode: charsetCode, + data: make(map[uint8][]byte), + start: start, + } +} + +func (p *teletextPage) parse(s *Subtitles, d *teletextCharacterDecoder, firstTime time.Time) { + // Update charset + d.updateCharset(astikit.UInt8Ptr(p.charsetCode), false) + + // No data + if len(p.data) == 0 { + return + } + + // Order rows + sort.Ints(p.rows) + + // Create item + i := &Item{ + EndAt: p.end.Sub(firstTime), + StartAt: p.start.Sub(firstTime), + } + + // Loop through rows + for _, idxRow := range p.rows { + parseTeletextRow(i, d, nil, p.data[uint8(idxRow)]) + } + + // Append item + s.Items = append(s.Items, i) +} + +type decoder interface { + decode(i byte) []byte +} + +type styler interface { + hasBeenSet() bool + hasChanged(s *StyleAttributes) bool + parseSpacingAttribute(i byte) + propagateStyleAttributes(s *StyleAttributes) + update(sa *StyleAttributes) +} + +func parseTeletextRow(i *Item, d decoder, fs func() styler, row []byte) { + // Loop through columns + var l = Line{} + var li = LineItem{InlineStyle: &StyleAttributes{}} + var started bool + var s styler + for _, v := range row { + // Create specific styler + if fs != nil { + s = fs() + } + + // Get spacing attributes + var color *Color + var doubleHeight, doubleSize, doubleWidth *bool + switch v { + case 0x0: + color = ColorBlack + case 0x1: + color = ColorRed + case 0x2: + color = ColorGreen + case 0x3: + color = ColorYellow + case 0x4: + color = ColorBlue + case 0x5: + color = ColorMagenta + case 0x6: + color = ColorCyan + case 0x7: + color = ColorWhite + case 0xa: + started = false + case 0xb: + started = true + case 0xc: + doubleHeight = astikit.BoolPtr(false) + doubleSize = astikit.BoolPtr(false) + doubleWidth = astikit.BoolPtr(false) + case 0xd: + doubleHeight = astikit.BoolPtr(true) + case 0xe: + doubleWidth = 
astikit.BoolPtr(true) + case 0xf: + doubleSize = astikit.BoolPtr(true) + default: + if s != nil { + s.parseSpacingAttribute(v) + } + } + + // Style has been set + if color != nil || doubleHeight != nil || doubleSize != nil || doubleWidth != nil || (s != nil && s.hasBeenSet()) { + // Style has changed + if color != li.InlineStyle.TeletextColor || doubleHeight != li.InlineStyle.TeletextDoubleHeight || + doubleSize != li.InlineStyle.TeletextDoubleSize || doubleWidth != li.InlineStyle.TeletextDoubleWidth || + (s != nil && s.hasChanged(li.InlineStyle)) { + // Line has started + if started { + // Append line item + appendTeletextLineItem(&l, li, s) + + // Create new line item + sa := &StyleAttributes{} + *sa = *li.InlineStyle + li = LineItem{InlineStyle: sa} + } + + // Update style attributes + if color != nil && color != li.InlineStyle.TeletextColor { + li.InlineStyle.TeletextColor = color + } + if doubleHeight != nil && doubleHeight != li.InlineStyle.TeletextDoubleHeight { + li.InlineStyle.TeletextDoubleHeight = doubleHeight + } + if doubleSize != nil && doubleSize != li.InlineStyle.TeletextDoubleSize { + li.InlineStyle.TeletextDoubleSize = doubleSize + } + if doubleWidth != nil && doubleWidth != li.InlineStyle.TeletextDoubleWidth { + li.InlineStyle.TeletextDoubleWidth = doubleWidth + } + if s != nil { + s.update(li.InlineStyle) + } + } + } else if started { + // Append text + li.Text += string(d.decode(v)) + } + } + + // Append line item + appendTeletextLineItem(&l, li, s) + + // Append line + if len(l.Items) > 0 { + i.Lines = append(i.Lines, l) + } +} + +func appendTeletextLineItem(l *Line, li LineItem, s styler) { + // There's some text + if len(strings.TrimSpace(li.Text)) > 0 { + // Make sure inline style exists + if li.InlineStyle == nil { + li.InlineStyle = &StyleAttributes{} + } + + // Get number of spaces before + li.InlineStyle.TeletextSpacesBefore = astikit.IntPtr(0) + for _, c := range li.Text { + if c == ' ' { + *li.InlineStyle.TeletextSpacesBefore++ + } 
else { + break + } + } + + // Get number of spaces after + li.InlineStyle.TeletextSpacesAfter = astikit.IntPtr(0) + for idx := len(li.Text) - 1; idx >= 0; idx-- { + if li.Text[idx] == ' ' { + *li.InlineStyle.TeletextSpacesAfter++ + } else { + break + } + } + + // Propagate style attributes + li.InlineStyle.propagateTeletextAttributes() + if s != nil { + s.propagateStyleAttributes(li.InlineStyle) + } + + // Append line item + li.Text = strings.TrimSpace(li.Text) + l.Items = append(l.Items, li) + } +} diff --git a/vendor/github.com/asticode/go-astisub/ttml.go b/vendor/github.com/asticode/go-astisub/ttml.go new file mode 100644 index 000000000..2e8046baa --- /dev/null +++ b/vendor/github.com/asticode/go-astisub/ttml.go @@ -0,0 +1,686 @@ +package astisub + +import ( + "encoding/xml" + "fmt" + "io" + "regexp" + "sort" + "strconv" + "strings" + "time" + + "github.com/asticode/go-astikit" +) + +// https://www.w3.org/TR/ttaf1-dfxp/ +// http://www.skynav.com:8080/ttv/check +// https://www.speechpad.com/captions/ttml + +// TTML languages +const ( + ttmlLanguageChinese = "zh" + ttmlLanguageEnglish = "en" + ttmlLanguageJapanese = "ja" + ttmlLanguageFrench = "fr" + ttmlLanguageNorwegian = "no" +) + +// TTML language mapping +var ttmlLanguageMapping = astikit.NewBiMap(). + Set(ttmlLanguageChinese, LanguageChinese). + Set(ttmlLanguageEnglish, LanguageEnglish). + Set(ttmlLanguageFrench, LanguageFrench). + Set(ttmlLanguageJapanese, LanguageJapanese). 
+ Set(ttmlLanguageNorwegian, LanguageNorwegian) + +// TTML Clock Time Frames and Offset Time +var ( + ttmlRegexpClockTimeFrames = regexp.MustCompile(`\:[\d]+$`) + ttmlRegexpOffsetTime = regexp.MustCompile(`^(\d+(\.\d+)?)(h|m|s|ms|f|t)$`) +) + +// TTMLIn represents an input TTML that must be unmarshaled +// We split it from the output TTML as we can't add strict namespace without breaking retrocompatibility +type TTMLIn struct { + Framerate int `xml:"frameRate,attr"` + Lang string `xml:"lang,attr"` + Metadata TTMLInMetadata `xml:"head>metadata"` + Regions []TTMLInRegion `xml:"head>layout>region"` + Styles []TTMLInStyle `xml:"head>styling>style"` + Subtitles []TTMLInSubtitle `xml:"body>div>p"` + Tickrate int `xml:"tickRate,attr"` + XMLName xml.Name `xml:"tt"` +} + +// metadata returns the Metadata of the TTML +func (t TTMLIn) metadata() (m *Metadata) { + m = &Metadata{ + Framerate: t.Framerate, + Title: t.Metadata.Title, + TTMLCopyright: t.Metadata.Copyright, + } + if v, ok := ttmlLanguageMapping.Get(astikit.StrPad(t.Lang, ' ', 2, astikit.PadCut)); ok { + m.Language = v.(string) + } + return +} + +// TTMLInMetadata represents an input TTML Metadata +type TTMLInMetadata struct { + Copyright string `xml:"copyright"` + Title string `xml:"title"` +} + +// TTMLInStyleAttributes represents input TTML style attributes +type TTMLInStyleAttributes struct { + BackgroundColor *string `xml:"backgroundColor,attr,omitempty"` + Color *string `xml:"color,attr,omitempty"` + Direction *string `xml:"direction,attr,omitempty"` + Display *string `xml:"display,attr,omitempty"` + DisplayAlign *string `xml:"displayAlign,attr,omitempty"` + Extent *string `xml:"extent,attr,omitempty"` + FontFamily *string `xml:"fontFamily,attr,omitempty"` + FontSize *string `xml:"fontSize,attr,omitempty"` + FontStyle *string `xml:"fontStyle,attr,omitempty"` + FontWeight *string `xml:"fontWeight,attr,omitempty"` + LineHeight *string `xml:"lineHeight,attr,omitempty"` + Opacity *string 
`xml:"opacity,attr,omitempty"` + Origin *string `xml:"origin,attr,omitempty"` + Overflow *string `xml:"overflow,attr,omitempty"` + Padding *string `xml:"padding,attr,omitempty"` + ShowBackground *string `xml:"showBackground,attr,omitempty"` + TextAlign *string `xml:"textAlign,attr,omitempty"` + TextDecoration *string `xml:"textDecoration,attr,omitempty"` + TextOutline *string `xml:"textOutline,attr,omitempty"` + UnicodeBidi *string `xml:"unicodeBidi,attr,omitempty"` + Visibility *string `xml:"visibility,attr,omitempty"` + WrapOption *string `xml:"wrapOption,attr,omitempty"` + WritingMode *string `xml:"writingMode,attr,omitempty"` + ZIndex *int `xml:"zIndex,attr,omitempty"` +} + +// StyleAttributes converts TTMLInStyleAttributes into a StyleAttributes +func (s TTMLInStyleAttributes) styleAttributes() (o *StyleAttributes) { + o = &StyleAttributes{ + TTMLBackgroundColor: s.BackgroundColor, + TTMLColor: s.Color, + TTMLDirection: s.Direction, + TTMLDisplay: s.Display, + TTMLDisplayAlign: s.DisplayAlign, + TTMLExtent: s.Extent, + TTMLFontFamily: s.FontFamily, + TTMLFontSize: s.FontSize, + TTMLFontStyle: s.FontStyle, + TTMLFontWeight: s.FontWeight, + TTMLLineHeight: s.LineHeight, + TTMLOpacity: s.Opacity, + TTMLOrigin: s.Origin, + TTMLOverflow: s.Overflow, + TTMLPadding: s.Padding, + TTMLShowBackground: s.ShowBackground, + TTMLTextAlign: s.TextAlign, + TTMLTextDecoration: s.TextDecoration, + TTMLTextOutline: s.TextOutline, + TTMLUnicodeBidi: s.UnicodeBidi, + TTMLVisibility: s.Visibility, + TTMLWrapOption: s.WrapOption, + TTMLWritingMode: s.WritingMode, + TTMLZIndex: s.ZIndex, + } + o.propagateTTMLAttributes() + return +} + +// TTMLInHeader represents an input TTML header +type TTMLInHeader struct { + ID string `xml:"id,attr,omitempty"` + Style string `xml:"style,attr,omitempty"` + TTMLInStyleAttributes +} + +// TTMLInRegion represents an input TTML region +type TTMLInRegion struct { + TTMLInHeader + XMLName xml.Name `xml:"region"` +} + +// TTMLInStyle represents an input 
TTML style +type TTMLInStyle struct { + TTMLInHeader + XMLName xml.Name `xml:"style"` +} + +// TTMLInSubtitle represents an input TTML subtitle +type TTMLInSubtitle struct { + Begin *TTMLInDuration `xml:"begin,attr,omitempty"` + End *TTMLInDuration `xml:"end,attr,omitempty"` + ID string `xml:"id,attr,omitempty"` + Items string `xml:",innerxml"` // We must store inner XML here since there's no tag to describe both any tag and chardata + Region string `xml:"region,attr,omitempty"` + Style string `xml:"style,attr,omitempty"` + TTMLInStyleAttributes +} + +// TTMLInItems represents input TTML items +type TTMLInItems []TTMLInItem + +// UnmarshalXML implements the XML unmarshaler interface +func (i *TTMLInItems) UnmarshalXML(d *xml.Decoder, start xml.StartElement) (err error) { + // Get next tokens + var t xml.Token + for { + // Get next token + if t, err = d.Token(); err != nil { + if err == io.EOF { + break + } + err = fmt.Errorf("astisub: getting next token failed: %w", err) + return + } + + // Start element + if se, ok := t.(xml.StartElement); ok { + var e = TTMLInItem{} + if err = d.DecodeElement(&e, &se); err != nil { + err = fmt.Errorf("astisub: decoding xml.StartElement failed: %w", err) + return + } + *i = append(*i, e) + } else if b, ok := t.(xml.CharData); ok { + var str = strings.TrimSpace(string(b)) + if len(str) > 0 { + *i = append(*i, TTMLInItem{Text: str}) + } + } + } + return nil +} + +// TTMLInItem represents an input TTML item +type TTMLInItem struct { + Style string `xml:"style,attr,omitempty"` + Text string `xml:",chardata"` + TTMLInStyleAttributes + XMLName xml.Name +} + +// TTMLInDuration represents an input TTML duration +type TTMLInDuration struct { + d time.Duration + frames, framerate int // Framerate is in frame/s + ticks, tickrate int // Tickrate is in ticks/s +} + +// UnmarshalText implements the TextUnmarshaler interface +// Possible formats are: +// - hh:mm:ss.mmm +// - hh:mm:ss:fff (fff being frames) +// - [ticks]t ([ticks] being the tick 
amount) +func (d *TTMLInDuration) UnmarshalText(i []byte) (err error) { + // Reset duration + d.d = time.Duration(0) + d.frames = 0 + d.ticks = 0 + + // Check offset time + text := string(i) + if matches := ttmlRegexpOffsetTime.FindStringSubmatch(text); matches != nil { + // Parse value + var value float64 + if value, err = strconv.ParseFloat(matches[1], 64); err != nil { + err = fmt.Errorf("astisub: failed to parse value %s", matches[1]) + return + } + + // Parse metric + metric := matches[3] + + // Update duration + if metric == "t" { + d.ticks = int(value) + } else if metric == "f" { + d.frames = int(value) + } else { + // Get timebase + var timebase time.Duration + switch metric { + case "h": + timebase = time.Hour + case "m": + timebase = time.Minute + case "s": + timebase = time.Second + case "ms": + timebase = time.Millisecond + default: + err = fmt.Errorf("astisub: invalid metric %s", metric) + return + } + + // Update duration + d.d = time.Duration(value * float64(timebase.Nanoseconds())) + } + return + } + + // Extract clock time frames + if indexes := ttmlRegexpClockTimeFrames.FindStringIndex(text); indexes != nil { + // Parse frames + var s = text[indexes[0]+1 : indexes[1]] + if d.frames, err = strconv.Atoi(s); err != nil { + err = fmt.Errorf("astisub: atoi %s failed: %w", s, err) + return + } + + // Update text + text = text[:indexes[0]] + ".000" + } + + d.d, err = parseDuration(text, ".", 3) + return +} + +// duration returns the input TTML Duration's time.Duration +func (d TTMLInDuration) duration() (o time.Duration) { + if d.ticks > 0 && d.tickrate > 0 { + return time.Duration(float64(d.ticks) * 1e9 / float64(d.tickrate)) + } + o = d.d + if d.frames > 0 && d.framerate > 0 { + o += time.Duration(float64(d.frames) / float64(d.framerate) * float64(time.Second.Nanoseconds())) + } + return +} + +// ReadFromTTML parses a .ttml content +func ReadFromTTML(i io.Reader) (o *Subtitles, err error) { + // Init + o = NewSubtitles() + + // Unmarshal XML + var ttml 
TTMLIn + if err = xml.NewDecoder(i).Decode(&ttml); err != nil { + err = fmt.Errorf("astisub: xml decoding failed: %w", err) + return + } + + // Add metadata + o.Metadata = ttml.metadata() + + // Loop through styles + var parentStyles = make(map[string]*Style) + for _, ts := range ttml.Styles { + var s = &Style{ + ID: ts.ID, + InlineStyle: ts.TTMLInStyleAttributes.styleAttributes(), + } + o.Styles[s.ID] = s + if len(ts.Style) > 0 { + parentStyles[ts.Style] = s + } + } + + // Take care of parent styles + for id, s := range parentStyles { + if _, ok := o.Styles[id]; !ok { + err = fmt.Errorf("astisub: Style %s requested by style %s doesn't exist", id, s.ID) + return + } + s.Style = o.Styles[id] + } + + // Loop through regions + for _, tr := range ttml.Regions { + var r = &Region{ + ID: tr.ID, + InlineStyle: tr.TTMLInStyleAttributes.styleAttributes(), + } + if len(tr.Style) > 0 { + if _, ok := o.Styles[tr.Style]; !ok { + err = fmt.Errorf("astisub: Style %s requested by region %s doesn't exist", tr.Style, r.ID) + return + } + r.Style = o.Styles[tr.Style] + } + o.Regions[r.ID] = r + } + + // Loop through subtitles + for _, ts := range ttml.Subtitles { + // Init item + ts.Begin.framerate = ttml.Framerate + ts.Begin.tickrate = ttml.Tickrate + ts.End.framerate = ttml.Framerate + ts.End.tickrate = ttml.Tickrate + + var s = &Item{ + EndAt: ts.End.duration(), + InlineStyle: ts.TTMLInStyleAttributes.styleAttributes(), + StartAt: ts.Begin.duration(), + } + + // Add region + if len(ts.Region) > 0 { + if _, ok := o.Regions[ts.Region]; !ok { + err = fmt.Errorf("astisub: Region %s requested by subtitle between %s and %s doesn't exist", ts.Region, s.StartAt, s.EndAt) + return + } + s.Region = o.Regions[ts.Region] + } + + // Add style + if len(ts.Style) > 0 { + if _, ok := o.Styles[ts.Style]; !ok { + err = fmt.Errorf("astisub: Style %s requested by subtitle between %s and %s doesn't exist", ts.Style, s.StartAt, s.EndAt) + return + } + s.Style = o.Styles[ts.Style] + } + + // Unmarshal 
items + var items = TTMLInItems{} + if err = xml.Unmarshal([]byte(""+ts.Items+""), &items); err != nil { + err = fmt.Errorf("astisub: unmarshaling items failed: %w", err) + return + } + + // Loop through texts + var l = &Line{} + for _, tt := range items { + // New line specified with the "br" tag + if strings.ToLower(tt.XMLName.Local) == "br" { + s.Lines = append(s.Lines, *l) + l = &Line{} + continue + } + + // New line decoded as a line break. This can happen if there's a "br" tag within the text since + // since the go xml unmarshaler will unmarshal a "br" tag as a line break if the field has the + // chardata xml tag. + for idx, li := range strings.Split(tt.Text, "\n") { + // New line + if idx > 0 { + s.Lines = append(s.Lines, *l) + l = &Line{} + } + + // Init line item + var t = LineItem{ + InlineStyle: tt.TTMLInStyleAttributes.styleAttributes(), + Text: strings.TrimSpace(li), + } + + // Add style + if len(tt.Style) > 0 { + if _, ok := o.Styles[tt.Style]; !ok { + err = fmt.Errorf("astisub: Style %s requested by item with text %s doesn't exist", tt.Style, tt.Text) + return + } + t.Style = o.Styles[tt.Style] + } + + // Append items + l.Items = append(l.Items, t) + } + + } + s.Lines = append(s.Lines, *l) + + // Append subtitle + o.Items = append(o.Items, s) + } + return +} + +// TTMLOut represents an output TTML that must be marshaled +// We split it from the input TTML as this time we'll add strict namespaces +type TTMLOut struct { + Lang string `xml:"xml:lang,attr,omitempty"` + Metadata *TTMLOutMetadata `xml:"head>metadata,omitempty"` + Styles []TTMLOutStyle `xml:"head>styling>style,omitempty"` //!\\ Order is important! 
Keep Styling above Layout + Regions []TTMLOutRegion `xml:"head>layout>region,omitempty"` + Subtitles []TTMLOutSubtitle `xml:"body>div>p,omitempty"` + XMLName xml.Name `xml:"http://www.w3.org/ns/ttml tt"` + XMLNamespaceTTM string `xml:"xmlns:ttm,attr"` + XMLNamespaceTTS string `xml:"xmlns:tts,attr"` +} + +// TTMLOutMetadata represents an output TTML Metadata +type TTMLOutMetadata struct { + Copyright string `xml:"ttm:copyright,omitempty"` + Title string `xml:"ttm:title,omitempty"` +} + +// TTMLOutStyleAttributes represents output TTML style attributes +type TTMLOutStyleAttributes struct { + BackgroundColor *string `xml:"tts:backgroundColor,attr,omitempty"` + Color *string `xml:"tts:color,attr,omitempty"` + Direction *string `xml:"tts:direction,attr,omitempty"` + Display *string `xml:"tts:display,attr,omitempty"` + DisplayAlign *string `xml:"tts:displayAlign,attr,omitempty"` + Extent *string `xml:"tts:extent,attr,omitempty"` + FontFamily *string `xml:"tts:fontFamily,attr,omitempty"` + FontSize *string `xml:"tts:fontSize,attr,omitempty"` + FontStyle *string `xml:"tts:fontStyle,attr,omitempty"` + FontWeight *string `xml:"tts:fontWeight,attr,omitempty"` + LineHeight *string `xml:"tts:lineHeight,attr,omitempty"` + Opacity *string `xml:"tts:opacity,attr,omitempty"` + Origin *string `xml:"tts:origin,attr,omitempty"` + Overflow *string `xml:"tts:overflow,attr,omitempty"` + Padding *string `xml:"tts:padding,attr,omitempty"` + ShowBackground *string `xml:"tts:showBackground,attr,omitempty"` + TextAlign *string `xml:"tts:textAlign,attr,omitempty"` + TextDecoration *string `xml:"tts:textDecoration,attr,omitempty"` + TextOutline *string `xml:"tts:textOutline,attr,omitempty"` + UnicodeBidi *string `xml:"tts:unicodeBidi,attr,omitempty"` + Visibility *string `xml:"tts:visibility,attr,omitempty"` + WrapOption *string `xml:"tts:wrapOption,attr,omitempty"` + WritingMode *string `xml:"tts:writingMode,attr,omitempty"` + ZIndex *int `xml:"tts:zIndex,attr,omitempty"` +} + +// 
ttmlOutStyleAttributesFromStyleAttributes converts StyleAttributes into a TTMLOutStyleAttributes +func ttmlOutStyleAttributesFromStyleAttributes(s *StyleAttributes) TTMLOutStyleAttributes { + if s == nil { + return TTMLOutStyleAttributes{} + } + return TTMLOutStyleAttributes{ + BackgroundColor: s.TTMLBackgroundColor, + Color: s.TTMLColor, + Direction: s.TTMLDirection, + Display: s.TTMLDisplay, + DisplayAlign: s.TTMLDisplayAlign, + Extent: s.TTMLExtent, + FontFamily: s.TTMLFontFamily, + FontSize: s.TTMLFontSize, + FontStyle: s.TTMLFontStyle, + FontWeight: s.TTMLFontWeight, + LineHeight: s.TTMLLineHeight, + Opacity: s.TTMLOpacity, + Origin: s.TTMLOrigin, + Overflow: s.TTMLOverflow, + Padding: s.TTMLPadding, + ShowBackground: s.TTMLShowBackground, + TextAlign: s.TTMLTextAlign, + TextDecoration: s.TTMLTextDecoration, + TextOutline: s.TTMLTextOutline, + UnicodeBidi: s.TTMLUnicodeBidi, + Visibility: s.TTMLVisibility, + WrapOption: s.TTMLWrapOption, + WritingMode: s.TTMLWritingMode, + ZIndex: s.TTMLZIndex, + } +} + +// TTMLOutHeader represents an output TTML header +type TTMLOutHeader struct { + ID string `xml:"xml:id,attr,omitempty"` + Style string `xml:"style,attr,omitempty"` + TTMLOutStyleAttributes +} + +// TTMLOutRegion represents an output TTML region +type TTMLOutRegion struct { + TTMLOutHeader + XMLName xml.Name `xml:"region"` +} + +// TTMLOutStyle represents an output TTML style +type TTMLOutStyle struct { + TTMLOutHeader + XMLName xml.Name `xml:"style"` +} + +// TTMLOutSubtitle represents an output TTML subtitle +type TTMLOutSubtitle struct { + Begin TTMLOutDuration `xml:"begin,attr"` + End TTMLOutDuration `xml:"end,attr"` + ID string `xml:"id,attr,omitempty"` + Items []TTMLOutItem + Region string `xml:"region,attr,omitempty"` + Style string `xml:"style,attr,omitempty"` + TTMLOutStyleAttributes +} + +// TTMLOutItem represents an output TTML Item +type TTMLOutItem struct { + Style string `xml:"style,attr,omitempty"` + Text string `xml:",chardata"` + 
TTMLOutStyleAttributes + XMLName xml.Name +} + +// TTMLOutDuration represents an output TTML duration +type TTMLOutDuration time.Duration + +// MarshalText implements the TextMarshaler interface +func (t TTMLOutDuration) MarshalText() ([]byte, error) { + return []byte(formatDuration(time.Duration(t), ".", 3)), nil +} + +// WriteToTTML writes subtitles in .ttml format +func (s Subtitles) WriteToTTML(o io.Writer) (err error) { + // Do not write anything if no subtitles + if len(s.Items) == 0 { + return ErrNoSubtitlesToWrite + } + + // Init TTML + var ttml = TTMLOut{ + XMLNamespaceTTM: "http://www.w3.org/ns/ttml#metadata", + XMLNamespaceTTS: "http://www.w3.org/ns/ttml#styling", + } + + // Add metadata + if s.Metadata != nil { + if v, ok := ttmlLanguageMapping.GetInverse(s.Metadata.Language); ok { + ttml.Lang = v.(string) + } + if len(s.Metadata.TTMLCopyright) > 0 || len(s.Metadata.Title) > 0 { + ttml.Metadata = &TTMLOutMetadata{ + Copyright: s.Metadata.TTMLCopyright, + Title: s.Metadata.Title, + } + } + } + + // Add regions + var k []string + for _, region := range s.Regions { + k = append(k, region.ID) + } + sort.Strings(k) + for _, id := range k { + var ttmlRegion = TTMLOutRegion{TTMLOutHeader: TTMLOutHeader{ + ID: s.Regions[id].ID, + TTMLOutStyleAttributes: ttmlOutStyleAttributesFromStyleAttributes(s.Regions[id].InlineStyle), + }} + if s.Regions[id].Style != nil { + ttmlRegion.Style = s.Regions[id].Style.ID + } + ttml.Regions = append(ttml.Regions, ttmlRegion) + } + + // Add styles + k = []string{} + for _, style := range s.Styles { + k = append(k, style.ID) + } + sort.Strings(k) + for _, id := range k { + var ttmlStyle = TTMLOutStyle{TTMLOutHeader: TTMLOutHeader{ + ID: s.Styles[id].ID, + TTMLOutStyleAttributes: ttmlOutStyleAttributesFromStyleAttributes(s.Styles[id].InlineStyle), + }} + if s.Styles[id].Style != nil { + ttmlStyle.Style = s.Styles[id].Style.ID + } + ttml.Styles = append(ttml.Styles, ttmlStyle) + } + + // Add items + for _, item := range s.Items { + 
// Init subtitle + var ttmlSubtitle = TTMLOutSubtitle{ + Begin: TTMLOutDuration(item.StartAt), + End: TTMLOutDuration(item.EndAt), + TTMLOutStyleAttributes: ttmlOutStyleAttributesFromStyleAttributes(item.InlineStyle), + } + + // Add region + if item.Region != nil { + ttmlSubtitle.Region = item.Region.ID + } + + // Add style + if item.Style != nil { + ttmlSubtitle.Style = item.Style.ID + } + + // Add lines + for _, line := range item.Lines { + // Loop through line items + for idx, lineItem := range line.Items { + // Init ttml item + var ttmlItem = TTMLOutItem{ + Text: lineItem.Text, + TTMLOutStyleAttributes: ttmlOutStyleAttributesFromStyleAttributes(lineItem.InlineStyle), + XMLName: xml.Name{Local: "span"}, + } + // condition to avoid adding space as the last character. + if idx < len(line.Items)-1 { + ttmlItem.Text = ttmlItem.Text + " " + } + + // Add style + if lineItem.Style != nil { + ttmlItem.Style = lineItem.Style.ID + } + + // Add ttml item + ttmlSubtitle.Items = append(ttmlSubtitle.Items, ttmlItem) + } + + // Add line break + ttmlSubtitle.Items = append(ttmlSubtitle.Items, TTMLOutItem{XMLName: xml.Name{Local: "br"}}) + } + + // Remove last line break + if len(ttmlSubtitle.Items) > 0 { + ttmlSubtitle.Items = ttmlSubtitle.Items[:len(ttmlSubtitle.Items)-1] + } + + // Append subtitle + ttml.Subtitles = append(ttml.Subtitles, ttmlSubtitle) + } + + // Marshal XML + var e = xml.NewEncoder(o) + e.Indent("", " ") + if err = e.Encode(ttml); err != nil { + err = fmt.Errorf("astisub: xml encoding failed: %w", err) + return + } + return +} diff --git a/vendor/github.com/asticode/go-astisub/webvtt.go b/vendor/github.com/asticode/go-astisub/webvtt.go new file mode 100644 index 000000000..c186d49f7 --- /dev/null +++ b/vendor/github.com/asticode/go-astisub/webvtt.go @@ -0,0 +1,537 @@ +package astisub + +import ( + "bufio" + "bytes" + "errors" + "fmt" + "io" + "regexp" + "sort" + "strconv" + "strings" + "time" + + "golang.org/x/net/html" +) + +// 
https://www.w3.org/TR/webvtt1/ + +// Constants +const ( + webvttBlockNameComment = "comment" + webvttBlockNameRegion = "region" + webvttBlockNameStyle = "style" + webvttBlockNameText = "text" + webvttTimeBoundariesSeparator = " --> " + webvttTimestampMap = "X-TIMESTAMP-MAP" +) + +// Vars +var ( + bytesWebVTTItalicEndTag = []byte("") + bytesWebVTTItalicStartTag = []byte("") + bytesWebVTTTimeBoundariesSeparator = []byte(webvttTimeBoundariesSeparator) + webVTTRegexpStartTag = regexp.MustCompile(`()`) +) + +// parseDurationWebVTT parses a .vtt duration +func parseDurationWebVTT(i string) (time.Duration, error) { + return parseDuration(i, ".", 3) +} + +// https://tools.ietf.org/html/rfc8216#section-3.5 +// Eg., `X-TIMESTAMP-MAP=LOCAL:00:00:00.000,MPEGTS:900000` => 10s +// `X-TIMESTAMP-MAP=LOCAL:00:00:00.000,MPEGTS:180000` => 2s +func parseTimestampMapWebVTT(line string) (timeOffset time.Duration, err error) { + splits := strings.Split(line, "=") + if len(splits) <= 1 { + err = fmt.Errorf("astisub: invalid X-TIMESTAMP-MAP, no '=' found") + return + } + right := splits[1] + + var local time.Duration + var mpegts int64 + for _, split := range strings.Split(right, ",") { + splits := strings.SplitN(split, ":", 2) + if len(splits) <= 1 { + err = fmt.Errorf("astisub: invalid X-TIMESTAMP-MAP, part %q didn't contain ':'", right) + return + } + + switch strings.ToLower(strings.TrimSpace(splits[0])) { + case "local": + local, err = parseDurationWebVTT(splits[1]) + if err != nil { + err = fmt.Errorf("astisub: parsing webvtt duration failed: %w", err) + return + } + case "mpegts": + mpegts, err = strconv.ParseInt(splits[1], 10, 0) + if err != nil { + err = fmt.Errorf("astisub: parsing int %s failed: %w", splits[1], err) + return + } + } + } + + timeOffset = time.Duration(mpegts)*time.Second/90000 - local + return +} + +// ReadFromWebVTT parses a .vtt content +// TODO Tags (u, i, b) +// TODO Class +func ReadFromWebVTT(i io.Reader) (o *Subtitles, err error) { + // Init + o = 
NewSubtitles() + var scanner = bufio.NewScanner(i) + var line string + var lineNum int + + // Skip the header + for scanner.Scan() { + lineNum++ + line = scanner.Text() + line = strings.TrimPrefix(line, string(BytesBOM)) + if fs := strings.Fields(line); len(fs) > 0 && fs[0] == "WEBVTT" { + break + } + } + + // Scan + var item = &Item{} + var blockName string + var comments []string + var index int + var timeOffset time.Duration + + for scanner.Scan() { + // Fetch line + line = strings.TrimSpace(scanner.Text()) + lineNum++ + + switch { + // Comment + case strings.HasPrefix(line, "NOTE "): + blockName = webvttBlockNameComment + comments = append(comments, strings.TrimPrefix(line, "NOTE ")) + // Empty line + case len(line) == 0: + // Reset block name + blockName = "" + // Region + case strings.HasPrefix(line, "Region: "): + // Add region styles + var r = &Region{InlineStyle: &StyleAttributes{}} + for _, part := range strings.Split(strings.TrimPrefix(line, "Region: "), " ") { + // Split on "=" + var split = strings.Split(part, "=") + if len(split) <= 1 { + err = fmt.Errorf("astisub: line %d: Invalid region style %s", lineNum, part) + return + } + + // Switch on key + switch split[0] { + case "id": + r.ID = split[1] + case "lines": + if r.InlineStyle.WebVTTLines, err = strconv.Atoi(split[1]); err != nil { + err = fmt.Errorf("atoi of %s failed: %w", split[1], err) + return + } + case "regionanchor": + r.InlineStyle.WebVTTRegionAnchor = split[1] + case "scroll": + r.InlineStyle.WebVTTScroll = split[1] + case "viewportanchor": + r.InlineStyle.WebVTTViewportAnchor = split[1] + case "width": + r.InlineStyle.WebVTTWidth = split[1] + } + } + r.InlineStyle.propagateWebVTTAttributes() + + // Add region + o.Regions[r.ID] = r + // Style + case strings.HasPrefix(line, "STYLE"): + blockName = webvttBlockNameStyle + // Time boundaries + case strings.Contains(line, webvttTimeBoundariesSeparator): + // Set block name + blockName = webvttBlockNameText + + // Init new item + item = 
&Item{ + Comments: comments, + Index: index, + InlineStyle: &StyleAttributes{}, + } + + // Reset index + index = 0 + + // Split line on time boundaries + var left = strings.Split(line, webvttTimeBoundariesSeparator) + + // Split line on space to get remaining of time data + var right = strings.Split(left[1], " ") + + // Parse time boundaries + if item.StartAt, err = parseDurationWebVTT(left[0]); err != nil { + err = fmt.Errorf("astisub: line %d: parsing webvtt duration %s failed: %w", lineNum, left[0], err) + return + } + if item.EndAt, err = parseDurationWebVTT(right[0]); err != nil { + err = fmt.Errorf("astisub: line %d: parsing webvtt duration %s failed: %w", lineNum, right[0], err) + return + } + + // Parse style + if len(right) > 1 { + // Add styles + for index := 1; index < len(right); index++ { + // Empty + if right[index] == "" { + continue + } + + // Split line on ":" + var split = strings.Split(right[index], ":") + if len(split) <= 1 { + err = fmt.Errorf("astisub: line %d: Invalid inline style '%s'", lineNum, right[index]) + return + } + + // Switch on key + switch split[0] { + case "align": + item.InlineStyle.WebVTTAlign = split[1] + case "line": + item.InlineStyle.WebVTTLine = split[1] + case "position": + item.InlineStyle.WebVTTPosition = split[1] + case "region": + if _, ok := o.Regions[split[1]]; !ok { + err = fmt.Errorf("astisub: line %d: Unknown region %s", lineNum, split[1]) + return + } + item.Region = o.Regions[split[1]] + case "size": + item.InlineStyle.WebVTTSize = split[1] + case "vertical": + item.InlineStyle.WebVTTVertical = split[1] + } + } + } + item.InlineStyle.propagateWebVTTAttributes() + + // Reset comments + comments = []string{} + + // Append item + o.Items = append(o.Items, item) + + case strings.HasPrefix(line, webvttTimestampMap): + if len(item.Lines) > 0 { + err = errors.New("astisub: found timestamp map after processing subtitle items") + return + } + + timeOffset, err = parseTimestampMapWebVTT(line) + if err != nil { + err = 
fmt.Errorf("astisub: parsing webvtt timestamp map failed: %w", err) + return + } + + // Text + default: + // Switch on block name + switch blockName { + case webvttBlockNameComment: + comments = append(comments, line) + case webvttBlockNameStyle: + // TODO Do something with the style + case webvttBlockNameText: + // Parse line + if l := parseTextWebVTT(line); len(l.Items) > 0 { + item.Lines = append(item.Lines, l) + } + default: + // This is the ID + index, _ = strconv.Atoi(line) + } + } + } + + if timeOffset > 0 { + o.Add(timeOffset) + } + return +} + +// parseTextWebVTT parses the input line to fill the Line +func parseTextWebVTT(i string) (o Line) { + // Create tokenizer + tr := html.NewTokenizer(strings.NewReader(i)) + + // Loop + italic := false + for { + // Get next tag + t := tr.Next() + + // Process error + if err := tr.Err(); err != nil { + break + } + + switch t { + case html.EndTagToken: + // Parse italic + if bytes.Equal(tr.Raw(), bytesWebVTTItalicEndTag) { + italic = false + continue + } + case html.StartTagToken: + // Parse voice name + if matches := webVTTRegexpStartTag.FindStringSubmatch(string(tr.Raw())); len(matches) > 3 { + if s := strings.TrimSpace(matches[3]); s != "" { + o.VoiceName = s + } + continue + } + + // Parse italic + if bytes.Equal(tr.Raw(), bytesWebVTTItalicStartTag) { + italic = true + continue + } + case html.TextToken: + if s := strings.TrimSpace(string(tr.Raw())); s != "" { + // Get style attribute + var sa *StyleAttributes + if italic { + sa = &StyleAttributes{ + WebVTTItalics: italic, + } + sa.propagateWebVTTAttributes() + } + + // Append item + o.Items = append(o.Items, LineItem{ + InlineStyle: sa, + Text: s, + }) + } + } + } + return +} + +// formatDurationWebVTT formats a .vtt duration +func formatDurationWebVTT(i time.Duration) string { + return formatDuration(i, ".", 3) +} + +// WriteToWebVTT writes subtitles in .vtt format +func (s Subtitles) WriteToWebVTT(o io.Writer) (err error) { + // Do not write anything if no 
subtitles + if len(s.Items) == 0 { + err = ErrNoSubtitlesToWrite + return + } + + // Add header + var c []byte + c = append(c, []byte("WEBVTT\n\n")...) + + // Add regions + var k []string + for _, region := range s.Regions { + k = append(k, region.ID) + } + sort.Strings(k) + for _, id := range k { + c = append(c, []byte("Region: id="+s.Regions[id].ID)...) + if s.Regions[id].InlineStyle.WebVTTLines != 0 { + c = append(c, bytesSpace...) + c = append(c, []byte("lines="+strconv.Itoa(s.Regions[id].InlineStyle.WebVTTLines))...) + } else if s.Regions[id].Style != nil && s.Regions[id].Style.InlineStyle != nil && s.Regions[id].Style.InlineStyle.WebVTTLines != 0 { + c = append(c, bytesSpace...) + c = append(c, []byte("lines="+strconv.Itoa(s.Regions[id].Style.InlineStyle.WebVTTLines))...) + } + if s.Regions[id].InlineStyle.WebVTTRegionAnchor != "" { + c = append(c, bytesSpace...) + c = append(c, []byte("regionanchor="+s.Regions[id].InlineStyle.WebVTTRegionAnchor)...) + } else if s.Regions[id].Style != nil && s.Regions[id].Style.InlineStyle != nil && s.Regions[id].Style.InlineStyle.WebVTTRegionAnchor != "" { + c = append(c, bytesSpace...) + c = append(c, []byte("regionanchor="+s.Regions[id].Style.InlineStyle.WebVTTRegionAnchor)...) + } + if s.Regions[id].InlineStyle.WebVTTScroll != "" { + c = append(c, bytesSpace...) + c = append(c, []byte("scroll="+s.Regions[id].InlineStyle.WebVTTScroll)...) + } else if s.Regions[id].Style != nil && s.Regions[id].Style.InlineStyle != nil && s.Regions[id].Style.InlineStyle.WebVTTScroll != "" { + c = append(c, bytesSpace...) + c = append(c, []byte("scroll="+s.Regions[id].Style.InlineStyle.WebVTTScroll)...) + } + if s.Regions[id].InlineStyle.WebVTTViewportAnchor != "" { + c = append(c, bytesSpace...) + c = append(c, []byte("viewportanchor="+s.Regions[id].InlineStyle.WebVTTViewportAnchor)...) 
+ } else if s.Regions[id].Style != nil && s.Regions[id].Style.InlineStyle != nil && s.Regions[id].Style.InlineStyle.WebVTTViewportAnchor != "" { + c = append(c, bytesSpace...) + c = append(c, []byte("viewportanchor="+s.Regions[id].Style.InlineStyle.WebVTTViewportAnchor)...) + } + if s.Regions[id].InlineStyle.WebVTTWidth != "" { + c = append(c, bytesSpace...) + c = append(c, []byte("width="+s.Regions[id].InlineStyle.WebVTTWidth)...) + } else if s.Regions[id].Style != nil && s.Regions[id].Style.InlineStyle != nil && s.Regions[id].Style.InlineStyle.WebVTTWidth != "" { + c = append(c, bytesSpace...) + c = append(c, []byte("width="+s.Regions[id].Style.InlineStyle.WebVTTWidth)...) + } + c = append(c, bytesLineSeparator...) + } + if len(s.Regions) > 0 { + c = append(c, bytesLineSeparator...) + } + + // Loop through subtitles + for index, item := range s.Items { + // Add comments + if len(item.Comments) > 0 { + c = append(c, []byte("NOTE ")...) + for _, comment := range item.Comments { + c = append(c, []byte(comment)...) + c = append(c, bytesLineSeparator...) + } + c = append(c, bytesLineSeparator...) + } + + // Add time boundaries + c = append(c, []byte(strconv.Itoa(index+1))...) + c = append(c, bytesLineSeparator...) + c = append(c, []byte(formatDurationWebVTT(item.StartAt))...) + c = append(c, bytesWebVTTTimeBoundariesSeparator...) + c = append(c, []byte(formatDurationWebVTT(item.EndAt))...) + + // Add styles + if item.InlineStyle != nil { + if item.InlineStyle.WebVTTAlign != "" { + c = append(c, bytesSpace...) + c = append(c, []byte("align:"+item.InlineStyle.WebVTTAlign)...) + } else if item.Style != nil && item.Style.InlineStyle != nil && item.Style.InlineStyle.WebVTTAlign != "" { + c = append(c, bytesSpace...) + c = append(c, []byte("align:"+item.Style.InlineStyle.WebVTTAlign)...) + } + if item.InlineStyle.WebVTTLine != "" { + c = append(c, bytesSpace...) + c = append(c, []byte("line:"+item.InlineStyle.WebVTTLine)...) 
+ } else if item.Style != nil && item.Style.InlineStyle != nil && item.Style.InlineStyle.WebVTTLine != "" { + c = append(c, bytesSpace...) + c = append(c, []byte("line:"+item.Style.InlineStyle.WebVTTLine)...) + } + if item.InlineStyle.WebVTTPosition != "" { + c = append(c, bytesSpace...) + c = append(c, []byte("position:"+item.InlineStyle.WebVTTPosition)...) + } else if item.Style != nil && item.Style.InlineStyle != nil && item.Style.InlineStyle.WebVTTPosition != "" { + c = append(c, bytesSpace...) + c = append(c, []byte("position:"+item.Style.InlineStyle.WebVTTPosition)...) + } + if item.Region != nil { + c = append(c, bytesSpace...) + c = append(c, []byte("region:"+item.Region.ID)...) + } + if item.InlineStyle.WebVTTSize != "" { + c = append(c, bytesSpace...) + c = append(c, []byte("size:"+item.InlineStyle.WebVTTSize)...) + } else if item.Style != nil && item.Style.InlineStyle != nil && item.Style.InlineStyle.WebVTTSize != "" { + c = append(c, bytesSpace...) + c = append(c, []byte("size:"+item.Style.InlineStyle.WebVTTSize)...) + } + if item.InlineStyle.WebVTTVertical != "" { + c = append(c, bytesSpace...) + c = append(c, []byte("vertical:"+item.InlineStyle.WebVTTVertical)...) + } else if item.Style != nil && item.Style.InlineStyle != nil && item.Style.InlineStyle.WebVTTVertical != "" { + c = append(c, bytesSpace...) + c = append(c, []byte("vertical:"+item.Style.InlineStyle.WebVTTVertical)...) + } + } + + // Add new line + c = append(c, bytesLineSeparator...) + + // Loop through lines + for _, l := range item.Lines { + c = append(c, l.webVTTBytes()...) + } + + // Add new line + c = append(c, bytesLineSeparator...) + } + + // Remove last new line + c = c[:len(c)-1] + + // Write + if _, err = o.Write(c); err != nil { + err = fmt.Errorf("astisub: writing failed: %w", err) + return + } + return +} + +func (l Line) webVTTBytes() (c []byte) { + if l.VoiceName != "" { + c = append(c, []byte("")...) + } + for idx, li := range l.Items { + c = append(c, li.webVTTBytes()...) 
+ // condition to avoid adding space as the last character. + if idx < len(l.Items)-1 { + c = append(c, []byte(" ")...) + } + } + c = append(c, bytesLineSeparator...) + return +} + +func (li LineItem) webVTTBytes() (c []byte) { + // Get color + var color string + if li.InlineStyle != nil && li.InlineStyle.TTMLColor != nil { + color = cssColor(*li.InlineStyle.TTMLColor) + } + + // Get italics + i := li.InlineStyle != nil && li.InlineStyle.WebVTTItalics + + // Append + if color != "" { + c = append(c, []byte("")...) + } + if i { + c = append(c, []byte("")...) + } + c = append(c, []byte(li.Text)...) + if i { + c = append(c, []byte("")...) + } + if color != "" { + c = append(c, []byte("")...) + } + return +} + +func cssColor(rgb string) string { + colors := map[string]string{ + "#00ffff": "cyan", // narrator, thought + "#ffff00": "yellow", // out of vision + "#ff0000": "red", // noises + "#ff00ff": "magenta", // song + "#00ff00": "lime", // foreign speak + } + return colors[strings.ToLower(rgb)] // returning the empty string is ok +} diff --git a/vendor/github.com/asticode/go-astits/.gitignore b/vendor/github.com/asticode/go-astits/.gitignore new file mode 100644 index 000000000..5be2b41d1 --- /dev/null +++ b/vendor/github.com/asticode/go-astits/.gitignore @@ -0,0 +1,5 @@ +.DS_Store +Thumbs.db +.idea/ +cover* +test diff --git a/vendor/github.com/asticode/go-astits/.travis.yml b/vendor/github.com/asticode/go-astits/.travis.yml new file mode 100644 index 000000000..93968bfd5 --- /dev/null +++ b/vendor/github.com/asticode/go-astits/.travis.yml @@ -0,0 +1,14 @@ +language: go +go: + - 1.x + - tip +install: + - go get -t ./... 
+ - go get golang.org/x/tools/cmd/cover + - go get github.com/mattn/goveralls +matrix: + allow_failures: + - go: tip +script: + - go test -race -v -coverprofile=coverage.out + - $HOME/gopath/bin/goveralls -coverprofile=coverage.out -service=travis-ci \ No newline at end of file diff --git a/vendor/github.com/asticode/go-astits/LICENSE b/vendor/github.com/asticode/go-astits/LICENSE new file mode 100644 index 000000000..d9954a4c7 --- /dev/null +++ b/vendor/github.com/asticode/go-astits/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Quentin Renard + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/vendor/github.com/asticode/go-astits/README.md b/vendor/github.com/asticode/go-astits/README.md new file mode 100644 index 000000000..e5756c6cd --- /dev/null +++ b/vendor/github.com/asticode/go-astits/README.md @@ -0,0 +1,193 @@ +[![GoReportCard](http://goreportcard.com/badge/github.com/asticode/go-astits)](http://goreportcard.com/report/github.com/asticode/go-astits) +[![GoDoc](https://godoc.org/github.com/asticode/go-astits?status.svg)](https://godoc.org/github.com/asticode/go-astits) +[![Travis](https://travis-ci.org/asticode/go-astits.svg?branch=master)](https://travis-ci.org/asticode/go-astits#) +[![Coveralls](https://coveralls.io/repos/github/asticode/go-astits/badge.svg?branch=master)](https://coveralls.io/github/asticode/go-astits) + +This is a Golang library to natively demux and mux MPEG Transport Streams (ts) in GO. + +WARNING: this library is not yet production ready. Use at your own risks! + +# Installation + +To install the library use the following: + + go get -u github.com/asticode/go-astits/... + +# Before looking at the code... + +The transport stream is made of packets.
+Each packet has a header, an optional adaptation field and a payload.
+Several payloads can be appended and parsed as a data. + +``` + TRANSPORT STREAM + +--------------------------------------------------------------------------------------------------+ + | | + + PACKET PACKET + +----------------------------------------------+----------------------------------------------+---- + | | | + + +--------+---------------------------+---------+--------+---------------------------+---------+ + | HEADER | OPTIONAL ADAPTATION FIELD | PAYLOAD | HEADER | OPTIONAL ADAPTATION FIELD | PAYLOAD | ... + +--------+---------------------------+---------+--------+---------------------------+---------+ + + | | | | + +---------+ +---------+ + | | + +----------------------------------------------+ + DATA +``` + +# Using the library in your code + +WARNING: the code below doesn't handle errors for readability purposes. However you SHOULD! + +## Demux + +```go +// Create a cancellable context in case you want to stop reading packets/data any time you want +ctx, cancel := context.WithCancel(context.Background()) + +// Handle SIGTERM signal +ch := make(chan os.Signal, 1) +signal.Notify(ch, syscall.SIGTERM) +go func() { + <-ch + cancel() +}() + +// Open your file or initialize any kind of io.Reader +// Buffering using bufio.Reader is recommended for performance +f, _ := os.Open("/path/to/file.ts") +defer f.Close() + +// Create the demuxer +dmx := astits.NewDemuxer(ctx, f) +for { + // Get the next data + d, _ := dmx.NextData() + + // Data is a PMT data + if d.PMT != nil { + // Loop through elementary streams + for _, es := range d.PMT.ElementaryStreams { + fmt.Printf("Stream detected: %d\n", es.ElementaryPID) + } + return + } +} +``` + +## Mux + +```go +// Create a cancellable context in case you want to stop writing packets/data any time you want +ctx, cancel := context.WithCancel(context.Background()) + +// Handle SIGTERM signal +ch := make(chan os.Signal, 1) +signal.Notify(ch, syscall.SIGTERM) +go func() { + <-ch + cancel() +}() + +// Create your file or 
initialize any kind of io.Writer +// Buffering using bufio.Writer is recommended for performance +f, _ := os.Create("/path/to/file.ts") +defer f.Close() + +// Create the muxer +mx := astits.NewMuxer(ctx, f) + +// Add an elementary stream +mx.AddElementaryStream(astits.PMTElementaryStream{ + ElementaryPID: 1, + StreamType: astits.StreamTypeMetadata, +}) + +// Write tables +// Using that function is not mandatory, WriteData will retransmit tables from time to time +mx.WriteTables() + +// Write data +mx.WriteData(&astits.MuxerData{ + PES: &astits.PESData{ + Data: []byte("test"), + }, + PID: 1, +}) +``` + +## Options + +In order to pass options to the demuxer or the muxer, look for the methods prefixed with `DemuxerOpt` or `MuxerOpt` and add them upon calling `NewDemuxer` or `NewMuxer` : + +```go +// This is your custom packets parser +p := func(ps []*astits.Packet) (ds []*astits.Data, skip bool, err error) { + // This is your logic + skip = true + return +} + +// Now you can create a demuxer with the proper options +dmx := NewDemuxer(ctx, f, DemuxerOptPacketSize(192), DemuxerOptPacketsParser(p)) +``` + +# CLI + +This library provides 2 CLIs that will automatically get installed in `GOPATH/bin` on `go get` execution. 
+ +## astits-probe + +### List streams + + $ astits-probe -i -f + +### List packets + + $ astits-probe packets -i + +### List data + + $ astits-probe data -i -d + +## astits-es-split + +### Split streams into separate .ts files + + $ astits-es-split -o + +# Features and roadmap + +- [x] Add demuxer +- [x] Add muxer +- [x] Demux PES packets +- [x] Mux PES packets +- [x] Demux PAT packets +- [x] Mux PAT packets +- [x] Demux PMT packets +- [x] Mux PMT packets +- [x] Demux EIT packets +- [ ] Mux EIT packets +- [x] Demux NIT packets +- [ ] Mux NIT packets +- [x] Demux SDT packets +- [ ] Mux SDT packets +- [x] Demux TOT packets +- [ ] Mux TOT packets +- [ ] Demux BAT packets +- [ ] Mux BAT packets +- [ ] Demux DIT packets +- [ ] Mux DIT packets +- [ ] Demux RST packets +- [ ] Mux RST packets +- [ ] Demux SIT packets +- [ ] Mux SIT packets +- [ ] Mux ST packets +- [ ] Demux TDT packets +- [ ] Mux TDT packets +- [ ] Demux TSDT packets +- [ ] Mux TSDT packets diff --git a/vendor/github.com/asticode/go-astits/clock_reference.go b/vendor/github.com/asticode/go-astits/clock_reference.go new file mode 100644 index 000000000..73c147c6f --- /dev/null +++ b/vendor/github.com/asticode/go-astits/clock_reference.go @@ -0,0 +1,29 @@ +package astits + +import ( + "time" +) + +// ClockReference represents a clock reference +// Base is based on a 90 kHz clock and extension is based on a 27 MHz clock +type ClockReference struct { + Base, Extension int64 +} + +// newClockReference builds a new clock reference +func newClockReference(base, extension int64) *ClockReference { + return &ClockReference{ + Base: base, + Extension: extension, + } +} + +// Duration converts the clock reference into duration +func (p ClockReference) Duration() time.Duration { + return time.Duration(p.Base*1e9/90000) + time.Duration(p.Extension*1e9/27000000) +} + +// Time converts the clock reference into time +func (p ClockReference) Time() time.Time { + return time.Unix(0, p.Duration().Nanoseconds()) +} diff --git 
a/vendor/github.com/asticode/go-astits/crc32.go b/vendor/github.com/asticode/go-astits/crc32.go new file mode 100644 index 000000000..5a3f601bc --- /dev/null +++ b/vendor/github.com/asticode/go-astits/crc32.go @@ -0,0 +1,25 @@ +package astits + +const ( + crc32Polynomial = uint32(0xffffffff) +) + +// computeCRC32 computes a CRC32 +// https://stackoverflow.com/questions/35034042/how-to-calculate-crc32-in-psi-si-packet +func computeCRC32(bs []byte) uint32 { + return updateCRC32(crc32Polynomial, bs) +} + +func updateCRC32(crc32 uint32, bs []byte) uint32 { + for _, b := range bs { + for i := 0; i < 8; i++ { + if (crc32 >= uint32(0x80000000)) != (b >= uint8(0x80)) { + crc32 = (crc32 << 1) ^ 0x04C11DB7 + } else { + crc32 = crc32 << 1 + } + b <<= 1 + } + } + return crc32 +} diff --git a/vendor/github.com/asticode/go-astits/data.go b/vendor/github.com/asticode/go-astits/data.go new file mode 100644 index 000000000..f06bdab31 --- /dev/null +++ b/vendor/github.com/asticode/go-astits/data.go @@ -0,0 +1,117 @@ +package astits + +import ( + "fmt" + + "github.com/asticode/go-astikit" +) + +// PIDs +const ( + PIDPAT uint16 = 0x0 // Program Association Table (PAT) contains a directory listing of all Program Map Tables. + PIDCAT uint16 = 0x1 // Conditional Access Table (CAT) contains a directory listing of all ITU-T Rec. H.222 entitlement management message streams used by Program Map Tables. 
+ PIDTSDT uint16 = 0x2 // Transport Stream Description Table (TSDT) contains descriptors related to the overall transport stream + PIDNull uint16 = 0x1fff // Null Packet (used for fixed bandwidth padding) +) + +// DemuxerData represents a data parsed by Demuxer +type DemuxerData struct { + EIT *EITData + FirstPacket *Packet + NIT *NITData + PAT *PATData + PES *PESData + PID uint16 + PMT *PMTData + SDT *SDTData + TOT *TOTData +} + +// MuxerData represents a data to be written by Muxer +type MuxerData struct { + PID uint16 + AdaptationField *PacketAdaptationField + PES *PESData +} + +// parseData parses a payload spanning over multiple packets and returns a set of data +func parseData(ps []*Packet, prs PacketsParser, pm programMap) (ds []*DemuxerData, err error) { + // Use custom parser first + if prs != nil { + var skip bool + if ds, skip, err = prs(ps); err != nil { + err = fmt.Errorf("astits: custom packets parsing failed: %w", err) + return + } else if skip { + return + } + } + + // Get payload length + var l int + for _, p := range ps { + l += len(p.Payload) + } + + // Append payload + var payload = make([]byte, l) + var c int + for _, p := range ps { + c += copy(payload[c:], p.Payload) + } + + // Create reader + i := astikit.NewBytesIterator(payload) + + // Parse PID + pid := ps[0].Header.PID + + // Parse payload + if pid == PIDCAT { + // Information in a CAT payload is private and dependent on the CA system. 
Use the PacketsParser + // to parse this type of payload + } else if isPSIPayload(pid, pm) { + // Parse PSI data + var psiData *PSIData + if psiData, err = parsePSIData(i); err != nil { + err = fmt.Errorf("astits: parsing PSI data failed: %w", err) + return + } + + // Append data + ds = psiData.toData(ps[0], pid) + } else if isPESPayload(payload) { + // Parse PES data + var pesData *PESData + if pesData, err = parsePESData(i); err != nil { + err = fmt.Errorf("astits: parsing PES data failed: %w", err) + return + } + + // Append data + ds = append(ds, &DemuxerData{ + FirstPacket: ps[0], + PES: pesData, + PID: pid, + }) + } + return +} + +// isPSIPayload checks whether the payload is a PSI one +func isPSIPayload(pid uint16, pm programMap) bool { + return pid == PIDPAT || // PAT + pm.exists(pid) || // PMT + ((pid >= 0x10 && pid <= 0x14) || (pid >= 0x1e && pid <= 0x1f)) //DVB +} + +// isPESPayload checks whether the payload is a PES one +func isPESPayload(i []byte) bool { + // Packet is not big enough + if len(i) < 3 { + return false + } + + // Check prefix + return uint32(i[0])<<16|uint32(i[1])<<8|uint32(i[2]) == 1 +} diff --git a/vendor/github.com/asticode/go-astits/data_eit.go b/vendor/github.com/asticode/go-astits/data_eit.go new file mode 100644 index 000000000..4dce53a2a --- /dev/null +++ b/vendor/github.com/asticode/go-astits/data_eit.go @@ -0,0 +1,124 @@ +package astits + +import ( + "fmt" + "time" + + "github.com/asticode/go-astikit" +) + +// EITData represents an EIT data +// Page: 36 | Chapter: 5.2.4 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf +// (barbashov) the link above can be broken, alternative: https://dvb.org/wp-content/uploads/2019/12/a038_tm1217r37_en300468v1_17_1_-_rev-134_-_si_specification.pdf +type EITData struct { + Events []*EITDataEvent + LastTableID uint8 + OriginalNetworkID uint16 + SegmentLastSectionNumber uint8 + ServiceID uint16 + TransportStreamID uint16 +} + +// EITDataEvent represents an EIT 
data event +type EITDataEvent struct { + Descriptors []*Descriptor + Duration time.Duration + EventID uint16 + HasFreeCSAMode bool // When true indicates that access to one or more streams may be controlled by a CA system. + RunningStatus uint8 + StartTime time.Time +} + +// parseEITSection parses an EIT section +func parseEITSection(i *astikit.BytesIterator, offsetSectionsEnd int, tableIDExtension uint16) (d *EITData, err error) { + // Create data + d = &EITData{ServiceID: tableIDExtension} + + // Get next 2 bytes + var bs []byte + if bs, err = i.NextBytesNoCopy(2); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Transport stream ID + d.TransportStreamID = uint16(bs[0])<<8 | uint16(bs[1]) + + // Get next 2 bytes + if bs, err = i.NextBytesNoCopy(2); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Original network ID + d.OriginalNetworkID = uint16(bs[0])<<8 | uint16(bs[1]) + + // Get next byte + var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Segment last section number + d.SegmentLastSectionNumber = uint8(b) + + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Last table ID + d.LastTableID = uint8(b) + + // Loop until end of section data is reached + for i.Offset() < offsetSectionsEnd { + // Get next 2 bytes + if bs, err = i.NextBytesNoCopy(2); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Event ID + var e = &EITDataEvent{} + e.EventID = uint16(bs[0])<<8 | uint16(bs[1]) + + // Start time + if e.StartTime, err = parseDVBTime(i); err != nil { + err = fmt.Errorf("astits: parsing DVB time") + return + } + + // Duration + if e.Duration, err = parseDVBDurationSeconds(i); err != nil { + err = fmt.Errorf("astits: parsing DVB duration seconds 
failed: %w", err) + return + } + + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Running status + e.RunningStatus = uint8(b) >> 5 + + // Free CA mode + e.HasFreeCSAMode = uint8(b&0x10) > 0 + + // We need to rewind since the current byte is used by the descriptor as well + i.Skip(-1) + + // Descriptors + if e.Descriptors, err = parseDescriptors(i); err != nil { + err = fmt.Errorf("astits: parsing descriptors failed: %w", err) + return + } + + // Add event + d.Events = append(d.Events, e) + } + return +} diff --git a/vendor/github.com/asticode/go-astits/data_nit.go b/vendor/github.com/asticode/go-astits/data_nit.go new file mode 100644 index 000000000..5191b5d20 --- /dev/null +++ b/vendor/github.com/asticode/go-astits/data_nit.go @@ -0,0 +1,80 @@ +package astits + +import ( + "fmt" + + "github.com/asticode/go-astikit" +) + +// NITData represents a NIT data +// Page: 29 | Chapter: 5.2.1 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf +// (barbashov) the link above can be broken, alternative: https://dvb.org/wp-content/uploads/2019/12/a038_tm1217r37_en300468v1_17_1_-_rev-134_-_si_specification.pdf +type NITData struct { + NetworkDescriptors []*Descriptor + NetworkID uint16 + TransportStreams []*NITDataTransportStream +} + +// NITDataTransportStream represents a NIT data transport stream +type NITDataTransportStream struct { + OriginalNetworkID uint16 + TransportDescriptors []*Descriptor + TransportStreamID uint16 +} + +// parseNITSection parses a NIT section +func parseNITSection(i *astikit.BytesIterator, tableIDExtension uint16) (d *NITData, err error) { + // Create data + d = &NITData{NetworkID: tableIDExtension} + + // Network descriptors + if d.NetworkDescriptors, err = parseDescriptors(i); err != nil { + err = fmt.Errorf("astits: parsing descriptors failed: %w", err) + return + } + + // Get next bytes + var bs []byte + if bs, err = 
i.NextBytesNoCopy(2); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Transport stream loop length + transportStreamLoopLength := int(uint16(bs[0]&0xf)<<8 | uint16(bs[1])) + + // Transport stream loop + offsetEnd := i.Offset() + transportStreamLoopLength + for i.Offset() < offsetEnd { + // Create transport stream + ts := &NITDataTransportStream{} + + // Get next bytes + if bs, err = i.NextBytesNoCopy(2); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Transport stream ID + ts.TransportStreamID = uint16(bs[0])<<8 | uint16(bs[1]) + + // Get next bytes + if bs, err = i.NextBytesNoCopy(2); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Original network ID + ts.OriginalNetworkID = uint16(bs[0])<<8 | uint16(bs[1]) + + // Transport descriptors + if ts.TransportDescriptors, err = parseDescriptors(i); err != nil { + err = fmt.Errorf("astits: parsing descriptors failed: %w", err) + return + } + + // Append transport stream + d.TransportStreams = append(d.TransportStreams, ts) + } + return +} diff --git a/vendor/github.com/asticode/go-astits/data_pat.go b/vendor/github.com/asticode/go-astits/data_pat.go new file mode 100644 index 000000000..3b5b55511 --- /dev/null +++ b/vendor/github.com/asticode/go-astits/data_pat.go @@ -0,0 +1,63 @@ +package astits + +import ( + "fmt" + + "github.com/asticode/go-astikit" +) + +const ( + patSectionEntryBytesSize = 4 // 16 bits + 3 reserved + 13 bits = 32 bits +) + +// PATData represents a PAT data +// https://en.wikipedia.org/wiki/Program-specific_information +type PATData struct { + Programs []*PATProgram + TransportStreamID uint16 +} + +// PATProgram represents a PAT program +type PATProgram struct { + ProgramMapID uint16 // The packet identifier that contains the associated PMT + ProgramNumber uint16 // Relates to the Table ID extension in the associated PMT. 
A value of 0 is reserved for a NIT packet identifier. +} + +// parsePATSection parses a PAT section +func parsePATSection(i *astikit.BytesIterator, offsetSectionsEnd int, tableIDExtension uint16) (d *PATData, err error) { + // Create data + d = &PATData{TransportStreamID: tableIDExtension} + + // Loop until end of section data is reached + for i.Offset() < offsetSectionsEnd { + // Get next bytes + var bs []byte + if bs, err = i.NextBytesNoCopy(4); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Append program + d.Programs = append(d.Programs, &PATProgram{ + ProgramMapID: uint16(bs[2]&0x1f)<<8 | uint16(bs[3]), + ProgramNumber: uint16(bs[0])<<8 | uint16(bs[1]), + }) + } + return +} + +func calcPATSectionLength(d *PATData) uint16 { + return uint16(4 * len(d.Programs)) +} + +func writePATSection(w *astikit.BitsWriter, d *PATData) (int, error) { + b := astikit.NewBitsWriterBatch(w) + + for _, p := range d.Programs { + b.Write(p.ProgramNumber) + b.WriteN(uint8(0xff), 3) + b.WriteN(p.ProgramMapID, 13) + } + + return len(d.Programs) * patSectionEntryBytesSize, b.Err() +} diff --git a/vendor/github.com/asticode/go-astits/data_pes.go b/vendor/github.com/asticode/go-astits/data_pes.go new file mode 100644 index 000000000..9b3493f4b --- /dev/null +++ b/vendor/github.com/asticode/go-astits/data_pes.go @@ -0,0 +1,747 @@ +package astits + +import ( + "fmt" + + "github.com/asticode/go-astikit" +) + +// P-STD buffer scales +const ( + PSTDBufferScale128Bytes = 0 + PSTDBufferScale1024Bytes = 1 +) + +// PTS DTS indicator +const ( + PTSDTSIndicatorBothPresent = 3 + PTSDTSIndicatorIsForbidden = 1 + PTSDTSIndicatorNoPTSOrDTS = 0 + PTSDTSIndicatorOnlyPTS = 2 +) + +// Stream IDs +const ( + StreamIDPrivateStream1 = 189 + StreamIDPaddingStream = 190 + StreamIDPrivateStream2 = 191 +) + +// Trick mode controls +const ( + TrickModeControlFastForward = 0 + TrickModeControlFastReverse = 3 + TrickModeControlFreezeFrame = 2 + 
TrickModeControlSlowMotion = 1 + TrickModeControlSlowReverse = 4 +) + +const ( + pesHeaderLength = 6 + ptsOrDTSByteLength = 5 + escrLength = 6 + dsmTrickModeLength = 1 +) + +// PESData represents a PES data +// https://en.wikipedia.org/wiki/Packetized_elementary_stream +// http://dvd.sourceforge.net/dvdinfo/pes-hdr.html +// http://happy.emu.id.au/lab/tut/dttb/dtbtut4b.htm +type PESData struct { + Data []byte + Header *PESHeader +} + +// PESHeader represents a packet PES header +type PESHeader struct { + OptionalHeader *PESOptionalHeader + PacketLength uint16 // Specifies the number of bytes remaining in the packet after this field. Can be zero. If the PES packet length is set to zero, the PES packet can be of any length. A value of zero for the PES packet length can be used only when the PES packet payload is a video elementary stream. + StreamID uint8 // Examples: Audio streams (0xC0-0xDF), Video streams (0xE0-0xEF) +} + +// PESOptionalHeader represents a PES optional header +type PESOptionalHeader struct { + AdditionalCopyInfo uint8 + CRC uint16 + DataAlignmentIndicator bool // True indicates that the PES packet header is immediately followed by the video start code or audio syncword + DSMTrickMode *DSMTrickMode + DTS *ClockReference + ESCR *ClockReference + ESRate uint32 + Extension2Data []byte + Extension2Length uint8 + HasAdditionalCopyInfo bool + HasCRC bool + HasDSMTrickMode bool + HasESCR bool + HasESRate bool + HasExtension bool + HasExtension2 bool + HasOptionalFields bool + HasPackHeaderField bool + HasPrivateData bool + HasProgramPacketSequenceCounter bool + HasPSTDBuffer bool + HeaderLength uint8 + IsCopyrighted bool + IsOriginal bool + MarkerBits uint8 + MPEG1OrMPEG2ID uint8 + OriginalStuffingLength uint8 + PacketSequenceCounter uint8 + PackField uint8 + Priority bool + PrivateData []byte + PSTDBufferScale uint8 + PSTDBufferSize uint16 + PTS *ClockReference + PTSDTSIndicator uint8 + ScramblingControl uint8 +} + +// DSMTrickMode represents a DSM trick 
mode +// https://books.google.fr/books?id=vwUrAwAAQBAJ&pg=PT501&lpg=PT501&dq=dsm+trick+mode+control&source=bl&ots=fI-9IHXMRL&sig=PWnhxrsoMWNQcl1rMCPmJGNO9Ds&hl=fr&sa=X&ved=0ahUKEwjogafD8bjXAhVQ3KQKHeHKD5oQ6AEINDAB#v=onepage&q=dsm%20trick%20mode%20control&f=false +type DSMTrickMode struct { + FieldID uint8 + FrequencyTruncation uint8 + IntraSliceRefresh uint8 + RepeatControl uint8 + TrickModeControl uint8 +} + +func (h *PESHeader) IsVideoStream() bool { + return h.StreamID == 0xe0 || + h.StreamID == 0xfd +} + +// parsePESData parses a PES data +func parsePESData(i *astikit.BytesIterator) (d *PESData, err error) { + // Create data + d = &PESData{} + + // Skip first 3 bytes that are there to identify the PES payload + i.Seek(3) + + // Parse header + var dataStart, dataEnd int + if d.Header, dataStart, dataEnd, err = parsePESHeader(i); err != nil { + err = fmt.Errorf("astits: parsing PES header failed: %w", err) + return + } + + // Seek to data + i.Seek(dataStart) + + // Extract data + if d.Data, err = i.NextBytes(dataEnd - dataStart); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + return +} + +// hasPESOptionalHeader checks whether the data has a PES optional header +func hasPESOptionalHeader(streamID uint8) bool { + return streamID != StreamIDPaddingStream && streamID != StreamIDPrivateStream2 +} + +// parsePESData parses a PES header +func parsePESHeader(i *astikit.BytesIterator) (h *PESHeader, dataStart, dataEnd int, err error) { + // Create header + h = &PESHeader{} + + // Get next byte + var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Stream ID + h.StreamID = uint8(b) + + // Get next bytes + var bs []byte + if bs, err = i.NextBytesNoCopy(2); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Length + h.PacketLength = uint16(bs[0])<<8 | uint16(bs[1]) + + // Update data end + if 
h.PacketLength > 0 { + dataEnd = i.Offset() + int(h.PacketLength) + } else { + dataEnd = i.Len() + } + + // Optional header + if hasPESOptionalHeader(h.StreamID) { + if h.OptionalHeader, dataStart, err = parsePESOptionalHeader(i); err != nil { + err = fmt.Errorf("astits: parsing PES optional header failed: %w", err) + return + } + } else { + dataStart = i.Offset() + } + return +} + +// parsePESOptionalHeader parses a PES optional header +func parsePESOptionalHeader(i *astikit.BytesIterator) (h *PESOptionalHeader, dataStart int, err error) { + // Create header + h = &PESOptionalHeader{} + + // Get next byte + var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Marker bits + h.MarkerBits = uint8(b) >> 6 + + // Scrambling control + h.ScramblingControl = uint8(b) >> 4 & 0x3 + + // Priority + h.Priority = uint8(b)&0x8 > 0 + + // Data alignment indicator + h.DataAlignmentIndicator = uint8(b)&0x4 > 0 + + // Copyrighted + h.IsCopyrighted = uint(b)&0x2 > 0 + + // Original or copy + h.IsOriginal = uint8(b)&0x1 > 0 + + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // PTS DST indicator + h.PTSDTSIndicator = uint8(b) >> 6 & 0x3 + + // Flags + h.HasESCR = uint8(b)&0x20 > 0 + h.HasESRate = uint8(b)&0x10 > 0 + h.HasDSMTrickMode = uint8(b)&0x8 > 0 + h.HasAdditionalCopyInfo = uint8(b)&0x4 > 0 + h.HasCRC = uint8(b)&0x2 > 0 + h.HasExtension = uint8(b)&0x1 > 0 + + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Header length + h.HeaderLength = uint8(b) + + // Update data start + dataStart = i.Offset() + int(h.HeaderLength) + + // PTS/DTS + if h.PTSDTSIndicator == PTSDTSIndicatorOnlyPTS { + if h.PTS, err = parsePTSOrDTS(i); err != nil { + err = fmt.Errorf("astits: parsing PTS failed: %w", err) + return + } + } else if 
h.PTSDTSIndicator == PTSDTSIndicatorBothPresent { + if h.PTS, err = parsePTSOrDTS(i); err != nil { + err = fmt.Errorf("astits: parsing PTS failed: %w", err) + return + } + if h.DTS, err = parsePTSOrDTS(i); err != nil { + err = fmt.Errorf("astits: parsing PTS failed: %w", err) + return + } + } + + // ESCR + if h.HasESCR { + if h.ESCR, err = parseESCR(i); err != nil { + err = fmt.Errorf("astits: parsing ESCR failed: %w", err) + return + } + } + + // ES rate + if h.HasESRate { + var bs []byte + if bs, err = i.NextBytesNoCopy(3); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + h.ESRate = uint32(bs[0])&0x7f<<15 | uint32(bs[1])<<7 | uint32(bs[2])>>1 + } + + // Trick mode + if h.HasDSMTrickMode { + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + h.DSMTrickMode = parseDSMTrickMode(b) + } + + // Additional copy info + if h.HasAdditionalCopyInfo { + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + h.AdditionalCopyInfo = b & 0x7f + } + + // CRC + if h.HasCRC { + var bs []byte + if bs, err = i.NextBytesNoCopy(2); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + h.CRC = uint16(bs[0])>>8 | uint16(bs[1]) + } + + // Extension + if h.HasExtension { + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Flags + h.HasPrivateData = b&0x80 > 0 + h.HasPackHeaderField = b&0x40 > 0 + h.HasProgramPacketSequenceCounter = b&0x20 > 0 + h.HasPSTDBuffer = b&0x10 > 0 + h.HasExtension2 = b&0x1 > 0 + + // Private data + if h.HasPrivateData { + if h.PrivateData, err = i.NextBytes(16); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + } + + // Pack field length + if h.HasPackHeaderField { + if b, err = i.NextByte(); err != nil { + err = 
fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + // TODO it's only a length of pack_header, should read it all. now it's wrong + h.PackField = uint8(b) + } + + // Program packet sequence counter + if h.HasProgramPacketSequenceCounter { + var bs []byte + if bs, err = i.NextBytesNoCopy(2); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + h.PacketSequenceCounter = uint8(bs[0]) & 0x7f + h.MPEG1OrMPEG2ID = uint8(bs[1]) >> 6 & 0x1 + h.OriginalStuffingLength = uint8(bs[1]) & 0x3f + } + + // P-STD buffer + if h.HasPSTDBuffer { + var bs []byte + if bs, err = i.NextBytesNoCopy(2); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + h.PSTDBufferScale = bs[0] >> 5 & 0x1 + h.PSTDBufferSize = uint16(bs[0])&0x1f<<8 | uint16(bs[1]) + } + + // Extension 2 + if h.HasExtension2 { + // Length + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + h.Extension2Length = uint8(b) & 0x7f + + // Data + if h.Extension2Data, err = i.NextBytes(int(h.Extension2Length)); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + } + } + return +} + +// parseDSMTrickMode parses a DSM trick mode +func parseDSMTrickMode(i byte) (m *DSMTrickMode) { + m = &DSMTrickMode{} + m.TrickModeControl = i >> 5 + if m.TrickModeControl == TrickModeControlFastForward || m.TrickModeControl == TrickModeControlFastReverse { + m.FieldID = i >> 3 & 0x3 + m.IntraSliceRefresh = i >> 2 & 0x1 + m.FrequencyTruncation = i & 0x3 + } else if m.TrickModeControl == TrickModeControlFreezeFrame { + m.FieldID = i >> 3 & 0x3 + } else if m.TrickModeControl == TrickModeControlSlowMotion || m.TrickModeControl == TrickModeControlSlowReverse { + m.RepeatControl = i & 0x1f + } + return +} + +// parsePTSOrDTS parses a PTS or a DTS +func parsePTSOrDTS(i *astikit.BytesIterator) (cr *ClockReference, err error) { + var bs []byte + if 
bs, err = i.NextBytesNoCopy(5); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + cr = newClockReference(int64(uint64(bs[0])>>1&0x7<<30|uint64(bs[1])<<22|uint64(bs[2])>>1&0x7f<<15|uint64(bs[3])<<7|uint64(bs[4])>>1&0x7f), 0) + return +} + +// parseESCR parses an ESCR +func parseESCR(i *astikit.BytesIterator) (cr *ClockReference, err error) { + var bs []byte + if bs, err = i.NextBytesNoCopy(6); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + escr := uint64(bs[0])>>3&0x7<<39 | uint64(bs[0])&0x3<<37 | uint64(bs[1])<<29 | uint64(bs[2])>>3<<24 | uint64(bs[2])&0x3<<22 | uint64(bs[3])<<14 | uint64(bs[4])>>3<<9 | uint64(bs[4])&0x3<<7 | uint64(bs[5])>>1 + cr = newClockReference(int64(escr>>9), int64(escr&0x1ff)) + return +} + +// will count how many total bytes and payload bytes will be written when writePESData is called with the same arguments +// should be used by the caller of writePESData to determine AF stuffing size needed to be applied +// since the length of video PES packets are often zero, we can't just stuff it with 0xff-s at the end +func calcPESDataLength(h *PESHeader, payloadLeft []byte, isPayloadStart bool, bytesAvailable int) (totalBytes, payloadBytes int) { + totalBytes += pesHeaderLength + if isPayloadStart { + totalBytes += int(calcPESOptionalHeaderLength(h.OptionalHeader)) + } + bytesAvailable -= totalBytes + + if len(payloadLeft) < bytesAvailable { + payloadBytes = len(payloadLeft) + } else { + payloadBytes = bytesAvailable + } + + return +} + +// first packet will contain PES header with optional PES header and payload, if possible +// all consequential packets will contain just payload +// for the last packet caller must add AF with stuffing, see calcPESDataLength +func writePESData(w *astikit.BitsWriter, h *PESHeader, payloadLeft []byte, isPayloadStart bool, bytesAvailable int) (totalBytesWritten, payloadBytesWritten int, err error) { + if isPayloadStart { + var 
n int + n, err = writePESHeader(w, h, len(payloadLeft)) + if err != nil { + return + } + totalBytesWritten += n + } + + payloadBytesWritten = bytesAvailable - totalBytesWritten + if payloadBytesWritten > len(payloadLeft) { + payloadBytesWritten = len(payloadLeft) + } + + err = w.Write(payloadLeft[:payloadBytesWritten]) + if err != nil { + return + } + + totalBytesWritten += payloadBytesWritten + return +} + +func writePESHeader(w *astikit.BitsWriter, h *PESHeader, payloadSize int) (int, error) { + b := astikit.NewBitsWriterBatch(w) + + b.WriteN(uint32(0x000001), 24) // packet_start_code_prefix + b.Write(h.StreamID) + + pesPacketLength := 0 + + if !h.IsVideoStream() { + pesPacketLength = payloadSize + if hasPESOptionalHeader(h.StreamID) { + pesPacketLength += int(calcPESOptionalHeaderLength(h.OptionalHeader)) + } + if pesPacketLength > 0xffff { + pesPacketLength = 0 + } + } + + b.Write(uint16(pesPacketLength)) + + bytesWritten := pesHeaderLength + + if hasPESOptionalHeader(h.StreamID) { + n, err := writePESOptionalHeader(w, h.OptionalHeader) + if err != nil { + return 0, err + } + bytesWritten += n + } + + return bytesWritten, b.Err() +} + +func calcPESOptionalHeaderLength(h *PESOptionalHeader) uint8 { + if h == nil { + return 0 + } + return 3 + calcPESOptionalHeaderDataLength(h) +} + +func calcPESOptionalHeaderDataLength(h *PESOptionalHeader) (length uint8) { + if h.PTSDTSIndicator == PTSDTSIndicatorOnlyPTS { + length += ptsOrDTSByteLength + } else if h.PTSDTSIndicator == PTSDTSIndicatorBothPresent { + length += 2 * ptsOrDTSByteLength + } + + if h.HasESCR { + length += escrLength + } + + if h.HasESRate { + length += 3 + } + + if h.HasDSMTrickMode { + length += dsmTrickModeLength + } + + if h.HasAdditionalCopyInfo { + length++ + } + + if h.HasCRC { + //length += 4 // TODO + } + + if h.HasExtension { + length++ + + if h.HasPrivateData { + length += 16 + } + + if h.HasPackHeaderField { + // TODO + } + + if h.HasProgramPacketSequenceCounter { + length += 2 + } + + if 
h.HasPSTDBuffer { + length += 2 + } + + if h.HasExtension2 { + length += 1 + uint8(len(h.Extension2Data)) + } + } + + return +} + +func writePESOptionalHeader(w *astikit.BitsWriter, h *PESOptionalHeader) (int, error) { + if h == nil { + return 0, nil + } + + b := astikit.NewBitsWriterBatch(w) + + b.WriteN(uint8(0b10), 2) // marker bits + b.WriteN(h.ScramblingControl, 2) + b.Write(h.Priority) + b.Write(h.DataAlignmentIndicator) + b.Write(h.IsCopyrighted) + b.Write(h.IsOriginal) + + b.WriteN(h.PTSDTSIndicator, 2) + b.Write(h.HasESCR) + b.Write(h.HasESRate) + b.Write(h.HasDSMTrickMode) + b.Write(h.HasAdditionalCopyInfo) + b.Write(false) // CRC of previous PES packet. not supported yet + //b.Write(h.HasCRC) + b.Write(h.HasExtension) + + pesOptionalHeaderDataLength := calcPESOptionalHeaderDataLength(h) + b.Write(pesOptionalHeaderDataLength) + + bytesWritten := 3 + + if h.PTSDTSIndicator == PTSDTSIndicatorOnlyPTS { + n, err := writePTSOrDTS(w, 0b0010, h.PTS) + if err != nil { + return 0, err + } + bytesWritten += n + } + + if h.PTSDTSIndicator == PTSDTSIndicatorBothPresent { + n, err := writePTSOrDTS(w, 0b0011, h.PTS) + if err != nil { + return 0, err + } + bytesWritten += n + + n, err = writePTSOrDTS(w, 0b0001, h.DTS) + if err != nil { + return 0, err + } + bytesWritten += n + } + + if h.HasESCR { + n, err := writeESCR(w, h.ESCR) + if err != nil { + return 0, err + } + bytesWritten += n + } + + if h.HasESRate { + b.Write(true) + b.WriteN(h.ESRate, 22) + b.Write(true) + bytesWritten += 3 + } + + if h.HasDSMTrickMode { + n, err := writeDSMTrickMode(w, h.DSMTrickMode) + if err != nil { + return 0, err + } + bytesWritten += n + } + + if h.HasAdditionalCopyInfo { + b.Write(true) // marker_bit + b.WriteN(h.AdditionalCopyInfo, 7) + bytesWritten++ + } + + if h.HasCRC { + // TODO, not supported + } + + if h.HasExtension { + // exp 10110001 + // act 10111111 + b.Write(h.HasPrivateData) + b.Write(false) // TODO pack_header_field_flag, not implemented + 
//b.Write(h.HasPackHeaderField) + b.Write(h.HasProgramPacketSequenceCounter) + b.Write(h.HasPSTDBuffer) + b.WriteN(uint8(0xff), 3) // reserved + b.Write(h.HasExtension2) + bytesWritten++ + + if h.HasPrivateData { + b.WriteBytesN(h.PrivateData, 16, 0) + bytesWritten += 16 + } + + if h.HasPackHeaderField { + // TODO (see parsePESOptionalHeader) + } + + if h.HasProgramPacketSequenceCounter { + b.Write(true) // marker_bit + b.WriteN(h.PacketSequenceCounter, 7) + b.Write(true) // marker_bit + b.WriteN(h.MPEG1OrMPEG2ID, 1) + b.WriteN(h.OriginalStuffingLength, 6) + bytesWritten += 2 + } + + if h.HasPSTDBuffer { + b.WriteN(uint8(0b01), 2) + b.WriteN(h.PSTDBufferScale, 1) + b.WriteN(h.PSTDBufferSize, 13) + bytesWritten += 2 + } + + if h.HasExtension2 { + b.Write(true) // marker_bit + b.WriteN(uint8(len(h.Extension2Data)), 7) + b.Write(h.Extension2Data) + bytesWritten += 1 + len(h.Extension2Data) + } + } + + return bytesWritten, b.Err() +} + +func writeDSMTrickMode(w *astikit.BitsWriter, m *DSMTrickMode) (int, error) { + b := astikit.NewBitsWriterBatch(w) + + b.WriteN(m.TrickModeControl, 3) + if m.TrickModeControl == TrickModeControlFastForward || m.TrickModeControl == TrickModeControlFastReverse { + b.WriteN(m.FieldID, 2) + b.Write(m.IntraSliceRefresh == 1) // it should be boolean + b.WriteN(m.FrequencyTruncation, 2) + } else if m.TrickModeControl == TrickModeControlFreezeFrame { + b.WriteN(m.FieldID, 2) + b.WriteN(uint8(0xff), 3) // reserved + } else if m.TrickModeControl == TrickModeControlSlowMotion || m.TrickModeControl == TrickModeControlSlowReverse { + b.WriteN(m.RepeatControl, 5) + } else { + b.WriteN(uint8(0xff), 5) // reserved + } + + return dsmTrickModeLength, b.Err() +} + +func writeESCR(w *astikit.BitsWriter, cr *ClockReference) (int, error) { + b := astikit.NewBitsWriterBatch(w) + + b.WriteN(uint8(0xff), 2) + b.WriteN(uint64(cr.Base>>30), 3) + b.Write(true) + b.WriteN(uint64(cr.Base>>15), 15) + b.Write(true) + b.WriteN(uint64(cr.Base), 15) + b.Write(true) + 
b.WriteN(uint64(cr.Extension), 9) + b.Write(true) + + return escrLength, b.Err() +} + +func writePTSOrDTS(w *astikit.BitsWriter, flag uint8, cr *ClockReference) (bytesWritten int, retErr error) { + b := astikit.NewBitsWriterBatch(w) + + b.WriteN(flag, 4) + b.WriteN(uint64(cr.Base>>30), 3) + b.Write(true) + b.WriteN(uint64(cr.Base>>15), 15) + b.Write(true) + b.WriteN(uint64(cr.Base), 15) + b.Write(true) + + return ptsOrDTSByteLength, b.Err() +} diff --git a/vendor/github.com/asticode/go-astits/data_pmt.go b/vendor/github.com/asticode/go-astits/data_pmt.go new file mode 100644 index 000000000..d1ea54fa3 --- /dev/null +++ b/vendor/github.com/asticode/go-astits/data_pmt.go @@ -0,0 +1,256 @@ +package astits + +import ( + "fmt" + + "github.com/asticode/go-astikit" +) + +type StreamType uint8 + +// Stream types +const ( + StreamTypeMPEG1Video StreamType = 0x01 + StreamTypeMPEG2Video StreamType = 0x02 + StreamTypeMPEG1Audio StreamType = 0x03 // ISO/IEC 11172-3 + StreamTypeMPEG2HalvedSampleRateAudio StreamType = 0x04 // ISO/IEC 13818-3 + StreamTypeMPEG2Audio StreamType = 0x04 + StreamTypePrivateSection StreamType = 0x05 + StreamTypePrivateData StreamType = 0x06 + StreamTypeMPEG2PacketizedData StreamType = 0x06 // Rec. ITU-T H.222 | ISO/IEC 13818-1 i.e., DVB subtitles/VBI and AC-3 + StreamTypeADTS StreamType = 0x0F // ISO/IEC 13818-7 Audio with ADTS transport syntax + StreamTypeAACAudio StreamType = 0x0f + StreamTypeMPEG4Video StreamType = 0x10 + StreamTypeAACLATMAudio StreamType = 0x11 + StreamTypeMetadata StreamType = 0x15 + StreamTypeH264Video StreamType = 0x1B // Rec. ITU-T H.264 | ISO/IEC 14496-10 + StreamTypeH265Video StreamType = 0x24 // Rec. 
ITU-T H.265 | ISO/IEC 23008-2 + StreamTypeHEVCVideo StreamType = 0x24 + StreamTypeCAVSVideo StreamType = 0x42 + StreamTypeVC1Video StreamType = 0xea + StreamTypeDIRACVideo StreamType = 0xd1 + StreamTypeAC3Audio StreamType = 0x81 + StreamTypeDTSAudio StreamType = 0x82 + StreamTypeTRUEHDAudio StreamType = 0x83 + StreamTypeEAC3Audio StreamType = 0x87 +) + +// PMTData represents a PMT data +// https://en.wikipedia.org/wiki/Program-specific_information +type PMTData struct { + ElementaryStreams []*PMTElementaryStream + PCRPID uint16 // The packet identifier that contains the program clock reference used to improve the random access accuracy of the stream's timing that is derived from the program timestamp. If this is unused. then it is set to 0x1FFF (all bits on). + ProgramDescriptors []*Descriptor // Program descriptors + ProgramNumber uint16 +} + +// PMTElementaryStream represents a PMT elementary stream +type PMTElementaryStream struct { + ElementaryPID uint16 // The packet identifier that contains the stream type data. + ElementaryStreamDescriptors []*Descriptor // Elementary stream descriptors + StreamType StreamType // This defines the structure of the data contained within the elementary packet identifier. 
+} + +// parsePMTSection parses a PMT section +func parsePMTSection(i *astikit.BytesIterator, offsetSectionsEnd int, tableIDExtension uint16) (d *PMTData, err error) { + // Create data + d = &PMTData{ProgramNumber: tableIDExtension} + + // Get next bytes + var bs []byte + if bs, err = i.NextBytesNoCopy(2); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // PCR PID + d.PCRPID = uint16(bs[0]&0x1f)<<8 | uint16(bs[1]) + + // Program descriptors + if d.ProgramDescriptors, err = parseDescriptors(i); err != nil { + err = fmt.Errorf("astits: parsing descriptors failed: %w", err) + return + } + + // Loop until end of section data is reached + for i.Offset() < offsetSectionsEnd { + // Create stream + e := &PMTElementaryStream{} + + // Get next byte + var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Stream type + e.StreamType = StreamType(b) + + // Get next bytes + if bs, err = i.NextBytesNoCopy(2); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Elementary PID + e.ElementaryPID = uint16(bs[0]&0x1f)<<8 | uint16(bs[1]) + + // Elementary descriptors + if e.ElementaryStreamDescriptors, err = parseDescriptors(i); err != nil { + err = fmt.Errorf("astits: parsing descriptors failed: %w", err) + return + } + + // Add elementary stream + d.ElementaryStreams = append(d.ElementaryStreams, e) + } + return +} + +func calcPMTProgramInfoLength(d *PMTData) uint16 { + ret := uint16(2) // program_info_length + ret += calcDescriptorsLength(d.ProgramDescriptors) + + for _, es := range d.ElementaryStreams { + ret += 5 // stream_type, elementary_pid, es_info_length + ret += calcDescriptorsLength(es.ElementaryStreamDescriptors) + } + + return ret +} + +func calcPMTSectionLength(d *PMTData) uint16 { + ret := uint16(4) + ret += calcDescriptorsLength(d.ProgramDescriptors) + + for _, es := range d.ElementaryStreams { 
+ ret += 5 + ret += calcDescriptorsLength(es.ElementaryStreamDescriptors) + } + + return ret +} + +func writePMTSection(w *astikit.BitsWriter, d *PMTData) (int, error) { + b := astikit.NewBitsWriterBatch(w) + + // TODO split into sections + + b.WriteN(uint8(0xff), 3) + b.WriteN(d.PCRPID, 13) + bytesWritten := 2 + + n, err := writeDescriptorsWithLength(w, d.ProgramDescriptors) + if err != nil { + return 0, err + } + bytesWritten += n + + for _, es := range d.ElementaryStreams { + b.Write(uint8(es.StreamType)) + b.WriteN(uint8(0xff), 3) + b.WriteN(es.ElementaryPID, 13) + bytesWritten += 3 + + n, err = writeDescriptorsWithLength(w, es.ElementaryStreamDescriptors) + if err != nil { + return 0, err + } + bytesWritten += n + } + + return bytesWritten, b.Err() +} + +func (t StreamType) IsVideo() bool { + switch t { + case StreamTypeMPEG1Video, + StreamTypeMPEG2Video, + StreamTypeMPEG4Video, + StreamTypeH264Video, + StreamTypeH265Video, + StreamTypeCAVSVideo, + StreamTypeVC1Video, + StreamTypeDIRACVideo: + return true + } + return false +} + +func (t StreamType) IsAudio() bool { + switch t { + case StreamTypeMPEG1Audio, + StreamTypeMPEG2Audio, + StreamTypeAACAudio, + StreamTypeAACLATMAudio, + StreamTypeAC3Audio, + StreamTypeDTSAudio, + StreamTypeTRUEHDAudio, + StreamTypeEAC3Audio: + return true + } + return false +} + +func (t StreamType) String() string { + switch t { + case StreamTypeMPEG1Video: + return "MPEG1 Video" + case StreamTypeMPEG2Video: + return "MPEG2 Video" + case StreamTypeMPEG1Audio: + return "MPEG1 Audio" + case StreamTypeMPEG2Audio: + return "MPEG2 Audio" + case StreamTypePrivateSection: + return "Private Section" + case StreamTypePrivateData: + return "Private Data" + case StreamTypeAACAudio: + return "AAC Audio" + case StreamTypeMPEG4Video: + return "MPEG4 Video" + case StreamTypeAACLATMAudio: + return "AAC LATM Audio" + case StreamTypeMetadata: + return "Metadata" + case StreamTypeH264Video: + return "H264 Video" + case StreamTypeH265Video: + return 
"H265 Video" + case StreamTypeCAVSVideo: + return "CAVS Video" + case StreamTypeVC1Video: + return "VC1 Video" + case StreamTypeDIRACVideo: + return "DIRAC Video" + case StreamTypeAC3Audio: + return "AC3 Audio" + case StreamTypeDTSAudio: + return "DTS Audio" + case StreamTypeTRUEHDAudio: + return "TRUEHD Audio" + case StreamTypeEAC3Audio: + return "EAC3 Audio" + } + return "Unknown" +} + +func (t StreamType) ToPESStreamID() uint8 { + switch t { + case StreamTypeMPEG1Video, StreamTypeMPEG2Video, StreamTypeMPEG4Video, StreamTypeH264Video, + StreamTypeH265Video, StreamTypeCAVSVideo, StreamTypeVC1Video: + return 0xe0 + case StreamTypeDIRACVideo: + return 0xfd + case StreamTypeMPEG2Audio, StreamTypeAACAudio, StreamTypeAACLATMAudio: + return 0xc0 + case StreamTypeAC3Audio, StreamTypeEAC3Audio: // m2ts_mode??? + return 0xfd + case StreamTypePrivateSection, StreamTypePrivateData, StreamTypeMetadata: + return 0xfc + default: + return 0xbd + } +} diff --git a/vendor/github.com/asticode/go-astits/data_psi.go b/vendor/github.com/asticode/go-astits/data_psi.go new file mode 100644 index 000000000..e0c3e7806 --- /dev/null +++ b/vendor/github.com/asticode/go-astits/data_psi.go @@ -0,0 +1,608 @@ +package astits + +import ( + "fmt" + + "github.com/asticode/go-astikit" +) + +// PSI table IDs +const ( + PSITableTypeBAT = "BAT" + PSITableTypeDIT = "DIT" + PSITableTypeEIT = "EIT" + PSITableTypeNIT = "NIT" + PSITableTypeNull = "Null" + PSITableTypePAT = "PAT" + PSITableTypePMT = "PMT" + PSITableTypeRST = "RST" + PSITableTypeSDT = "SDT" + PSITableTypeSIT = "SIT" + PSITableTypeST = "ST" + PSITableTypeTDT = "TDT" + PSITableTypeTOT = "TOT" + PSITableTypeUnknown = "Unknown" +) + +type PSITableID uint16 + +const ( + PSITableIDPAT PSITableID = 0x00 + PSITableIDPMT PSITableID = 0x02 + PSITableIDBAT PSITableID = 0x4a + PSITableIDDIT PSITableID = 0x7e + PSITableIDRST PSITableID = 0x71 + PSITableIDSIT PSITableID = 0x7f + PSITableIDST PSITableID = 0x72 + PSITableIDTDT PSITableID = 0x70 + 
PSITableIDTOT PSITableID = 0x73 + PSITableIDNull PSITableID = 0xff + + PSITableIDEITStart PSITableID = 0x4e + PSITableIDEITEnd PSITableID = 0x6f + PSITableIDSDTVariant1 PSITableID = 0x42 + PSITableIDSDTVariant2 PSITableID = 0x46 + PSITableIDNITVariant1 PSITableID = 0x40 + PSITableIDNITVariant2 PSITableID = 0x41 +) + +// PSIData represents a PSI data +// https://en.wikipedia.org/wiki/Program-specific_information +type PSIData struct { + PointerField int // Present at the start of the TS packet payload signaled by the payload_unit_start_indicator bit in the TS header. Used to set packet alignment bytes or content before the start of tabled payload data. + Sections []*PSISection +} + +// PSISection represents a PSI section +type PSISection struct { + CRC32 uint32 // A checksum of the entire table excluding the pointer field, pointer filler bytes and the trailing CRC32. + Header *PSISectionHeader + Syntax *PSISectionSyntax +} + +// PSISectionHeader represents a PSI section header +type PSISectionHeader struct { + PrivateBit bool // The PAT, PMT, and CAT all set this to 0. Other tables set this to 1. + SectionLength uint16 // The number of bytes that follow for the syntax section (with CRC value) and/or table data. These bytes must not exceed a value of 1021. + SectionSyntaxIndicator bool // A flag that indicates if the syntax section follows the section length. The PAT, PMT, and CAT all set this to 1. + TableID PSITableID // Table Identifier, that defines the structure of the syntax section and other contained data. As an exception, if this is the byte that immediately follow previous table section and is set to 0xFF, then it indicates that the repeat of table section end here and the rest of TS data payload shall be stuffed with 0xFF. Consequently the value 0xFF shall not be used for the Table Identifier. 
+ TableType string +} + +// PSISectionSyntax represents a PSI section syntax +type PSISectionSyntax struct { + Data *PSISectionSyntaxData + Header *PSISectionSyntaxHeader +} + +// PSISectionSyntaxHeader represents a PSI section syntax header +type PSISectionSyntaxHeader struct { + CurrentNextIndicator bool // Indicates if data is current in effect or is for future use. If the bit is flagged on, then the data is to be used at the present moment. + LastSectionNumber uint8 // This indicates which table is the last table in the sequence of tables. + SectionNumber uint8 // This is an index indicating which table this is in a related sequence of tables. The first table starts from 0. + TableIDExtension uint16 // Informational only identifier. The PAT uses this for the transport stream identifier and the PMT uses this for the Program number. + VersionNumber uint8 // Syntax version number. Incremented when data is changed and wrapped around on overflow for values greater than 32. +} + +// PSISectionSyntaxData represents a PSI section syntax data +type PSISectionSyntaxData struct { + EIT *EITData + NIT *NITData + PAT *PATData + PMT *PMTData + SDT *SDTData + TOT *TOTData +} + +// parsePSIData parses a PSI data +func parsePSIData(i *astikit.BytesIterator) (d *PSIData, err error) { + // Init data + d = &PSIData{} + + // Get next byte + var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Pointer field + d.PointerField = int(b) + + // Pointer filler bytes + i.Skip(d.PointerField) + + // Parse sections + var s *PSISection + var stop bool + for i.HasBytesLeft() && !stop { + if s, stop, err = parsePSISection(i); err != nil { + err = fmt.Errorf("astits: parsing PSI table failed: %w", err) + return + } + d.Sections = append(d.Sections, s) + } + return +} + +// parsePSISection parses a PSI section +func parsePSISection(i *astikit.BytesIterator) (s *PSISection, stop bool, err error) { + // Init section 
+ s = &PSISection{} + + // Parse header + var offsetStart, offsetSectionsEnd, offsetEnd int + if s.Header, offsetStart, _, offsetSectionsEnd, offsetEnd, err = parsePSISectionHeader(i); err != nil { + err = fmt.Errorf("astits: parsing PSI section header failed: %w", err) + return + } + + // Check whether we need to stop the parsing + if shouldStopPSIParsing(s.Header.TableID) { + stop = true + return + } + + // Check whether there's a syntax section + if s.Header.SectionLength > 0 { + // Parse syntax + if s.Syntax, err = parsePSISectionSyntax(i, s.Header, offsetSectionsEnd); err != nil { + err = fmt.Errorf("astits: parsing PSI section syntax failed: %w", err) + return + } + + // Process CRC32 + if s.Header.TableID.hasCRC32() { + // Seek to the end of the sections + i.Seek(offsetSectionsEnd) + + // Parse CRC32 + if s.CRC32, err = parseCRC32(i); err != nil { + err = fmt.Errorf("astits: parsing CRC32 failed: %w", err) + return + } + + // Get CRC32 data + i.Seek(offsetStart) + var crc32Data []byte + if crc32Data, err = i.NextBytesNoCopy(offsetSectionsEnd - offsetStart); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Compute CRC32 + crc32 := computeCRC32(crc32Data) + + // Check CRC32 + if crc32 != s.CRC32 { + err = fmt.Errorf("astits: Table CRC32 %x != computed CRC32 %x", s.CRC32, crc32) + return + } + } + } + + // Seek to the end of the section + i.Seek(offsetEnd) + return +} + +// parseCRC32 parses a CRC32 +func parseCRC32(i *astikit.BytesIterator) (c uint32, err error) { + var bs []byte + if bs, err = i.NextBytesNoCopy(4); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + c = uint32(bs[0])<<24 | uint32(bs[1])<<16 | uint32(bs[2])<<8 | uint32(bs[3]) + return +} + +// shouldStopPSIParsing checks whether the PSI parsing should be stopped +func shouldStopPSIParsing(tableID PSITableID) bool { + return tableID == PSITableIDNull || + tableID.isUnknown() +} + +// 
parsePSISectionHeader parses a PSI section header +func parsePSISectionHeader(i *astikit.BytesIterator) (h *PSISectionHeader, offsetStart, offsetSectionsStart, offsetSectionsEnd, offsetEnd int, err error) { + // Init + h = &PSISectionHeader{} + offsetStart = i.Offset() + + // Get next byte + var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Table ID + h.TableID = PSITableID(b) + + // Table type + h.TableType = h.TableID.Type() + + // Check whether we need to stop the parsing + if shouldStopPSIParsing(h.TableID) { + return + } + + // Get next bytes + var bs []byte + if bs, err = i.NextBytesNoCopy(2); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Section syntax indicator + h.SectionSyntaxIndicator = bs[0]&0x80 > 0 + + // Private bit + h.PrivateBit = bs[0]&0x40 > 0 + + // Section length + h.SectionLength = uint16(bs[0]&0xf)<<8 | uint16(bs[1]) + + // Offsets + offsetSectionsStart = i.Offset() + offsetEnd = offsetSectionsStart + int(h.SectionLength) + offsetSectionsEnd = offsetEnd + if h.TableID.hasCRC32() { + offsetSectionsEnd -= 4 + } + return +} + +// PSITableID.Type() returns the psi table type based on the table id +// Page: 28 | https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf +// (barbashov) the link above can be broken, alternative: https://dvb.org/wp-content/uploads/2019/12/a038_tm1217r37_en300468v1_17_1_-_rev-134_-_si_specification.pdf +func (t PSITableID) Type() string { + switch { + case t == PSITableIDBAT: + return PSITableTypeBAT + case t >= PSITableIDEITStart && t <= PSITableIDEITEnd: + return PSITableTypeEIT + case t == PSITableIDDIT: + return PSITableTypeDIT + case t == PSITableIDNITVariant1, t == PSITableIDNITVariant2: + return PSITableTypeNIT + case t == PSITableIDNull: + return PSITableTypeNull + case t == PSITableIDPAT: + return PSITableTypePAT + case t == PSITableIDPMT: + return 
PSITableTypePMT + case t == PSITableIDRST: + return PSITableTypeRST + case t == PSITableIDSDTVariant1, t == PSITableIDSDTVariant2: + return PSITableTypeSDT + case t == PSITableIDSIT: + return PSITableTypeSIT + case t == PSITableIDST: + return PSITableTypeST + case t == PSITableIDTDT: + return PSITableTypeTDT + case t == PSITableIDTOT: + return PSITableTypeTOT + default: + return PSITableTypeUnknown + } +} + +// hasPSISyntaxHeader checks whether the section has a syntax header +func (t PSITableID) hasPSISyntaxHeader() bool { + return t == PSITableIDPAT || + t == PSITableIDPMT || + t == PSITableIDNITVariant1 || t == PSITableIDNITVariant2 || + t == PSITableIDSDTVariant1 || t == PSITableIDSDTVariant2 || + (t >= PSITableIDEITStart && t <= PSITableIDEITEnd) +} + +// hasCRC32 checks whether the table has a CRC32 +func (t PSITableID) hasCRC32() bool { + return t == PSITableIDPAT || + t == PSITableIDPMT || + t == PSITableIDTOT || + t == PSITableIDNITVariant1 || t == PSITableIDNITVariant2 || + t == PSITableIDSDTVariant1 || t == PSITableIDSDTVariant2 || + (t >= PSITableIDEITStart && t <= PSITableIDEITEnd) +} + +func (t PSITableID) isUnknown() bool { + switch t { + case PSITableIDBAT, + PSITableIDDIT, + PSITableIDNITVariant1, PSITableIDNITVariant2, + PSITableIDNull, + PSITableIDPAT, + PSITableIDPMT, + PSITableIDRST, + PSITableIDSDTVariant1, PSITableIDSDTVariant2, + PSITableIDSIT, + PSITableIDST, + PSITableIDTDT, + PSITableIDTOT: + return false + } + if t >= PSITableIDEITStart && t <= PSITableIDEITEnd { + return false + } + return true +} + +// parsePSISectionSyntax parses a PSI section syntax +func parsePSISectionSyntax(i *astikit.BytesIterator, h *PSISectionHeader, offsetSectionsEnd int) (s *PSISectionSyntax, err error) { + // Init + s = &PSISectionSyntax{} + + // Header + if h.TableID.hasPSISyntaxHeader() { + if s.Header, err = parsePSISectionSyntaxHeader(i); err != nil { + err = fmt.Errorf("astits: parsing PSI section syntax header failed: %w", err) + return + } + } + + // 
Parse data + if s.Data, err = parsePSISectionSyntaxData(i, h, s.Header, offsetSectionsEnd); err != nil { + err = fmt.Errorf("astits: parsing PSI section syntax data failed: %w", err) + return + } + return +} + +// parsePSISectionSyntaxHeader parses a PSI section syntax header +func parsePSISectionSyntaxHeader(i *astikit.BytesIterator) (h *PSISectionSyntaxHeader, err error) { + // Init + h = &PSISectionSyntaxHeader{} + + // Get next 2 bytes + var bs []byte + if bs, err = i.NextBytesNoCopy(2); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Table ID extension + h.TableIDExtension = uint16(bs[0])<<8 | uint16(bs[1]) + + // Get next byte + var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Version number + h.VersionNumber = uint8(b&0x3f) >> 1 + + // Current/Next indicator + h.CurrentNextIndicator = b&0x1 > 0 + + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Section number + h.SectionNumber = uint8(b) + + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Last section number + h.LastSectionNumber = uint8(b) + return +} + +// parsePSISectionSyntaxData parses a PSI section data +func parsePSISectionSyntaxData(i *astikit.BytesIterator, h *PSISectionHeader, sh *PSISectionSyntaxHeader, offsetSectionsEnd int) (d *PSISectionSyntaxData, err error) { + // Init + d = &PSISectionSyntaxData{} + + // Switch on table type + switch h.TableID { + case PSITableIDBAT: + // TODO Parse BAT + case PSITableIDDIT: + // TODO Parse DIT + case PSITableIDNITVariant1, PSITableIDNITVariant2: + if d.NIT, err = parseNITSection(i, sh.TableIDExtension); err != nil { + err = fmt.Errorf("astits: parsing NIT section failed: %w", err) + return + } + case PSITableIDPAT: + if d.PAT, err = 
parsePATSection(i, offsetSectionsEnd, sh.TableIDExtension); err != nil { + err = fmt.Errorf("astits: parsing PAT section failed: %w", err) + return + } + case PSITableIDPMT: + if d.PMT, err = parsePMTSection(i, offsetSectionsEnd, sh.TableIDExtension); err != nil { + err = fmt.Errorf("astits: parsing PMT section failed: %w", err) + return + } + case PSITableIDRST: + // TODO Parse RST + case PSITableIDSDTVariant1, PSITableIDSDTVariant2: + if d.SDT, err = parseSDTSection(i, offsetSectionsEnd, sh.TableIDExtension); err != nil { + err = fmt.Errorf("astits: parsing SDT section failed: %w", err) + return + } + case PSITableIDSIT: + // TODO Parse SIT + case PSITableIDST: + // TODO Parse ST + case PSITableIDTOT: + if d.TOT, err = parseTOTSection(i); err != nil { + err = fmt.Errorf("astits: parsing TOT section failed: %w", err) + return + } + case PSITableIDTDT: + // TODO Parse TDT + } + + if h.TableID >= PSITableIDEITStart && h.TableID <= PSITableIDEITEnd { + if d.EIT, err = parseEITSection(i, offsetSectionsEnd, sh.TableIDExtension); err != nil { + err = fmt.Errorf("astits: parsing EIT section failed: %w", err) + return + } + } + + return +} + +// toData parses the PSI tables and returns a set of DemuxerData +func (d *PSIData) toData(firstPacket *Packet, pid uint16) (ds []*DemuxerData) { + // Loop through sections + for _, s := range d.Sections { + // Switch on table type + switch s.Header.TableID { + case PSITableIDNITVariant1, PSITableIDNITVariant2: + ds = append(ds, &DemuxerData{FirstPacket: firstPacket, NIT: s.Syntax.Data.NIT, PID: pid}) + case PSITableIDPAT: + ds = append(ds, &DemuxerData{FirstPacket: firstPacket, PAT: s.Syntax.Data.PAT, PID: pid}) + case PSITableIDPMT: + ds = append(ds, &DemuxerData{FirstPacket: firstPacket, PID: pid, PMT: s.Syntax.Data.PMT}) + case PSITableIDSDTVariant1, PSITableIDSDTVariant2: + ds = append(ds, &DemuxerData{FirstPacket: firstPacket, PID: pid, SDT: s.Syntax.Data.SDT}) + case PSITableIDTOT: + ds = append(ds, &DemuxerData{FirstPacket:
firstPacket, PID: pid, TOT: s.Syntax.Data.TOT}) + } + if s.Header.TableID >= PSITableIDEITStart && s.Header.TableID <= PSITableIDEITEnd { + ds = append(ds, &DemuxerData{EIT: s.Syntax.Data.EIT, FirstPacket: firstPacket, PID: pid}) + } + } + return +} + +func writePSIData(w *astikit.BitsWriter, d *PSIData) (int, error) { + b := astikit.NewBitsWriterBatch(w) + b.Write(uint8(d.PointerField)) + for i := 0; i < d.PointerField; i++ { + b.Write(uint8(0x00)) + } + + bytesWritten := 1 + d.PointerField + + if err := b.Err(); err != nil { + return 0, err + } + + for _, s := range d.Sections { + n, err := writePSISection(w, s) + if err != nil { + return 0, err + } + bytesWritten += n + } + + return bytesWritten, nil +} + +func calcPSISectionLength(s *PSISection) uint16 { + ret := uint16(0) + if s.Header.TableID.hasPSISyntaxHeader() { + ret += 5 // PSI syntax header length + } + + switch s.Header.TableID { + case PSITableIDPAT: + ret += calcPATSectionLength(s.Syntax.Data.PAT) + case PSITableIDPMT: + ret += calcPMTSectionLength(s.Syntax.Data.PMT) + } + + if s.Header.TableID.hasCRC32() { + ret += 4 + } + + return ret +} + +func writePSISection(w *astikit.BitsWriter, s *PSISection) (int, error) { + if s.Header.TableID != PSITableIDPAT && s.Header.TableID != PSITableIDPMT { + return 0, fmt.Errorf("writePSISection: table %s is not implemented", s.Header.TableID.Type()) + } + + b := astikit.NewBitsWriterBatch(w) + + sectionLength := calcPSISectionLength(s) + sectionCRC32 := crc32Polynomial + + if s.Header.TableID.hasCRC32() { + w.SetWriteCallback(func(bs []byte) { + sectionCRC32 = updateCRC32(sectionCRC32, bs) + }) + defer w.SetWriteCallback(nil) + } + + b.Write(uint8(s.Header.TableID)) + b.Write(s.Header.SectionSyntaxIndicator) + b.Write(s.Header.PrivateBit) + b.WriteN(uint8(0xff), 2) + b.WriteN(sectionLength, 12) + bytesWritten := 3 + + if s.Header.SectionLength > 0 { + n, err := writePSISectionSyntax(w, s) + if err != nil { + return 0, err + } + bytesWritten += n + + if 
s.Header.TableID.hasCRC32() { + b.Write(sectionCRC32) + bytesWritten += 4 + } + } + + return bytesWritten, b.Err() +} + +func writePSISectionSyntax(w *astikit.BitsWriter, s *PSISection) (int, error) { + bytesWritten := 0 + if s.Header.TableID.hasPSISyntaxHeader() { + n, err := writePSISectionSyntaxHeader(w, s.Syntax.Header) + if err != nil { + return 0, err + } + bytesWritten += n + } + + n, err := writePSISectionSyntaxData(w, s.Syntax.Data, s.Header.TableID) + if err != nil { + return 0, err + } + bytesWritten += n + + return bytesWritten, nil +} + +func writePSISectionSyntaxHeader(w *astikit.BitsWriter, h *PSISectionSyntaxHeader) (int, error) { + b := astikit.NewBitsWriterBatch(w) + + b.Write(h.TableIDExtension) + b.WriteN(uint8(0xff), 2) + b.WriteN(h.VersionNumber, 5) + b.Write(h.CurrentNextIndicator) + b.Write(h.SectionNumber) + b.Write(h.LastSectionNumber) + + return 5, b.Err() +} + +func writePSISectionSyntaxData(w *astikit.BitsWriter, d *PSISectionSyntaxData, tableID PSITableID) (int, error) { + switch tableID { + // TODO write other table types + case PSITableIDPAT: + return writePATSection(w, d.PAT) + case PSITableIDPMT: + return writePMTSection(w, d.PMT) + } + + return 0, nil +} diff --git a/vendor/github.com/asticode/go-astits/data_sdt.go b/vendor/github.com/asticode/go-astits/data_sdt.go new file mode 100644 index 000000000..f060f2678 --- /dev/null +++ b/vendor/github.com/asticode/go-astits/data_sdt.go @@ -0,0 +1,108 @@ +package astits + +import ( + "fmt" + + "github.com/asticode/go-astikit" +) + +// Running statuses +const ( + RunningStatusNotRunning = 1 + RunningStatusPausing = 3 + RunningStatusRunning = 4 + RunningStatusServiceOffAir = 5 + RunningStatusStartsInAFewSeconds = 2 + RunningStatusUndefined = 0 +) + +// SDTData represents an SDT data +// Page: 33 | Chapter: 5.2.3 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf +// (barbashov) the link above can be broken, alternative: 
https://dvb.org/wp-content/uploads/2019/12/a038_tm1217r37_en300468v1_17_1_-_rev-134_-_si_specification.pdf +type SDTData struct { + OriginalNetworkID uint16 + Services []*SDTDataService + TransportStreamID uint16 +} + +// SDTDataService represents an SDT data service +type SDTDataService struct { + Descriptors []*Descriptor + HasEITPresentFollowing bool // When true indicates that EIT present/following information for the service is present in the current TS + HasEITSchedule bool // When true indicates that EIT schedule information for the service is present in the current TS + HasFreeCSAMode bool // When true indicates that access to one or more streams may be controlled by a CA system. + RunningStatus uint8 + ServiceID uint16 +} + +// parseSDTSection parses an SDT section +func parseSDTSection(i *astikit.BytesIterator, offsetSectionsEnd int, tableIDExtension uint16) (d *SDTData, err error) { + // Create data + d = &SDTData{TransportStreamID: tableIDExtension} + + // Get next bytes + var bs []byte + if bs, err = i.NextBytesNoCopy(2); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Original network ID + d.OriginalNetworkID = uint16(bs[0])<<8 | uint16(bs[1]) + + // Reserved for future use + i.Skip(1) + + // Loop until end of section data is reached + for i.Offset() < offsetSectionsEnd { + // Create service + s := &SDTDataService{} + + // Get next bytes + if bs, err = i.NextBytesNoCopy(2); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Service ID + s.ServiceID = uint16(bs[0])<<8 | uint16(bs[1]) + + // Get next byte + var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // EIT schedule flag + s.HasEITSchedule = uint8(b&0x2) > 0 + + // EIT present/following flag + s.HasEITPresentFollowing = uint8(b&0x1) > 0 + + // Get next byte + if b, err = i.NextByte(); err != nil { + err = 
fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Running status + s.RunningStatus = uint8(b) >> 5 + + // Free CA mode + s.HasFreeCSAMode = uint8(b&0x10) > 0 + + // We need to rewind since the current byte is used by the descriptor as well + i.Skip(-1) + + // Descriptors + if s.Descriptors, err = parseDescriptors(i); err != nil { + err = fmt.Errorf("astits: parsing descriptors failed: %w", err) + return + } + + // Append service + d.Services = append(d.Services, s) + } + return +} diff --git a/vendor/github.com/asticode/go-astits/data_tot.go b/vendor/github.com/asticode/go-astits/data_tot.go new file mode 100644 index 000000000..0bd64d2db --- /dev/null +++ b/vendor/github.com/asticode/go-astits/data_tot.go @@ -0,0 +1,35 @@ +package astits + +import ( + "fmt" + "time" + + "github.com/asticode/go-astikit" +) + +// TOTData represents a TOT data +// Page: 39 | Chapter: 5.2.6 | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf +// (barbashov) the link above can be broken, alternative: https://dvb.org/wp-content/uploads/2019/12/a038_tm1217r37_en300468v1_17_1_-_rev-134_-_si_specification.pdf +type TOTData struct { + Descriptors []*Descriptor + UTCTime time.Time +} + +// parseTOTSection parses a TOT section +func parseTOTSection(i *astikit.BytesIterator) (d *TOTData, err error) { + // Create data + d = &TOTData{} + + // UTC time + if d.UTCTime, err = parseDVBTime(i); err != nil { + err = fmt.Errorf("astits: parsing DVB time failed: %w", err) + return + } + + // Descriptors + if d.Descriptors, err = parseDescriptors(i); err != nil { + err = fmt.Errorf("astits: parsing descriptors failed: %w", err) + return + } + return +} diff --git a/vendor/github.com/asticode/go-astits/demuxer.go b/vendor/github.com/asticode/go-astits/demuxer.go new file mode 100644 index 000000000..b17e82f1c --- /dev/null +++ b/vendor/github.com/asticode/go-astits/demuxer.go @@ -0,0 +1,189 @@ +package astits + +import ( + "context" + "errors" + 
"fmt" + "io" +) + +// Sync byte +const syncByte = '\x47' + +// Errors +var ( + ErrNoMorePackets = errors.New("astits: no more packets") + ErrPacketMustStartWithASyncByte = errors.New("astits: packet must start with a sync byte") +) + +// Demuxer represents a demuxer +// https://en.wikipedia.org/wiki/MPEG_transport_stream +// http://seidl.cs.vsb.cz/download/dvb/DVB_Poster.pdf +// http://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.13.01_40/en_300468v011301o.pdf +type Demuxer struct { + ctx context.Context + dataBuffer []*DemuxerData + optPacketSize int + optPacketsParser PacketsParser + packetBuffer *packetBuffer + packetPool *packetPool + programMap programMap + r io.Reader +} + +// PacketsParser represents an object capable of parsing a set of packets containing a unique payload spanning over those packets +// Use the skip returned argument to indicate whether the default process should still be executed on the set of packets +type PacketsParser func(ps []*Packet) (ds []*DemuxerData, skip bool, err error) + +// NewDemuxer creates a new transport stream based on a reader +func NewDemuxer(ctx context.Context, r io.Reader, opts ...func(*Demuxer)) (d *Demuxer) { + // Init + d = &Demuxer{ + ctx: ctx, + packetPool: newPacketPool(), + programMap: newProgramMap(), + r: r, + } + + // Apply options + for _, opt := range opts { + opt(d) + } + + return +} + +// DemuxerOptPacketSize returns the option to set the packet size +func DemuxerOptPacketSize(packetSize int) func(*Demuxer) { + return func(d *Demuxer) { + d.optPacketSize = packetSize + } +} + +// DemuxerOptPacketsParser returns the option to set the packets parser +func DemuxerOptPacketsParser(p PacketsParser) func(*Demuxer) { + return func(d *Demuxer) { + d.optPacketsParser = p + } +} + +// NextPacket retrieves the next packet +func (dmx *Demuxer) NextPacket() (p *Packet, err error) { + // Check ctx error + // TODO Handle ctx error another way since if the read blocks, everything blocks + // Maybe execute 
everything in a goroutine and listen the ctx channel in the same for loop + if err = dmx.ctx.Err(); err != nil { + return + } + + // Create packet buffer if not exists + if dmx.packetBuffer == nil { + if dmx.packetBuffer, err = newPacketBuffer(dmx.r, dmx.optPacketSize); err != nil { + err = fmt.Errorf("astits: creating packet buffer failed: %w", err) + return + } + } + + // Fetch next packet from buffer + if p, err = dmx.packetBuffer.next(); err != nil { + if err != ErrNoMorePackets { + err = fmt.Errorf("astits: fetching next packet from buffer failed: %w", err) + } + return + } + return +} + +// NextData retrieves the next data +func (dmx *Demuxer) NextData() (d *DemuxerData, err error) { + // Check data buffer + if len(dmx.dataBuffer) > 0 { + d = dmx.dataBuffer[0] + dmx.dataBuffer = dmx.dataBuffer[1:] + return + } + + // Loop through packets + var p *Packet + var ps []*Packet + var ds []*DemuxerData + for { + // Get next packet + if p, err = dmx.NextPacket(); err != nil { + // If the end of the stream has been reached, we dump the packet pool + if err == ErrNoMorePackets { + for { + // Dump packet pool + if ps = dmx.packetPool.dump(); len(ps) == 0 { + break + } + + // Parse data + if ds, err = parseData(ps, dmx.optPacketsParser, dmx.programMap); err != nil { + // We need to silence this error as there may be some incomplete data here + // We still want to try to parse all packets, in case final data is complete + continue + } + + // Update data + if d = dmx.updateData(ds); d != nil { + return + } + } + return + } + err = fmt.Errorf("astits: fetching next packet failed: %w", err) + return + } + + // Add packet to the pool + if ps = dmx.packetPool.add(p); len(ps) == 0 { + continue + } + + // Parse data + if ds, err = parseData(ps, dmx.optPacketsParser, dmx.programMap); err != nil { + err = fmt.Errorf("astits: building new data failed: %w", err) + return + } + + // Update data + if d = dmx.updateData(ds); d != nil { + return + } + } +} + +func (dmx *Demuxer) 
updateData(ds []*DemuxerData) (d *DemuxerData) { + // Check whether there is data to be processed + if len(ds) > 0 { + // Process data + d = ds[0] + dmx.dataBuffer = append(dmx.dataBuffer, ds[1:]...) + + // Update program map + for _, v := range ds { + if v.PAT != nil { + for _, pgm := range v.PAT.Programs { + // Program number 0 is reserved to NIT + if pgm.ProgramNumber > 0 { + dmx.programMap.set(pgm.ProgramMapID, pgm.ProgramNumber) + } + } + } + } + } + return +} + +// Rewind rewinds the demuxer reader +func (dmx *Demuxer) Rewind() (n int64, err error) { + dmx.dataBuffer = []*DemuxerData{} + dmx.packetBuffer = nil + dmx.packetPool = newPacketPool() + if n, err = rewind(dmx.r); err != nil { + err = fmt.Errorf("astits: rewinding reader failed: %w", err) + return + } + return +} diff --git a/vendor/github.com/asticode/go-astits/descriptor.go b/vendor/github.com/asticode/go-astits/descriptor.go new file mode 100644 index 000000000..943604a46 --- /dev/null +++ b/vendor/github.com/asticode/go-astits/descriptor.go @@ -0,0 +1,2162 @@ +package astits + +import ( + "fmt" + "time" + + "github.com/asticode/go-astikit" +) + +// Audio types +// Page: 683 | https://books.google.fr/books?id=6dgWB3-rChYC&printsec=frontcover&hl=fr +const ( + AudioTypeCleanEffects = 0x1 + AudioTypeHearingImpaired = 0x2 + AudioTypeVisualImpairedCommentary = 0x3 +) + +// Data stream alignments +// Page: 85 | Chapter:2.6.11 | Link: http://ecee.colorado.edu/~ecen5653/ecen5653/papers/iso13818-1.pdf +const ( + DataStreamAligmentAudioSyncWord = 0x1 + DataStreamAligmentVideoSliceOrAccessUnit = 0x1 + DataStreamAligmentVideoAccessUnit = 0x2 + DataStreamAligmentVideoGOPOrSEQ = 0x3 + DataStreamAligmentVideoSEQ = 0x4 +) + +// Descriptor tags +// Chapter: 6.1 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +const ( + DescriptorTagAC3 = 0x6a + DescriptorTagAVCVideo = 0x28 + DescriptorTagComponent = 0x50 + DescriptorTagContent = 0x54 + 
DescriptorTagDataStreamAlignment = 0x6 + DescriptorTagEnhancedAC3 = 0x7a + DescriptorTagExtendedEvent = 0x4e + DescriptorTagExtension = 0x7f + DescriptorTagISO639LanguageAndAudioType = 0xa + DescriptorTagLocalTimeOffset = 0x58 + DescriptorTagMaximumBitrate = 0xe + DescriptorTagNetworkName = 0x40 + DescriptorTagParentalRating = 0x55 + DescriptorTagPrivateDataIndicator = 0xf + DescriptorTagPrivateDataSpecifier = 0x5f + DescriptorTagRegistration = 0x5 + DescriptorTagService = 0x48 + DescriptorTagShortEvent = 0x4d + DescriptorTagStreamIdentifier = 0x52 + DescriptorTagSubtitling = 0x59 + DescriptorTagTeletext = 0x56 + DescriptorTagVBIData = 0x45 + DescriptorTagVBITeletext = 0x46 +) + +// Descriptor extension tags +// Chapter: 6.3 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +const ( + DescriptorTagExtensionSupplementaryAudio = 0x6 +) + +// Service types +// Chapter: 6.2.33 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +const ( + ServiceTypeDigitalTelevisionService = 0x1 +) + +// Teletext types +// Chapter: 6.2.43 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +const ( + TeletextTypeAdditionalInformationPage = 0x3 + TeletextTypeInitialTeletextPage = 0x1 + TeletextTypeProgramSchedulePage = 0x4 + TeletextTypeTeletextSubtitlePage = 0x2 + TeletextTypeTeletextSubtitlePageForHearingImpairedPeople = 0x5 +) + +// VBI data service id +// Chapter: 6.2.47 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +const ( + VBIDataServiceIDClosedCaptioning = 0x6 + VBIDataServiceIDEBUTeletext = 0x1 + VBIDataServiceIDInvertedTeletext = 0x2 + VBIDataServiceIDMonochrome442Samples = 0x7 + VBIDataServiceIDVPS = 0x4 + VBIDataServiceIDWSS = 0x5 +) + +// Descriptor represents a descriptor +// TODO Handle UTF8 +type Descriptor struct { + AC3 *DescriptorAC3 + AVCVideo *DescriptorAVCVideo + 
Component *DescriptorComponent + Content *DescriptorContent + DataStreamAlignment *DescriptorDataStreamAlignment + EnhancedAC3 *DescriptorEnhancedAC3 + ExtendedEvent *DescriptorExtendedEvent + Extension *DescriptorExtension + ISO639LanguageAndAudioType *DescriptorISO639LanguageAndAudioType + Length uint8 + LocalTimeOffset *DescriptorLocalTimeOffset + MaximumBitrate *DescriptorMaximumBitrate + NetworkName *DescriptorNetworkName + ParentalRating *DescriptorParentalRating + PrivateDataIndicator *DescriptorPrivateDataIndicator + PrivateDataSpecifier *DescriptorPrivateDataSpecifier + Registration *DescriptorRegistration + Service *DescriptorService + ShortEvent *DescriptorShortEvent + StreamIdentifier *DescriptorStreamIdentifier + Subtitling *DescriptorSubtitling + Tag uint8 // the tag defines the structure of the contained data following the descriptor length. + Teletext *DescriptorTeletext + Unknown *DescriptorUnknown + UserDefined []byte + VBIData *DescriptorVBIData + VBITeletext *DescriptorTeletext +} + +// DescriptorAC3 represents an AC3 descriptor +// Chapter: Annex D | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +type DescriptorAC3 struct { + AdditionalInfo []byte + ASVC uint8 + BSID uint8 + ComponentType uint8 + HasASVC bool + HasBSID bool + HasComponentType bool + HasMainID bool + MainID uint8 +} + +func newDescriptorAC3(i *astikit.BytesIterator, offsetEnd int) (d *DescriptorAC3, err error) { + // Get next byte + var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Create descriptor + d = &DescriptorAC3{ + HasASVC: uint8(b&0x10) > 0, + HasBSID: uint8(b&0x40) > 0, + HasComponentType: uint8(b&0x80) > 0, + HasMainID: uint8(b&0x20) > 0, + } + + // Component type + if d.HasComponentType { + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + d.ComponentType = 
uint8(b) + } + + // BSID + if d.HasBSID { + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + d.BSID = uint8(b) + } + + // Main ID + if d.HasMainID { + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + d.MainID = uint8(b) + } + + // ASVC + if d.HasASVC { + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + d.ASVC = uint8(b) + } + + // Additional info + if i.Offset() < offsetEnd { + if d.AdditionalInfo, err = i.NextBytes(offsetEnd - i.Offset()); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + } + return +} + +// DescriptorAVCVideo represents an AVC video descriptor +// No doc found unfortunately, basing the implementation on https://github.com/gfto/bitstream/blob/master/mpeg/psi/desc_28.h +type DescriptorAVCVideo struct { + AVC24HourPictureFlag bool + AVCStillPresent bool + CompatibleFlags uint8 + ConstraintSet0Flag bool + ConstraintSet1Flag bool + ConstraintSet2Flag bool + LevelIDC uint8 + ProfileIDC uint8 +} + +func newDescriptorAVCVideo(i *astikit.BytesIterator) (d *DescriptorAVCVideo, err error) { + // Init + d = &DescriptorAVCVideo{} + + // Get next byte + var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Profile idc + d.ProfileIDC = uint8(b) + + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Flags + d.ConstraintSet0Flag = b&0x80 > 0 + d.ConstraintSet1Flag = b&0x40 > 0 + d.ConstraintSet2Flag = b&0x20 > 0 + d.CompatibleFlags = b & 0x1f + + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Level idc + d.LevelIDC = uint8(b) + + // Get next byte + 
if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // AVC still present + d.AVCStillPresent = b&0x80 > 0 + + // AVC 24 hour picture flag + d.AVC24HourPictureFlag = b&0x40 > 0 + return +} + +// DescriptorComponent represents a component descriptor +// Chapter: 6.2.8 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +type DescriptorComponent struct { + ComponentTag uint8 + ComponentType uint8 + ISO639LanguageCode []byte + StreamContent uint8 + StreamContentExt uint8 + Text []byte +} + +func newDescriptorComponent(i *astikit.BytesIterator, offsetEnd int) (d *DescriptorComponent, err error) { + // Init + d = &DescriptorComponent{} + + // Get next byte + var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Stream content ext + d.StreamContentExt = uint8(b >> 4) + + // Stream content + d.StreamContent = uint8(b & 0xf) + + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Component type + d.ComponentType = uint8(b) + + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Component tag + d.ComponentTag = uint8(b) + + // ISO639 language code + if d.ISO639LanguageCode, err = i.NextBytes(3); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Text + if i.Offset() < offsetEnd { + if d.Text, err = i.NextBytes(offsetEnd - i.Offset()); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + } + return +} + +// DescriptorContent represents a content descriptor +// Chapter: 6.2.9 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +type DescriptorContent struct { + Items 
[]*DescriptorContentItem +} + +// DescriptorContentItem represents a content item descriptor +// Chapter: 6.2.9 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +type DescriptorContentItem struct { + ContentNibbleLevel1 uint8 + ContentNibbleLevel2 uint8 + UserByte uint8 +} + +func newDescriptorContent(i *astikit.BytesIterator, offsetEnd int) (d *DescriptorContent, err error) { + // Init + d = &DescriptorContent{} + + // Add items + for i.Offset() < offsetEnd { + // Get next bytes + var bs []byte + if bs, err = i.NextBytesNoCopy(2); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Append item + d.Items = append(d.Items, &DescriptorContentItem{ + ContentNibbleLevel1: uint8(bs[0] >> 4), + ContentNibbleLevel2: uint8(bs[0] & 0xf), + UserByte: uint8(bs[1]), + }) + } + return +} + +// DescriptorDataStreamAlignment represents a data stream alignment descriptor +type DescriptorDataStreamAlignment struct { + Type uint8 +} + +func newDescriptorDataStreamAlignment(i *astikit.BytesIterator) (d *DescriptorDataStreamAlignment, err error) { + var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + d = &DescriptorDataStreamAlignment{Type: uint8(b)} + return +} + +// DescriptorEnhancedAC3 represents an enhanced AC3 descriptor +// Chapter: Annex D | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +type DescriptorEnhancedAC3 struct { + AdditionalInfo []byte + ASVC uint8 + BSID uint8 + ComponentType uint8 + HasASVC bool + HasBSID bool + HasComponentType bool + HasMainID bool + HasSubStream1 bool + HasSubStream2 bool + HasSubStream3 bool + MainID uint8 + MixInfoExists bool + SubStream1 uint8 + SubStream2 uint8 + SubStream3 uint8 +} + +func newDescriptorEnhancedAC3(i *astikit.BytesIterator, offsetEnd int) (d *DescriptorEnhancedAC3, err error) { + // Get next byte + 
var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Create descriptor + d = &DescriptorEnhancedAC3{ + HasASVC: uint8(b&0x10) > 0, + HasBSID: uint8(b&0x40) > 0, + HasComponentType: uint8(b&0x80) > 0, + HasMainID: uint8(b&0x20) > 0, + HasSubStream1: uint8(b&0x4) > 0, + HasSubStream2: uint8(b&0x2) > 0, + HasSubStream3: uint8(b&0x1) > 0, + MixInfoExists: uint8(b&0x8) > 0, + } + + // Component type + if d.HasComponentType { + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + d.ComponentType = uint8(b) + } + + // BSID + if d.HasBSID { + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + d.BSID = uint8(b) + } + + // Main ID + if d.HasMainID { + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + d.MainID = uint8(b) + } + + // ASVC + if d.HasASVC { + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + d.ASVC = uint8(b) + } + + // Substream 1 + if d.HasSubStream1 { + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + d.SubStream1 = uint8(b) + } + + // Substream 2 + if d.HasSubStream2 { + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + d.SubStream2 = uint8(b) + } + + // Substream 3 + if d.HasSubStream3 { + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + d.SubStream3 = uint8(b) + } + + // Additional info + if i.Offset() < offsetEnd { + if d.AdditionalInfo, err = i.NextBytes(offsetEnd - 
i.Offset()); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + } + return +} + +// DescriptorExtendedEvent represents an extended event descriptor +// Chapter: 6.2.15 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +type DescriptorExtendedEvent struct { + ISO639LanguageCode []byte + Items []*DescriptorExtendedEventItem + LastDescriptorNumber uint8 + Number uint8 + Text []byte +} + +// DescriptorExtendedEventItem represents an extended event item descriptor +// Chapter: 6.2.15 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +type DescriptorExtendedEventItem struct { + Content []byte + Description []byte +} + +func newDescriptorExtendedEvent(i *astikit.BytesIterator) (d *DescriptorExtendedEvent, err error) { + // Init + d = &DescriptorExtendedEvent{} + + // Get next byte + var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Number + d.Number = uint8(b >> 4) + + // Last descriptor number + d.LastDescriptorNumber = uint8(b & 0xf) + + // ISO639 language code + if d.ISO639LanguageCode, err = i.NextBytes(3); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Items length + itemsLength := int(b) + + // Items + offsetEnd := i.Offset() + itemsLength + for i.Offset() < offsetEnd { + // Create item + var item *DescriptorExtendedEventItem + if item, err = newDescriptorExtendedEventItem(i); err != nil { + err = fmt.Errorf("astits: creating extended event item failed: %w", err) + return + } + + // Append item + d.Items = append(d.Items, item) + } + + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", 
err) + return + } + + // Text length + textLength := int(b) + + // Text + if d.Text, err = i.NextBytes(textLength); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + return +} + +func newDescriptorExtendedEventItem(i *astikit.BytesIterator) (d *DescriptorExtendedEventItem, err error) { + // Init + d = &DescriptorExtendedEventItem{} + + // Get next byte + var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Description length + descriptionLength := int(b) + + // Description + if d.Description, err = i.NextBytes(descriptionLength); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Content length + contentLength := int(b) + + // Content + if d.Content, err = i.NextBytes(contentLength); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + return +} + +// DescriptorExtension represents an extension descriptor +// Chapter: 6.2.16 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +type DescriptorExtension struct { + SupplementaryAudio *DescriptorExtensionSupplementaryAudio + Tag uint8 + Unknown *[]byte +} + +func newDescriptorExtension(i *astikit.BytesIterator, offsetEnd int) (d *DescriptorExtension, err error) { + // Get next byte + var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Create descriptor + d = &DescriptorExtension{Tag: uint8(b)} + + // Switch on tag + switch d.Tag { + case DescriptorTagExtensionSupplementaryAudio: + if d.SupplementaryAudio, err = newDescriptorExtensionSupplementaryAudio(i, offsetEnd); err != nil { + err = fmt.Errorf("astits: parsing extension supplementary 
audio descriptor failed: %w", err) + return + } + default: + // Get next bytes + var b []byte + if b, err = i.NextBytes(offsetEnd - i.Offset()); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Update unknown + d.Unknown = &b + } + return +} + +// DescriptorExtensionSupplementaryAudio represents a supplementary audio extension descriptor +// Chapter: 6.4.10 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +type DescriptorExtensionSupplementaryAudio struct { + EditorialClassification uint8 + HasLanguageCode bool + LanguageCode []byte + MixType bool + PrivateData []byte +} + +func newDescriptorExtensionSupplementaryAudio(i *astikit.BytesIterator, offsetEnd int) (d *DescriptorExtensionSupplementaryAudio, err error) { + // Get next byte + var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Init + d = &DescriptorExtensionSupplementaryAudio{ + EditorialClassification: uint8(b >> 2 & 0x1f), + HasLanguageCode: b&0x1 > 0, + MixType: b&0x80 > 0, + } + + // Language code + if d.HasLanguageCode { + if d.LanguageCode, err = i.NextBytes(3); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + } + + // Private data + if i.Offset() < offsetEnd { + if d.PrivateData, err = i.NextBytes(offsetEnd - i.Offset()); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + } + return +} + +// DescriptorISO639LanguageAndAudioType represents an ISO639 language descriptor +// https://github.com/gfto/bitstream/blob/master/mpeg/psi/desc_0a.h +// FIXME (barbashov) according to Chapter 2.6.18 ISO/IEC 13818-1:2015 there could be not one, but multiple such descriptors +type DescriptorISO639LanguageAndAudioType struct { + Language []byte + Type uint8 +} + +// In some actual cases, the length is 3 and the language is described in only 2 bytes 
+func newDescriptorISO639LanguageAndAudioType(i *astikit.BytesIterator, offsetEnd int) (d *DescriptorISO639LanguageAndAudioType, err error) { + // Get next bytes + var bs []byte + if bs, err = i.NextBytes(offsetEnd - i.Offset()); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Create descriptor + d = &DescriptorISO639LanguageAndAudioType{ + Language: bs[0 : len(bs)-1], + Type: uint8(bs[len(bs)-1]), + } + return +} + +// DescriptorLocalTimeOffset represents a local time offset descriptor +// Chapter: 6.2.20 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +type DescriptorLocalTimeOffset struct { + Items []*DescriptorLocalTimeOffsetItem +} + +// DescriptorLocalTimeOffsetItem represents a local time offset item descriptor +// Chapter: 6.2.20 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +type DescriptorLocalTimeOffsetItem struct { + CountryCode []byte + CountryRegionID uint8 + LocalTimeOffset time.Duration + LocalTimeOffsetPolarity bool + NextTimeOffset time.Duration + TimeOfChange time.Time +} + +func newDescriptorLocalTimeOffset(i *astikit.BytesIterator, offsetEnd int) (d *DescriptorLocalTimeOffset, err error) { + // Init + d = &DescriptorLocalTimeOffset{} + + // Add items + for i.Offset() < offsetEnd { + // Create item + itm := &DescriptorLocalTimeOffsetItem{} + + // Country code + if itm.CountryCode, err = i.NextBytes(3); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Get next byte + var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Country region ID + itm.CountryRegionID = uint8(b >> 2) + + // Local time offset polarity + itm.LocalTimeOffsetPolarity = b&0x1 > 0 + + // Local time offset + if itm.LocalTimeOffset, err = parseDVBDurationMinutes(i); err != nil { + err = 
fmt.Errorf("astits: parsing DVB durationminutes failed: %w", err) + return + } + + // Time of change + if itm.TimeOfChange, err = parseDVBTime(i); err != nil { + err = fmt.Errorf("astits: parsing DVB time failed: %w", err) + return + } + + // Next time offset + if itm.NextTimeOffset, err = parseDVBDurationMinutes(i); err != nil { + err = fmt.Errorf("astits: parsing DVB duration minutes failed: %w", err) + return + } + + // Append item + d.Items = append(d.Items, itm) + } + return +} + +// DescriptorMaximumBitrate represents a maximum bitrate descriptor +type DescriptorMaximumBitrate struct { + Bitrate uint32 // In bytes/second +} + +func newDescriptorMaximumBitrate(i *astikit.BytesIterator) (d *DescriptorMaximumBitrate, err error) { + // Get next bytes + var bs []byte + if bs, err = i.NextBytesNoCopy(3); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Create descriptor + d = &DescriptorMaximumBitrate{Bitrate: (uint32(bs[0]&0x3f)<<16 | uint32(bs[1])<<8 | uint32(bs[2])) * 50} + return +} + +// DescriptorNetworkName represents a network name descriptor +// Chapter: 6.2.27 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +type DescriptorNetworkName struct { + Name []byte +} + +func newDescriptorNetworkName(i *astikit.BytesIterator, offsetEnd int) (d *DescriptorNetworkName, err error) { + // Create descriptor + d = &DescriptorNetworkName{} + + // Name + if d.Name, err = i.NextBytes(offsetEnd - i.Offset()); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + return +} + +// DescriptorParentalRating represents a parental rating descriptor +// Chapter: 6.2.28 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +type DescriptorParentalRating struct { + Items []*DescriptorParentalRatingItem +} + +// DescriptorParentalRatingItem represents a parental rating item descriptor +// Chapter: 
6.2.28 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +type DescriptorParentalRatingItem struct { + CountryCode []byte + Rating uint8 +} + +// MinimumAge returns the minimum age for the parental rating +func (d DescriptorParentalRatingItem) MinimumAge() int { + // Undefined or user defined ratings + if d.Rating == 0 || d.Rating > 0x10 { + return 0 + } + return int(d.Rating) + 3 +} + +func newDescriptorParentalRating(i *astikit.BytesIterator, offsetEnd int) (d *DescriptorParentalRating, err error) { + // Create descriptor + d = &DescriptorParentalRating{} + + // Add items + for i.Offset() < offsetEnd { + // Get next bytes + var bs []byte + if bs, err = i.NextBytes(4); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Append item + d.Items = append(d.Items, &DescriptorParentalRatingItem{ + CountryCode: bs[:3], + Rating: uint8(bs[3]), + }) + } + return +} + +// DescriptorPrivateDataIndicator represents a private data Indicator descriptor +type DescriptorPrivateDataIndicator struct { + Indicator uint32 +} + +func newDescriptorPrivateDataIndicator(i *astikit.BytesIterator) (d *DescriptorPrivateDataIndicator, err error) { + // Get next bytes + var bs []byte + if bs, err = i.NextBytesNoCopy(4); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Create descriptor + d = &DescriptorPrivateDataIndicator{Indicator: uint32(bs[0])<<24 | uint32(bs[1])<<16 | uint32(bs[2])<<8 | uint32(bs[3])} + return +} + +// DescriptorPrivateDataSpecifier represents a private data specifier descriptor +type DescriptorPrivateDataSpecifier struct { + Specifier uint32 +} + +func newDescriptorPrivateDataSpecifier(i *astikit.BytesIterator) (d *DescriptorPrivateDataSpecifier, err error) { + // Get next bytes + var bs []byte + if bs, err = i.NextBytesNoCopy(4); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + 
// Create descriptor + d = &DescriptorPrivateDataSpecifier{Specifier: uint32(bs[0])<<24 | uint32(bs[1])<<16 | uint32(bs[2])<<8 | uint32(bs[3])} + return +} + +// DescriptorRegistration represents a registration descriptor +// Page: 84 | http://ecee.colorado.edu/~ecen5653/ecen5653/papers/iso13818-1.pdf +type DescriptorRegistration struct { + AdditionalIdentificationInfo []byte + FormatIdentifier uint32 +} + +func newDescriptorRegistration(i *astikit.BytesIterator, offsetEnd int) (d *DescriptorRegistration, err error) { + // Get next bytes + var bs []byte + if bs, err = i.NextBytesNoCopy(4); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Create descriptor + d = &DescriptorRegistration{FormatIdentifier: uint32(bs[0])<<24 | uint32(bs[1])<<16 | uint32(bs[2])<<8 | uint32(bs[3])} + + // Additional identification info + if i.Offset() < offsetEnd { + if d.AdditionalIdentificationInfo, err = i.NextBytes(offsetEnd - i.Offset()); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + } + return +} + +// DescriptorService represents a service descriptor +// Chapter: 6.2.33 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +type DescriptorService struct { + Name []byte + Provider []byte + Type uint8 +} + +func newDescriptorService(i *astikit.BytesIterator) (d *DescriptorService, err error) { + // Get next byte + var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Create descriptor + d = &DescriptorService{Type: uint8(b)} + + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Provider length + providerLength := int(b) + + // Provider + if d.Provider, err = i.NextBytes(providerLength); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + 
return + } + + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Name length + nameLength := int(b) + + // Name + if d.Name, err = i.NextBytes(nameLength); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + return +} + +// DescriptorShortEvent represents a short event descriptor +// Chapter: 6.2.37 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +type DescriptorShortEvent struct { + EventName []byte + Language []byte + Text []byte +} + +func newDescriptorShortEvent(i *astikit.BytesIterator) (d *DescriptorShortEvent, err error) { + // Create descriptor + d = &DescriptorShortEvent{} + + // Language + if d.Language, err = i.NextBytes(3); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Get next byte + var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Event length + eventLength := int(b) + + // Event name + if d.EventName, err = i.NextBytes(eventLength); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Text length + textLength := int(b) + + // Text + if d.Text, err = i.NextBytes(textLength); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + return +} + +// DescriptorStreamIdentifier represents a stream identifier descriptor +// Chapter: 6.2.39 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +type DescriptorStreamIdentifier struct { + ComponentTag uint8 +} + +func newDescriptorStreamIdentifier(i *astikit.BytesIterator) (d *DescriptorStreamIdentifier, err error) { + var b 
byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + d = &DescriptorStreamIdentifier{ComponentTag: uint8(b)} + return +} + +// DescriptorSubtitling represents a subtitling descriptor +// Chapter: 6.2.41 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +type DescriptorSubtitling struct { + Items []*DescriptorSubtitlingItem +} + +// DescriptorSubtitlingItem represents subtitling descriptor item +// Chapter: 6.2.41 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +type DescriptorSubtitlingItem struct { + AncillaryPageID uint16 + CompositionPageID uint16 + Language []byte + Type uint8 +} + +func newDescriptorSubtitling(i *astikit.BytesIterator, offsetEnd int) (d *DescriptorSubtitling, err error) { + // Create descriptor + d = &DescriptorSubtitling{} + + // Loop + for i.Offset() < offsetEnd { + // Create item + itm := &DescriptorSubtitlingItem{} + + // Language + if itm.Language, err = i.NextBytes(3); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Get next byte + var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Type + itm.Type = uint8(b) + + // Get next bytes + var bs []byte + if bs, err = i.NextBytesNoCopy(2); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Composition page ID + itm.CompositionPageID = uint16(bs[0])<<8 | uint16(bs[1]) + + // Get next bytes + if bs, err = i.NextBytesNoCopy(2); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Ancillary page ID + itm.AncillaryPageID = uint16(bs[0])<<8 | uint16(bs[1]) + + // Append item + d.Items = append(d.Items, itm) + } + return +} + +// DescriptorTeletext represents a teletext descriptor +// Chapter: 6.2.43 | 
Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +type DescriptorTeletext struct { + Items []*DescriptorTeletextItem +} + +// DescriptorTeletextItem represents a teletext descriptor item +// Chapter: 6.2.43 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +type DescriptorTeletextItem struct { + Language []byte + Magazine uint8 + Page uint8 + Type uint8 +} + +func newDescriptorTeletext(i *astikit.BytesIterator, offsetEnd int) (d *DescriptorTeletext, err error) { + // Create descriptor + d = &DescriptorTeletext{} + + // Loop + for i.Offset() < offsetEnd { + // Create item + itm := &DescriptorTeletextItem{} + + // Language + if itm.Language, err = i.NextBytes(3); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Get next byte + var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Type + itm.Type = uint8(b) >> 3 + + // Magazine + itm.Magazine = uint8(b & 0x7) + + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Page + itm.Page = uint8(b)>>4*10 + uint8(b&0xf) + + // Append item + d.Items = append(d.Items, itm) + } + return +} + +type DescriptorUnknown struct { + Content []byte + Tag uint8 +} + +func newDescriptorUnknown(i *astikit.BytesIterator, tag, length uint8) (d *DescriptorUnknown, err error) { + // Create descriptor + d = &DescriptorUnknown{Tag: tag} + + // Get next bytes + if d.Content, err = i.NextBytes(int(length)); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + return +} + +// DescriptorVBIData represents a VBI data descriptor +// Chapter: 6.2.47 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +type DescriptorVBIData struct { + Services 
[]*DescriptorVBIDataService +} + +// DescriptorVBIDataService represents a vbi data service descriptor +// Chapter: 6.2.47 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +type DescriptorVBIDataService struct { + DataServiceID uint8 + Descriptors []*DescriptorVBIDataDescriptor +} + +// DescriptorVBIDataItem represents a vbi data descriptor item +// Chapter: 6.2.47 | Link: https://www.etsi.org/deliver/etsi_en/300400_300499/300468/01.15.01_60/en_300468v011501p.pdf +type DescriptorVBIDataDescriptor struct { + FieldParity bool + LineOffset uint8 +} + +func newDescriptorVBIData(i *astikit.BytesIterator, offsetEnd int) (d *DescriptorVBIData, err error) { + // Create descriptor + d = &DescriptorVBIData{} + + // Loop + for i.Offset() < offsetEnd { + // Create service + srv := &DescriptorVBIDataService{} + + // Get next byte + var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Data service ID + srv.DataServiceID = uint8(b) + + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Data service descriptor length + dataServiceDescriptorLength := int(b) + + // Data service descriptor + offsetDataEnd := i.Offset() + dataServiceDescriptorLength + for i.Offset() < offsetDataEnd { + if srv.DataServiceID == VBIDataServiceIDClosedCaptioning || + srv.DataServiceID == VBIDataServiceIDEBUTeletext || + srv.DataServiceID == VBIDataServiceIDInvertedTeletext || + srv.DataServiceID == VBIDataServiceIDMonochrome442Samples || + srv.DataServiceID == VBIDataServiceIDVPS || + srv.DataServiceID == VBIDataServiceIDWSS { + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Append data + srv.Descriptors = append(srv.Descriptors, &DescriptorVBIDataDescriptor{ + FieldParity: b&0x20 > 0, + 
LineOffset: uint8(b & 0x1f), + }) + } + } + + // Append service + d.Services = append(d.Services, srv) + } + return +} + +// parseDescriptors parses descriptors +func parseDescriptors(i *astikit.BytesIterator) (o []*Descriptor, err error) { + // Get next 2 bytes + var bs []byte + if bs, err = i.NextBytesNoCopy(2); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Get length + length := int(uint16(bs[0]&0xf)<<8 | uint16(bs[1])) + + // Loop + if length > 0 { + offsetEnd := i.Offset() + length + for i.Offset() < offsetEnd { + // Get next 2 bytes + if bs, err = i.NextBytesNoCopy(2); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Create descriptor + d := &Descriptor{ + Length: uint8(bs[1]), + Tag: uint8(bs[0]), + } + + // Parse data + if d.Length > 0 { + // Unfortunately there's no way to be sure the real descriptor length is the same as the one indicated + // previously therefore we must fetch bytes in descriptor functions and seek at the end + offsetDescriptorEnd := i.Offset() + int(d.Length) + + // User defined + if d.Tag >= 0x80 && d.Tag <= 0xfe { + // Get next bytes + if d.UserDefined, err = i.NextBytes(int(d.Length)); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + } else { + // Switch on tag + switch d.Tag { + case DescriptorTagAC3: + if d.AC3, err = newDescriptorAC3(i, offsetDescriptorEnd); err != nil { + err = fmt.Errorf("astits: parsing AC3 descriptor failed: %w", err) + return + } + case DescriptorTagAVCVideo: + if d.AVCVideo, err = newDescriptorAVCVideo(i); err != nil { + err = fmt.Errorf("astits: parsing AVC Video descriptor failed: %w", err) + return + } + case DescriptorTagComponent: + if d.Component, err = newDescriptorComponent(i, offsetDescriptorEnd); err != nil { + err = fmt.Errorf("astits: parsing Component descriptor failed: %w", err) + return + } + case DescriptorTagContent: + if d.Content, err = 
newDescriptorContent(i, offsetDescriptorEnd); err != nil { + err = fmt.Errorf("astits: parsing Content descriptor failed: %w", err) + return + } + case DescriptorTagDataStreamAlignment: + if d.DataStreamAlignment, err = newDescriptorDataStreamAlignment(i); err != nil { + err = fmt.Errorf("astits: parsing Data Stream Alignment descriptor failed: %w", err) + return + } + case DescriptorTagEnhancedAC3: + if d.EnhancedAC3, err = newDescriptorEnhancedAC3(i, offsetDescriptorEnd); err != nil { + err = fmt.Errorf("astits: parsing Enhanced AC3 descriptor failed: %w", err) + return + } + case DescriptorTagExtendedEvent: + if d.ExtendedEvent, err = newDescriptorExtendedEvent(i); err != nil { + err = fmt.Errorf("astits: parsing Extended event descriptor failed: %w", err) + return + } + case DescriptorTagExtension: + if d.Extension, err = newDescriptorExtension(i, offsetDescriptorEnd); err != nil { + err = fmt.Errorf("astits: parsing Extension descriptor failed: %w", err) + return + } + case DescriptorTagISO639LanguageAndAudioType: + if d.ISO639LanguageAndAudioType, err = newDescriptorISO639LanguageAndAudioType(i, offsetDescriptorEnd); err != nil { + err = fmt.Errorf("astits: parsing ISO639 Language and Audio Type descriptor failed: %w", err) + return + } + case DescriptorTagLocalTimeOffset: + if d.LocalTimeOffset, err = newDescriptorLocalTimeOffset(i, offsetDescriptorEnd); err != nil { + err = fmt.Errorf("astits: parsing Local Time Offset descriptor failed: %w", err) + return + } + case DescriptorTagMaximumBitrate: + if d.MaximumBitrate, err = newDescriptorMaximumBitrate(i); err != nil { + err = fmt.Errorf("astits: parsing Maximum Bitrate descriptor failed: %w", err) + return + } + case DescriptorTagNetworkName: + if d.NetworkName, err = newDescriptorNetworkName(i, offsetDescriptorEnd); err != nil { + err = fmt.Errorf("astits: parsing Network Name descriptor failed: %w", err) + return + } + case DescriptorTagParentalRating: + if d.ParentalRating, err = 
newDescriptorParentalRating(i, offsetDescriptorEnd); err != nil { + err = fmt.Errorf("astits: parsing Parental Rating descriptor failed: %w", err) + return + } + case DescriptorTagPrivateDataIndicator: + if d.PrivateDataIndicator, err = newDescriptorPrivateDataIndicator(i); err != nil { + err = fmt.Errorf("astits: parsing Private Data Indicator descriptor failed: %w", err) + return + } + case DescriptorTagPrivateDataSpecifier: + if d.PrivateDataSpecifier, err = newDescriptorPrivateDataSpecifier(i); err != nil { + err = fmt.Errorf("astits: parsing Private Data Specifier descriptor failed: %w", err) + return + } + case DescriptorTagRegistration: + if d.Registration, err = newDescriptorRegistration(i, offsetDescriptorEnd); err != nil { + err = fmt.Errorf("astits: parsing Registration descriptor failed: %w", err) + return + } + case DescriptorTagService: + if d.Service, err = newDescriptorService(i); err != nil { + err = fmt.Errorf("astits: parsing Service descriptor failed: %w", err) + return + } + case DescriptorTagShortEvent: + if d.ShortEvent, err = newDescriptorShortEvent(i); err != nil { + err = fmt.Errorf("astits: parsing Short Event descriptor failed: %w", err) + return + } + case DescriptorTagStreamIdentifier: + if d.StreamIdentifier, err = newDescriptorStreamIdentifier(i); err != nil { + err = fmt.Errorf("astits: parsing Stream Identifier descriptor failed: %w", err) + return + } + case DescriptorTagSubtitling: + if d.Subtitling, err = newDescriptorSubtitling(i, offsetDescriptorEnd); err != nil { + err = fmt.Errorf("astits: parsing Subtitling descriptor failed: %w", err) + return + } + case DescriptorTagTeletext: + if d.Teletext, err = newDescriptorTeletext(i, offsetDescriptorEnd); err != nil { + err = fmt.Errorf("astits: parsing Teletext descriptor failed: %w", err) + return + } + case DescriptorTagVBIData: + if d.VBIData, err = newDescriptorVBIData(i, offsetDescriptorEnd); err != nil { + err = fmt.Errorf("astits: parsing VBI Date descriptor failed: %w", 
err) + return + } + case DescriptorTagVBITeletext: + if d.VBITeletext, err = newDescriptorTeletext(i, offsetDescriptorEnd); err != nil { + err = fmt.Errorf("astits: parsing VBI Teletext descriptor failed: %w", err) + return + } + default: + if d.Unknown, err = newDescriptorUnknown(i, d.Tag, d.Length); err != nil { + err = fmt.Errorf("astits: parsing unknown descriptor failed: %w", err) + return + } + } + } + + // Seek in iterator to make sure we move to the end of the descriptor since its content may be + // corrupted + i.Seek(offsetDescriptorEnd) + } + o = append(o, d) + } + } + return +} + +func calcDescriptorUserDefinedLength(d []byte) uint8 { + return uint8(len(d)) +} + +func writeDescriptorUserDefined(w *astikit.BitsWriter, d []byte) error { + b := astikit.NewBitsWriterBatch(w) + + b.Write(d) + + return b.Err() +} + +func calcDescriptorAC3Length(d *DescriptorAC3) uint8 { + ret := 1 // flags + + if d.HasComponentType { + ret++ + } + if d.HasBSID { + ret++ + } + if d.HasMainID { + ret++ + } + if d.HasASVC { + ret++ + } + + ret += len(d.AdditionalInfo) + + return uint8(ret) +} + +func writeDescriptorAC3(w *astikit.BitsWriter, d *DescriptorAC3) error { + b := astikit.NewBitsWriterBatch(w) + + b.Write(d.HasComponentType) + b.Write(d.HasBSID) + b.Write(d.HasMainID) + b.Write(d.HasASVC) + b.WriteN(uint8(0xff), 4) + + if d.HasComponentType { + b.Write(d.ComponentType) + } + if d.HasBSID { + b.Write(d.BSID) + } + if d.HasMainID { + b.Write(d.MainID) + } + if d.HasASVC { + b.Write(d.ASVC) + } + b.Write(d.AdditionalInfo) + + return b.Err() +} + +func calcDescriptorAVCVideoLength(d *DescriptorAVCVideo) uint8 { + return 4 +} + +func writeDescriptorAVCVideo(w *astikit.BitsWriter, d *DescriptorAVCVideo) error { + b := astikit.NewBitsWriterBatch(w) + + b.Write(d.ProfileIDC) + + b.Write(d.ConstraintSet0Flag) + b.Write(d.ConstraintSet1Flag) + b.Write(d.ConstraintSet2Flag) + b.WriteN(d.CompatibleFlags, 5) + + b.Write(d.LevelIDC) + + b.Write(d.AVCStillPresent) + 
b.Write(d.AVC24HourPictureFlag) + b.WriteN(uint8(0xff), 6) + + return b.Err() +} + +func calcDescriptorComponentLength(d *DescriptorComponent) uint8 { + return uint8(6 + len(d.Text)) +} + +func writeDescriptorComponent(w *astikit.BitsWriter, d *DescriptorComponent) error { + b := astikit.NewBitsWriterBatch(w) + + b.WriteN(d.StreamContentExt, 4) + b.WriteN(d.StreamContent, 4) + + b.Write(d.ComponentType) + b.Write(d.ComponentTag) + + b.WriteBytesN(d.ISO639LanguageCode, 3, 0) + + b.Write(d.Text) + + return b.Err() +} + +func calcDescriptorContentLength(d *DescriptorContent) uint8 { + return uint8(2 * len(d.Items)) +} + +func writeDescriptorContent(w *astikit.BitsWriter, d *DescriptorContent) error { + b := astikit.NewBitsWriterBatch(w) + + for _, item := range d.Items { + b.WriteN(item.ContentNibbleLevel1, 4) + b.WriteN(item.ContentNibbleLevel2, 4) + b.Write(item.UserByte) + } + + return b.Err() +} + +func calcDescriptorDataStreamAlignmentLength(d *DescriptorDataStreamAlignment) uint8 { + return 1 +} + +func writeDescriptorDataStreamAlignment(w *astikit.BitsWriter, d *DescriptorDataStreamAlignment) error { + b := astikit.NewBitsWriterBatch(w) + + b.Write(d.Type) + + return b.Err() +} + +func calcDescriptorEnhancedAC3Length(d *DescriptorEnhancedAC3) uint8 { + ret := 1 // flags + + if d.HasComponentType { + ret++ + } + if d.HasBSID { + ret++ + } + if d.HasMainID { + ret++ + } + if d.HasASVC { + ret++ + } + if d.HasSubStream1 { + ret++ + } + if d.HasSubStream2 { + ret++ + } + if d.HasSubStream3 { + ret++ + } + + ret += len(d.AdditionalInfo) + + return uint8(ret) +} + +func writeDescriptorEnhancedAC3(w *astikit.BitsWriter, d *DescriptorEnhancedAC3) error { + b := astikit.NewBitsWriterBatch(w) + + b.Write(d.HasComponentType) + b.Write(d.HasBSID) + b.Write(d.HasMainID) + b.Write(d.HasASVC) + b.Write(d.MixInfoExists) + b.Write(d.HasSubStream1) + b.Write(d.HasSubStream2) + b.Write(d.HasSubStream3) + + if d.HasComponentType { + b.Write(d.ComponentType) + } + if d.HasBSID { + 
b.Write(d.BSID) + } + if d.HasMainID { + b.Write(d.MainID) + } + if d.HasASVC { + b.Write(d.ASVC) + } + if d.HasSubStream1 { + b.Write(d.SubStream1) + } + if d.HasSubStream2 { + b.Write(d.SubStream2) + } + if d.HasSubStream3 { + b.Write(d.SubStream3) + } + + b.Write(d.AdditionalInfo) + + return b.Err() +} + +func calcDescriptorExtendedEventLength(d *DescriptorExtendedEvent) (descriptorLength, lengthOfItems uint8) { + ret := 1 + 3 + 1 // numbers, language and items length + + itemsRet := 0 + for _, item := range d.Items { + itemsRet += 1 // description length + itemsRet += len(item.Description) + itemsRet += 1 // content length + itemsRet += len(item.Content) + } + + ret += itemsRet + + ret += 1 // text length + ret += len(d.Text) + + return uint8(ret), uint8(itemsRet) +} + +func writeDescriptorExtendedEvent(w *astikit.BitsWriter, d *DescriptorExtendedEvent) error { + b := astikit.NewBitsWriterBatch(w) + + var lengthOfItems uint8 + + _, lengthOfItems = calcDescriptorExtendedEventLength(d) + + b.WriteN(d.Number, 4) + b.WriteN(d.LastDescriptorNumber, 4) + + b.WriteBytesN(d.ISO639LanguageCode, 3, 0) + + b.Write(lengthOfItems) + for _, item := range d.Items { + b.Write(uint8(len(item.Description))) + b.Write(item.Description) + b.Write(uint8(len(item.Content))) + b.Write(item.Content) + } + + b.Write(uint8(len(d.Text))) + b.Write(d.Text) + + return b.Err() +} + +func calcDescriptorExtensionSupplementaryAudioLength(d *DescriptorExtensionSupplementaryAudio) int { + ret := 1 + if d.HasLanguageCode { + ret += 3 + } + ret += len(d.PrivateData) + return ret +} + +func calcDescriptorExtensionLength(d *DescriptorExtension) uint8 { + ret := 1 // tag + + switch d.Tag { + case DescriptorTagExtensionSupplementaryAudio: + ret += calcDescriptorExtensionSupplementaryAudioLength(d.SupplementaryAudio) + default: + if d.Unknown != nil { + ret += len(*d.Unknown) + } + } + + return uint8(ret) +} + +func writeDescriptorExtensionSupplementaryAudio(w *astikit.BitsWriter, d 
*DescriptorExtensionSupplementaryAudio) error { + b := astikit.NewBitsWriterBatch(w) + + b.Write(d.MixType) + b.WriteN(d.EditorialClassification, 5) + b.Write(true) // reserved + b.Write(d.HasLanguageCode) + + if d.HasLanguageCode { + b.WriteBytesN(d.LanguageCode, 3, 0) + } + + b.Write(d.PrivateData) + + return b.Err() +} + +func writeDescriptorExtension(w *astikit.BitsWriter, d *DescriptorExtension) error { + b := astikit.NewBitsWriterBatch(w) + + b.Write(d.Tag) + + switch d.Tag { + case DescriptorTagExtensionSupplementaryAudio: + err := writeDescriptorExtensionSupplementaryAudio(w, d.SupplementaryAudio) + if err != nil { + return err + } + default: + if d.Unknown != nil { + b.Write(*d.Unknown) + } + } + + return b.Err() +} + +func calcDescriptorISO639LanguageAndAudioTypeLength(d *DescriptorISO639LanguageAndAudioType) uint8 { + return 3 + 1 // language code + type +} + +func writeDescriptorISO639LanguageAndAudioType(w *astikit.BitsWriter, d *DescriptorISO639LanguageAndAudioType) error { + b := astikit.NewBitsWriterBatch(w) + + b.WriteBytesN(d.Language, 3, 0) + b.Write(d.Type) + + return b.Err() +} + +func calcDescriptorLocalTimeOffsetLength(d *DescriptorLocalTimeOffset) uint8 { + return uint8(13 * len(d.Items)) +} + +func writeDescriptorLocalTimeOffset(w *astikit.BitsWriter, d *DescriptorLocalTimeOffset) error { + b := astikit.NewBitsWriterBatch(w) + + for _, item := range d.Items { + b.WriteBytesN(item.CountryCode, 3, 0) + + b.WriteN(item.CountryRegionID, 6) + b.WriteN(uint8(0xff), 1) + b.Write(item.LocalTimeOffsetPolarity) + + if _, err := writeDVBDurationMinutes(w, item.LocalTimeOffset); err != nil { + return err + } + if _, err := writeDVBTime(w, item.TimeOfChange); err != nil { + return err + } + if _, err := writeDVBDurationMinutes(w, item.NextTimeOffset); err != nil { + return err + } + } + + return b.Err() +} + +func calcDescriptorMaximumBitrateLength(d *DescriptorMaximumBitrate) uint8 { + return 3 +} + +func writeDescriptorMaximumBitrate(w 
*astikit.BitsWriter, d *DescriptorMaximumBitrate) error { + b := astikit.NewBitsWriterBatch(w) + + b.WriteN(uint8(0xff), 2) + b.WriteN(uint32(d.Bitrate/50), 22) + + return b.Err() +} + +func calcDescriptorNetworkNameLength(d *DescriptorNetworkName) uint8 { + return uint8(len(d.Name)) +} + +func writeDescriptorNetworkName(w *astikit.BitsWriter, d *DescriptorNetworkName) error { + b := astikit.NewBitsWriterBatch(w) + + b.Write(d.Name) + + return b.Err() +} + +func calcDescriptorParentalRatingLength(d *DescriptorParentalRating) uint8 { + return uint8(4 * len(d.Items)) +} + +func writeDescriptorParentalRating(w *astikit.BitsWriter, d *DescriptorParentalRating) error { + b := astikit.NewBitsWriterBatch(w) + + for _, item := range d.Items { + b.WriteBytesN(item.CountryCode, 3, 0) + b.Write(item.Rating) + } + + return b.Err() +} + +func calcDescriptorPrivateDataIndicatorLength(d *DescriptorPrivateDataIndicator) uint8 { + return 4 +} + +func writeDescriptorPrivateDataIndicator(w *astikit.BitsWriter, d *DescriptorPrivateDataIndicator) error { + b := astikit.NewBitsWriterBatch(w) + + b.Write(d.Indicator) + + return b.Err() +} + +func calcDescriptorPrivateDataSpecifierLength(d *DescriptorPrivateDataSpecifier) uint8 { + return 4 +} + +func writeDescriptorPrivateDataSpecifier(w *astikit.BitsWriter, d *DescriptorPrivateDataSpecifier) error { + b := astikit.NewBitsWriterBatch(w) + + b.Write(d.Specifier) + + return b.Err() +} + +func calcDescriptorRegistrationLength(d *DescriptorRegistration) uint8 { + return uint8(4 + len(d.AdditionalIdentificationInfo)) +} + +func writeDescriptorRegistration(w *astikit.BitsWriter, d *DescriptorRegistration) error { + b := astikit.NewBitsWriterBatch(w) + + b.Write(d.FormatIdentifier) + b.Write(d.AdditionalIdentificationInfo) + + return b.Err() +} + +func calcDescriptorServiceLength(d *DescriptorService) uint8 { + ret := 3 // type and lengths + ret += len(d.Name) + ret += len(d.Provider) + return uint8(ret) +} + +func writeDescriptorService(w 
*astikit.BitsWriter, d *DescriptorService) error { + b := astikit.NewBitsWriterBatch(w) + + b.Write(d.Type) + b.Write(uint8(len(d.Provider))) + b.Write(d.Provider) + b.Write(uint8(len(d.Name))) + b.Write(d.Name) + + return b.Err() +} + +func calcDescriptorShortEventLength(d *DescriptorShortEvent) uint8 { + ret := 3 + 1 + 1 // language code and lengths + ret += len(d.EventName) + ret += len(d.Text) + return uint8(ret) +} + +func writeDescriptorShortEvent(w *astikit.BitsWriter, d *DescriptorShortEvent) error { + b := astikit.NewBitsWriterBatch(w) + + b.WriteBytesN(d.Language, 3, 0) + + b.Write(uint8(len(d.EventName))) + b.Write(d.EventName) + + b.Write(uint8(len(d.Text))) + b.Write(d.Text) + + return b.Err() +} + +func calcDescriptorStreamIdentifierLength(d *DescriptorStreamIdentifier) uint8 { + return 1 +} + +func writeDescriptorStreamIdentifier(w *astikit.BitsWriter, d *DescriptorStreamIdentifier) error { + b := astikit.NewBitsWriterBatch(w) + + b.Write(d.ComponentTag) + + return b.Err() +} + +func calcDescriptorSubtitlingLength(d *DescriptorSubtitling) uint8 { + return uint8(8 * len(d.Items)) +} + +func writeDescriptorSubtitling(w *astikit.BitsWriter, d *DescriptorSubtitling) error { + b := astikit.NewBitsWriterBatch(w) + + for _, item := range d.Items { + b.WriteBytesN(item.Language, 3, 0) + b.Write(item.Type) + b.Write(item.CompositionPageID) + b.Write(item.AncillaryPageID) + } + + return b.Err() +} + +func calcDescriptorTeletextLength(d *DescriptorTeletext) uint8 { + return uint8(5 * len(d.Items)) +} + +func writeDescriptorTeletext(w *astikit.BitsWriter, d *DescriptorTeletext) error { + b := astikit.NewBitsWriterBatch(w) + + for _, item := range d.Items { + b.WriteBytesN(item.Language, 3, 0) + b.WriteN(item.Type, 5) + b.WriteN(item.Magazine, 3) + b.WriteN(item.Page/10, 4) + b.WriteN(item.Page%10, 4) + } + + return b.Err() +} + +func calcDescriptorVBIDataLength(d *DescriptorVBIData) uint8 { + return uint8(3 * len(d.Services)) +} + +func writeDescriptorVBIData(w 
*astikit.BitsWriter, d *DescriptorVBIData) error { + b := astikit.NewBitsWriterBatch(w) + + for _, item := range d.Services { + b.Write(item.DataServiceID) + + if item.DataServiceID == VBIDataServiceIDClosedCaptioning || + item.DataServiceID == VBIDataServiceIDEBUTeletext || + item.DataServiceID == VBIDataServiceIDInvertedTeletext || + item.DataServiceID == VBIDataServiceIDMonochrome442Samples || + item.DataServiceID == VBIDataServiceIDVPS || + item.DataServiceID == VBIDataServiceIDWSS { + + b.Write(uint8(len(item.Descriptors))) // each descriptor is 1 byte + for _, desc := range item.Descriptors { + b.WriteN(uint8(0xff), 2) + b.Write(desc.FieldParity) + b.WriteN(desc.LineOffset, 5) + } + } else { + // let's put one reserved byte + b.Write(uint8(1)) + b.Write(uint8(0xff)) + } + } + + return b.Err() +} + +func calcDescriptorUnknownLength(d *DescriptorUnknown) uint8 { + return uint8(len(d.Content)) +} + +func writeDescriptorUnknown(w *astikit.BitsWriter, d *DescriptorUnknown) error { + b := astikit.NewBitsWriterBatch(w) + + b.Write(d.Content) + + return b.Err() +} + +func calcDescriptorLength(d *Descriptor) uint8 { + if d.Tag >= 0x80 && d.Tag <= 0xfe { + return calcDescriptorUserDefinedLength(d.UserDefined) + } + + switch d.Tag { + case DescriptorTagAC3: + return calcDescriptorAC3Length(d.AC3) + case DescriptorTagAVCVideo: + return calcDescriptorAVCVideoLength(d.AVCVideo) + case DescriptorTagComponent: + return calcDescriptorComponentLength(d.Component) + case DescriptorTagContent: + return calcDescriptorContentLength(d.Content) + case DescriptorTagDataStreamAlignment: + return calcDescriptorDataStreamAlignmentLength(d.DataStreamAlignment) + case DescriptorTagEnhancedAC3: + return calcDescriptorEnhancedAC3Length(d.EnhancedAC3) + case DescriptorTagExtendedEvent: + ret, _ := calcDescriptorExtendedEventLength(d.ExtendedEvent) + return ret + case DescriptorTagExtension: + return calcDescriptorExtensionLength(d.Extension) + case DescriptorTagISO639LanguageAndAudioType: + 
return calcDescriptorISO639LanguageAndAudioTypeLength(d.ISO639LanguageAndAudioType) + case DescriptorTagLocalTimeOffset: + return calcDescriptorLocalTimeOffsetLength(d.LocalTimeOffset) + case DescriptorTagMaximumBitrate: + return calcDescriptorMaximumBitrateLength(d.MaximumBitrate) + case DescriptorTagNetworkName: + return calcDescriptorNetworkNameLength(d.NetworkName) + case DescriptorTagParentalRating: + return calcDescriptorParentalRatingLength(d.ParentalRating) + case DescriptorTagPrivateDataIndicator: + return calcDescriptorPrivateDataIndicatorLength(d.PrivateDataIndicator) + case DescriptorTagPrivateDataSpecifier: + return calcDescriptorPrivateDataSpecifierLength(d.PrivateDataSpecifier) + case DescriptorTagRegistration: + return calcDescriptorRegistrationLength(d.Registration) + case DescriptorTagService: + return calcDescriptorServiceLength(d.Service) + case DescriptorTagShortEvent: + return calcDescriptorShortEventLength(d.ShortEvent) + case DescriptorTagStreamIdentifier: + return calcDescriptorStreamIdentifierLength(d.StreamIdentifier) + case DescriptorTagSubtitling: + return calcDescriptorSubtitlingLength(d.Subtitling) + case DescriptorTagTeletext: + return calcDescriptorTeletextLength(d.Teletext) + case DescriptorTagVBIData: + return calcDescriptorVBIDataLength(d.VBIData) + case DescriptorTagVBITeletext: + return calcDescriptorTeletextLength(d.VBITeletext) + } + + return calcDescriptorUnknownLength(d.Unknown) +} + +func writeDescriptor(w *astikit.BitsWriter, d *Descriptor) (int, error) { + b := astikit.NewBitsWriterBatch(w) + length := calcDescriptorLength(d) + + b.Write(d.Tag) + b.Write(length) + + if err := b.Err(); err != nil { + return 0, err + } + + written := int(length) + 2 + + if d.Tag >= 0x80 && d.Tag <= 0xfe { + return written, writeDescriptorUserDefined(w, d.UserDefined) + } + + switch d.Tag { + case DescriptorTagAC3: + return written, writeDescriptorAC3(w, d.AC3) + case DescriptorTagAVCVideo: + return written, writeDescriptorAVCVideo(w, 
d.AVCVideo) + case DescriptorTagComponent: + return written, writeDescriptorComponent(w, d.Component) + case DescriptorTagContent: + return written, writeDescriptorContent(w, d.Content) + case DescriptorTagDataStreamAlignment: + return written, writeDescriptorDataStreamAlignment(w, d.DataStreamAlignment) + case DescriptorTagEnhancedAC3: + return written, writeDescriptorEnhancedAC3(w, d.EnhancedAC3) + case DescriptorTagExtendedEvent: + return written, writeDescriptorExtendedEvent(w, d.ExtendedEvent) + case DescriptorTagExtension: + return written, writeDescriptorExtension(w, d.Extension) + case DescriptorTagISO639LanguageAndAudioType: + return written, writeDescriptorISO639LanguageAndAudioType(w, d.ISO639LanguageAndAudioType) + case DescriptorTagLocalTimeOffset: + return written, writeDescriptorLocalTimeOffset(w, d.LocalTimeOffset) + case DescriptorTagMaximumBitrate: + return written, writeDescriptorMaximumBitrate(w, d.MaximumBitrate) + case DescriptorTagNetworkName: + return written, writeDescriptorNetworkName(w, d.NetworkName) + case DescriptorTagParentalRating: + return written, writeDescriptorParentalRating(w, d.ParentalRating) + case DescriptorTagPrivateDataIndicator: + return written, writeDescriptorPrivateDataIndicator(w, d.PrivateDataIndicator) + case DescriptorTagPrivateDataSpecifier: + return written, writeDescriptorPrivateDataSpecifier(w, d.PrivateDataSpecifier) + case DescriptorTagRegistration: + return written, writeDescriptorRegistration(w, d.Registration) + case DescriptorTagService: + return written, writeDescriptorService(w, d.Service) + case DescriptorTagShortEvent: + return written, writeDescriptorShortEvent(w, d.ShortEvent) + case DescriptorTagStreamIdentifier: + return written, writeDescriptorStreamIdentifier(w, d.StreamIdentifier) + case DescriptorTagSubtitling: + return written, writeDescriptorSubtitling(w, d.Subtitling) + case DescriptorTagTeletext: + return written, writeDescriptorTeletext(w, d.Teletext) + case DescriptorTagVBIData: + return 
written, writeDescriptorVBIData(w, d.VBIData) + case DescriptorTagVBITeletext: + return written, writeDescriptorTeletext(w, d.VBITeletext) + } + + return written, writeDescriptorUnknown(w, d.Unknown) +} + +func calcDescriptorsLength(ds []*Descriptor) uint16 { + length := uint16(0) + for _, d := range ds { + length += 2 // tag and length + length += uint16(calcDescriptorLength(d)) + } + return length +} + +func writeDescriptors(w *astikit.BitsWriter, ds []*Descriptor) (int, error) { + written := 0 + + for _, d := range ds { + n, err := writeDescriptor(w, d) + if err != nil { + return 0, err + } + written += n + } + + return written, nil +} + +func writeDescriptorsWithLength(w *astikit.BitsWriter, ds []*Descriptor) (int, error) { + length := calcDescriptorsLength(ds) + b := astikit.NewBitsWriterBatch(w) + + b.WriteN(uint8(0xff), 4) // reserved + b.WriteN(length, 12) // program_info_length + + if err := b.Err(); err != nil { + return 0, err + } + + written, err := writeDescriptors(w, ds) + return written + 2, err // 2 for length +} diff --git a/vendor/github.com/asticode/go-astits/dvb.go b/vendor/github.com/asticode/go-astits/dvb.go new file mode 100644 index 000000000..711a984c0 --- /dev/null +++ b/vendor/github.com/asticode/go-astits/dvb.go @@ -0,0 +1,130 @@ +package astits + +import ( + "fmt" + "time" + + "github.com/asticode/go-astikit" +) + +// parseDVBTime parses a DVB time +// This field is coded as 16 bits giving the 16 LSBs of MJD followed by 24 bits coded as 6 digits in 4 - bit Binary +// Coded Decimal (BCD). If the start time is undefined (e.g. for an event in a NVOD reference service) all bits of the +// field are set to "1". 
+// I apologize for the computation which is really messy but details are given in the documentation +// Page: 160 | Annex C | Link: https://www.dvb.org/resources/public/standards/a38_dvb-si_specification.pdf +// (barbashov) the link above can be broken, alternative: https://dvb.org/wp-content/uploads/2019/12/a038_tm1217r37_en300468v1_17_1_-_rev-134_-_si_specification.pdf +func parseDVBTime(i *astikit.BytesIterator) (t time.Time, err error) { + // Get next 2 bytes + var bs []byte + if bs, err = i.NextBytesNoCopy(2); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Date + var mjd = uint16(bs[0])<<8 | uint16(bs[1]) + var yt = int((float64(mjd) - 15078.2) / 365.25) + var mt = int((float64(mjd) - 14956.1 - float64(int(float64(yt)*365.25))) / 30.6001) + var d = int(float64(mjd) - 14956 - float64(int(float64(yt)*365.25)) - float64(int(float64(mt)*30.6001))) + var k int + if mt == 14 || mt == 15 { + k = 1 + } + var y = yt + k + var m = mt - 1 - k*12 + t, _ = time.Parse("06-01-02", fmt.Sprintf("%d-%d-%d", y, m, d)) + + // Time + var s time.Duration + if s, err = parseDVBDurationSeconds(i); err != nil { + err = fmt.Errorf("astits: parsing DVB duration seconds failed: %w", err) + return + } + t = t.Add(s) + return +} + +// parseDVBDurationMinutes parses a minutes duration +// 16 bit field containing the duration of the event in hours, minutes. format: 4 digits, 4 - bit BCD = 18 bit +func parseDVBDurationMinutes(i *astikit.BytesIterator) (d time.Duration, err error) { + var bs []byte + if bs, err = i.NextBytesNoCopy(2); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + d = parseDVBDurationByte(bs[0])*time.Hour + parseDVBDurationByte(bs[1])*time.Minute + return +} + +// parseDVBDurationSeconds parses a seconds duration +// 24 bit field containing the duration of the event in hours, minutes, seconds. 
format: 6 digits, 4 - bit BCD = 24 bit +func parseDVBDurationSeconds(i *astikit.BytesIterator) (d time.Duration, err error) { + var bs []byte + if bs, err = i.NextBytesNoCopy(3); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + d = parseDVBDurationByte(bs[0])*time.Hour + parseDVBDurationByte(bs[1])*time.Minute + parseDVBDurationByte(bs[2])*time.Second + return +} + +// parseDVBDurationByte parses a duration byte +func parseDVBDurationByte(i byte) time.Duration { + return time.Duration(uint8(i)>>4*10 + uint8(i)&0xf) +} + +func writeDVBTime(w *astikit.BitsWriter, t time.Time) (int, error) { + year := t.Year() - 1900 + month := t.Month() + day := t.Day() + + l := 0 + if month <= time.February { + l = 1 + } + + mjd := 14956 + day + int(float64(year-l)*365.25) + int(float64(int(month)+1+l*12)*30.6001) + + d := t.Sub(t.Truncate(24 * time.Hour)) + + b := astikit.NewBitsWriterBatch(w) + + b.Write(uint16(mjd)) + bytesWritten, err := writeDVBDurationSeconds(w, d) + if err != nil { + return 2, err + } + + return bytesWritten + 2, b.Err() +} + +func writeDVBDurationMinutes(w *astikit.BitsWriter, d time.Duration) (int, error) { + b := astikit.NewBitsWriterBatch(w) + + hours := uint8(d.Hours()) + minutes := uint8(int(d.Minutes()) % 60) + + b.Write(dvbDurationByteRepresentation(hours)) + b.Write(dvbDurationByteRepresentation(minutes)) + + return 2, b.Err() +} + +func writeDVBDurationSeconds(w *astikit.BitsWriter, d time.Duration) (int, error) { + b := astikit.NewBitsWriterBatch(w) + + hours := uint8(d.Hours()) + minutes := uint8(int(d.Minutes()) % 60) + seconds := uint8(int(d.Seconds()) % 60) + + b.Write(dvbDurationByteRepresentation(hours)) + b.Write(dvbDurationByteRepresentation(minutes)) + b.Write(dvbDurationByteRepresentation(seconds)) + + return 3, b.Err() +} + +func dvbDurationByteRepresentation(n uint8) uint8 { + return (n/10)<<4 | n%10 +} diff --git a/vendor/github.com/asticode/go-astits/muxer.go 
b/vendor/github.com/asticode/go-astits/muxer.go new file mode 100644 index 000000000..0312169e9 --- /dev/null +++ b/vendor/github.com/asticode/go-astits/muxer.go @@ -0,0 +1,422 @@ +package astits + +import ( + "bytes" + "context" + "errors" + "github.com/asticode/go-astikit" + "io" +) + +const ( + startPID uint16 = 0x0100 + pmtStartPID uint16 = 0x1000 + programNumberStart uint16 = 1 +) + +var ( + ErrPIDNotFound = errors.New("astits: PID not found") + ErrPIDAlreadyExists = errors.New("astits: PID already exists") + ErrPCRPIDInvalid = errors.New("astits: PCR PID invalid") +) + +type Muxer struct { + ctx context.Context + w io.Writer + bitsWriter *astikit.BitsWriter + + packetSize int + tablesRetransmitPeriod int // period in PES packets + + pm programMap // pid -> programNumber + pmt PMTData + nextPID uint16 + patVersion wrappingCounter + pmtVersion wrappingCounter + + patBytes bytes.Buffer + pmtBytes bytes.Buffer + + buf bytes.Buffer + bufWriter *astikit.BitsWriter + + esContexts map[uint16]*esContext + tablesRetransmitCounter int +} + +type esContext struct { + es *PMTElementaryStream + cc wrappingCounter +} + +func newEsContext(es *PMTElementaryStream) *esContext { + return &esContext{ + es: es, + cc: newWrappingCounter(0b1111), // CC is 4 bits + } +} + +func MuxerOptTablesRetransmitPeriod(newPeriod int) func(*Muxer) { + return func(m *Muxer) { + m.tablesRetransmitPeriod = newPeriod + } +} + +// TODO MuxerOptAutodetectPCRPID selecting first video PID for each PMT, falling back to first audio, falling back to any other + +func NewMuxer(ctx context.Context, w io.Writer, opts ...func(*Muxer)) *Muxer { + m := &Muxer{ + ctx: ctx, + w: w, + + packetSize: MpegTsPacketSize, // no 192-byte packet support yet + tablesRetransmitPeriod: 40, + + pm: newProgramMap(), + pmt: PMTData{ + ElementaryStreams: []*PMTElementaryStream{}, + ProgramNumber: programNumberStart, + }, + + // table version is 5-bit field + patVersion: newWrappingCounter(0b11111), + pmtVersion: 
newWrappingCounter(0b11111), + + esContexts: map[uint16]*esContext{}, + } + + m.bufWriter = astikit.NewBitsWriter(astikit.BitsWriterOptions{Writer: &m.buf}) + m.bitsWriter = astikit.NewBitsWriter(astikit.BitsWriterOptions{Writer: m.w}) + + // TODO multiple programs support + m.pm.set(pmtStartPID, programNumberStart) + + for _, opt := range opts { + opt(m) + } + + // to output tables at the very start + m.tablesRetransmitCounter = m.tablesRetransmitPeriod + + return m +} + +// if es.ElementaryPID is zero, it will be generated automatically +func (m *Muxer) AddElementaryStream(es PMTElementaryStream) error { + if es.ElementaryPID != 0 { + for _, oes := range m.pmt.ElementaryStreams { + if oes.ElementaryPID == es.ElementaryPID { + return ErrPIDAlreadyExists + } + } + } else { + es.ElementaryPID = m.nextPID + m.nextPID++ + } + + m.pmt.ElementaryStreams = append(m.pmt.ElementaryStreams, &es) + + m.esContexts[es.ElementaryPID] = newEsContext(&es) + // invalidate pmt cache + m.pmtBytes.Reset() + return nil +} + +func (m *Muxer) RemoveElementaryStream(pid uint16) error { + foundIdx := -1 + for i, oes := range m.pmt.ElementaryStreams { + if oes.ElementaryPID == pid { + foundIdx = i + break + } + } + + if foundIdx == -1 { + return ErrPIDNotFound + } + + m.pmt.ElementaryStreams = append(m.pmt.ElementaryStreams[:foundIdx], m.pmt.ElementaryStreams[foundIdx+1:]...) 
+ delete(m.esContexts, pid) + m.pmtBytes.Reset() + return nil +} + +// SetPCRPID marks pid as one to look PCRs in +func (m *Muxer) SetPCRPID(pid uint16) { + m.pmt.PCRPID = pid +} + +// WriteData writes MuxerData to TS stream +// Currently only PES packets are supported +// Be aware that after successful call WriteData will set d.AdaptationField.StuffingLength value to zero +func (m *Muxer) WriteData(d *MuxerData) (int, error) { + ctx, ok := m.esContexts[d.PID] + if !ok { + return 0, ErrPIDNotFound + } + + bytesWritten := 0 + + forceTables := d.AdaptationField != nil && + d.AdaptationField.RandomAccessIndicator && + d.PID == m.pmt.PCRPID + + n, err := m.retransmitTables(forceTables) + if err != nil { + return n, err + } + + bytesWritten += n + + payloadStart := true + writeAf := d.AdaptationField != nil + payloadBytesWritten := 0 + for payloadBytesWritten < len(d.PES.Data) { + pktLen := 1 + mpegTsPacketHeaderSize // sync byte + header + pkt := Packet{ + Header: &PacketHeader{ + ContinuityCounter: uint8(ctx.cc.get()), + HasAdaptationField: writeAf, + HasPayload: false, + PayloadUnitStartIndicator: false, + PID: d.PID, + }, + } + + if writeAf { + pkt.AdaptationField = d.AdaptationField + // one byte for adaptation field length field + pktLen += 1 + int(calcPacketAdaptationFieldLength(d.AdaptationField)) + writeAf = false + } + + bytesAvailable := m.packetSize - pktLen + if payloadStart { + pesHeaderLengthCurrent := pesHeaderLength + int(calcPESOptionalHeaderLength(d.PES.Header.OptionalHeader)) + // d.AdaptationField with pes header are too big, we don't have space to write pes header + if bytesAvailable < pesHeaderLengthCurrent { + pkt.Header.HasAdaptationField = true + if pkt.AdaptationField == nil { + pkt.AdaptationField = newStuffingAdaptationField(bytesAvailable) + } else { + pkt.AdaptationField.StuffingLength = bytesAvailable + } + } else { + pkt.Header.HasPayload = true + pkt.Header.PayloadUnitStartIndicator = true + } + } else { + pkt.Header.HasPayload = true + 
} + + if pkt.Header.HasPayload { + m.buf.Reset() + if d.PES.Header.StreamID == 0 { + d.PES.Header.StreamID = ctx.es.StreamType.ToPESStreamID() + } + + ntot, npayload, err := writePESData( + m.bufWriter, + d.PES.Header, + d.PES.Data[payloadBytesWritten:], + payloadStart, + bytesAvailable, + ) + if err != nil { + return bytesWritten, err + } + + payloadBytesWritten += npayload + + pkt.Payload = m.buf.Bytes() + + bytesAvailable -= ntot + // if we still have some space in packet, we should stuff it with adaptation field stuffing + // we can't stuff packets with 0xff at the end of a packet since it's not uncommon for PES payloads to have length unspecified + if bytesAvailable > 0 { + pkt.Header.HasAdaptationField = true + if pkt.AdaptationField == nil { + pkt.AdaptationField = newStuffingAdaptationField(bytesAvailable) + } else { + pkt.AdaptationField.StuffingLength = bytesAvailable + } + } + + n, err = writePacket(m.bitsWriter, &pkt, m.packetSize) + if err != nil { + return bytesWritten, err + } + + bytesWritten += n + + payloadStart = false + } + } + + if d.AdaptationField != nil { + d.AdaptationField.StuffingLength = 0 + } + + return bytesWritten, nil +} + +// Writes given packet to MPEG-TS stream +// Stuffs with 0xffs if packet turns out to be shorter than target packet length +func (m *Muxer) WritePacket(p *Packet) (int, error) { + return writePacket(m.bitsWriter, p, m.packetSize) +} + +func (m *Muxer) retransmitTables(force bool) (int, error) { + m.tablesRetransmitCounter++ + if !force && m.tablesRetransmitCounter < m.tablesRetransmitPeriod { + return 0, nil + } + + n, err := m.WriteTables() + if err != nil { + return n, err + } + + m.tablesRetransmitCounter = 0 + return n, nil +} + +func (m *Muxer) WriteTables() (int, error) { + bytesWritten := 0 + + if m.patBytes.Len() != m.packetSize { + if err := m.generatePAT(); err != nil { + return bytesWritten, err + } + } + + if m.pmtBytes.Len() != m.packetSize { + if err := m.generatePMT(); err != nil { + return 
bytesWritten, err + } + } + + n, err := m.w.Write(m.patBytes.Bytes()) + if err != nil { + return bytesWritten, err + } + bytesWritten += n + + n, err = m.w.Write(m.pmtBytes.Bytes()) + if err != nil { + return bytesWritten, err + } + bytesWritten += n + + return bytesWritten, nil +} + +func (m *Muxer) generatePAT() error { + d := m.pm.toPATData() + syntax := &PSISectionSyntax{ + Data: &PSISectionSyntaxData{PAT: d}, + Header: &PSISectionSyntaxHeader{ + CurrentNextIndicator: true, + // TODO support for PAT tables longer than 1 TS packet + //LastSectionNumber: 0, + //SectionNumber: 0, + TableIDExtension: d.TransportStreamID, + VersionNumber: uint8(m.patVersion.get()), + }, + } + section := PSISection{ + Header: &PSISectionHeader{ + SectionLength: calcPATSectionLength(d), + SectionSyntaxIndicator: true, + TableID: PSITableID(d.TransportStreamID), + }, + Syntax: syntax, + } + psiData := PSIData{ + Sections: []*PSISection{§ion}, + } + + m.buf.Reset() + w := astikit.NewBitsWriter(astikit.BitsWriterOptions{Writer: &m.buf}) + if _, err := writePSIData(w, &psiData); err != nil { + return err + } + + m.patBytes.Reset() + wPacket := astikit.NewBitsWriter(astikit.BitsWriterOptions{Writer: &m.patBytes}) + + pkt := Packet{ + Header: &PacketHeader{ + HasPayload: true, + PayloadUnitStartIndicator: true, + PID: PIDPAT, + }, + Payload: m.buf.Bytes(), + } + if _, err := writePacket(wPacket, &pkt, m.packetSize); err != nil { + // FIXME save old PAT and rollback to it here maybe? 
+ return err + } + + return nil +} + +func (m *Muxer) generatePMT() error { + hasPCRPID := false + for _, es := range m.pmt.ElementaryStreams { + if es.ElementaryPID == m.pmt.PCRPID { + hasPCRPID = true + break + } + } + if !hasPCRPID { + return ErrPCRPIDInvalid + } + + syntax := &PSISectionSyntax{ + Data: &PSISectionSyntaxData{PMT: &m.pmt}, + Header: &PSISectionSyntaxHeader{ + CurrentNextIndicator: true, + // TODO support for PMT tables longer than 1 TS packet + //LastSectionNumber: 0, + //SectionNumber: 0, + TableIDExtension: m.pmt.ProgramNumber, + VersionNumber: uint8(m.pmtVersion.get()), + }, + } + section := PSISection{ + Header: &PSISectionHeader{ + SectionLength: calcPMTSectionLength(&m.pmt), + SectionSyntaxIndicator: true, + TableID: PSITableIDPMT, + }, + Syntax: syntax, + } + psiData := PSIData{ + Sections: []*PSISection{§ion}, + } + + m.buf.Reset() + w := astikit.NewBitsWriter(astikit.BitsWriterOptions{Writer: &m.buf}) + if _, err := writePSIData(w, &psiData); err != nil { + return err + } + + m.pmtBytes.Reset() + wPacket := astikit.NewBitsWriter(astikit.BitsWriterOptions{Writer: &m.pmtBytes}) + + pkt := Packet{ + Header: &PacketHeader{ + HasPayload: true, + PayloadUnitStartIndicator: true, + PID: pmtStartPID, // FIXME multiple programs support + }, + Payload: m.buf.Bytes(), + } + if _, err := writePacket(wPacket, &pkt, m.packetSize); err != nil { + // FIXME save old PMT and rollback to it here maybe? 
+ return err + } + + return nil +} diff --git a/vendor/github.com/asticode/go-astits/packet.go b/vendor/github.com/asticode/go-astits/packet.go new file mode 100644 index 000000000..c1261209f --- /dev/null +++ b/vendor/github.com/asticode/go-astits/packet.go @@ -0,0 +1,543 @@ +package astits + +import ( + "fmt" + "github.com/asticode/go-astikit" +) + +// Scrambling Controls +const ( + ScramblingControlNotScrambled = 0 + ScramblingControlReservedForFutureUse = 1 + ScramblingControlScrambledWithEvenKey = 2 + ScramblingControlScrambledWithOddKey = 3 +) + +const ( + MpegTsPacketSize = 188 + mpegTsPacketHeaderSize = 3 + pcrBytesSize = 6 +) + +// Packet represents a packet +// https://en.wikipedia.org/wiki/MPEG_transport_stream +type Packet struct { + AdaptationField *PacketAdaptationField + Header *PacketHeader + Payload []byte // This is only the payload content +} + +// PacketHeader represents a packet header +type PacketHeader struct { + ContinuityCounter uint8 // Sequence number of payload packets (0x00 to 0x0F) within each stream (except PID 8191) + HasAdaptationField bool + HasPayload bool + PayloadUnitStartIndicator bool // Set when a PES, PSI, or DVB-MIP packet begins immediately following the header. + PID uint16 // Packet Identifier, describing the payload data. + TransportErrorIndicator bool // Set when a demodulator can't correct errors from FEC data; indicating the packet is corrupt. + TransportPriority bool // Set when the current packet has a higher priority than other packets with the same PID. 
+ TransportScramblingControl uint8 +} + +// PacketAdaptationField represents a packet adaptation field +type PacketAdaptationField struct { + AdaptationExtensionField *PacketAdaptationExtensionField + DiscontinuityIndicator bool // Set if current TS packet is in a discontinuity state with respect to either the continuity counter or the program clock reference + ElementaryStreamPriorityIndicator bool // Set when this stream should be considered "high priority" + HasAdaptationExtensionField bool + HasOPCR bool + HasPCR bool + HasTransportPrivateData bool + HasSplicingCountdown bool + Length int + IsOneByteStuffing bool // Only used for one byte stuffing - if true, adaptation field will be written as one uint8(0). Not part of TS format + StuffingLength int // Only used in writePacketAdaptationField to request stuffing + OPCR *ClockReference // Original Program clock reference. Helps when one TS is copied into another + PCR *ClockReference // Program clock reference + RandomAccessIndicator bool // Set when the stream may be decoded without errors from this point + SpliceCountdown int // Indicates how many TS packets from this one a splicing point occurs (Two's complement signed; may be negative) + TransportPrivateDataLength int + TransportPrivateData []byte +} + +// PacketAdaptationExtensionField represents a packet adaptation extension field +type PacketAdaptationExtensionField struct { + DTSNextAccessUnit *ClockReference // The PES DTS of the splice point. Split up as 3 bits, 1 marker bit (0x1), 15 bits, 1 marker bit, 15 bits, and 1 marker bit, for 33 data bits total. + HasLegalTimeWindow bool + HasPiecewiseRate bool + HasSeamlessSplice bool + LegalTimeWindowIsValid bool + LegalTimeWindowOffset uint16 // Extra information for rebroadcasters to determine the state of buffers when packets may be missing. + Length int + PiecewiseRate uint32 // The rate of the stream, measured in 188-byte packets, to define the end-time of the LTW. 
+ SpliceType uint8 // Indicates the parameters of the H.262 splice. +} + +// parsePacket parses a packet +func parsePacket(i *astikit.BytesIterator) (p *Packet, err error) { + // Get next byte + var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: getting next byte failed: %w", err) + return + } + + // Packet must start with a sync byte + if b != syncByte { + err = ErrPacketMustStartWithASyncByte + return + } + + // Create packet + p = &Packet{} + + // In case packet size is bigger than 188 bytes, we don't care for the first bytes + i.Seek(i.Len() - MpegTsPacketSize + 1) + offsetStart := i.Offset() + + // Parse header + if p.Header, err = parsePacketHeader(i); err != nil { + err = fmt.Errorf("astits: parsing packet header failed: %w", err) + return + } + + // Parse adaptation field + if p.Header.HasAdaptationField { + if p.AdaptationField, err = parsePacketAdaptationField(i); err != nil { + err = fmt.Errorf("astits: parsing packet adaptation field failed: %w", err) + return + } + } + + // Build payload + if p.Header.HasPayload { + i.Seek(payloadOffset(offsetStart, p.Header, p.AdaptationField)) + p.Payload = i.Dump() + } + return +} + +// payloadOffset returns the payload offset +func payloadOffset(offsetStart int, h *PacketHeader, a *PacketAdaptationField) (offset int) { + offset = offsetStart + 3 + if h.HasAdaptationField { + offset += 1 + a.Length + } + return +} + +// parsePacketHeader parses the packet header +func parsePacketHeader(i *astikit.BytesIterator) (h *PacketHeader, err error) { + // Get next bytes + var bs []byte + if bs, err = i.NextBytesNoCopy(3); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + + // Create header + h = &PacketHeader{ + ContinuityCounter: uint8(bs[2] & 0xf), + HasAdaptationField: bs[2]&0x20 > 0, + HasPayload: bs[2]&0x10 > 0, + PayloadUnitStartIndicator: bs[0]&0x40 > 0, + PID: uint16(bs[0]&0x1f)<<8 | uint16(bs[1]), + TransportErrorIndicator: bs[0]&0x80 > 0, + 
TransportPriority: bs[0]&0x20 > 0, + TransportScramblingControl: uint8(bs[2]) >> 6 & 0x3, + } + return +} + +// parsePacketAdaptationField parses the packet adaptation field +func parsePacketAdaptationField(i *astikit.BytesIterator) (a *PacketAdaptationField, err error) { + // Create adaptation field + a = &PacketAdaptationField{} + + // Get next byte + var b byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Length + a.Length = int(b) + + afStartOffset := i.Offset() + + // Valid length + if a.Length > 0 { + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Flags + a.DiscontinuityIndicator = b&0x80 > 0 + a.RandomAccessIndicator = b&0x40 > 0 + a.ElementaryStreamPriorityIndicator = b&0x20 > 0 + a.HasPCR = b&0x10 > 0 + a.HasOPCR = b&0x08 > 0 + a.HasSplicingCountdown = b&0x04 > 0 + a.HasTransportPrivateData = b&0x02 > 0 + a.HasAdaptationExtensionField = b&0x01 > 0 + + // PCR + if a.HasPCR { + if a.PCR, err = parsePCR(i); err != nil { + err = fmt.Errorf("astits: parsing PCR failed: %w", err) + return + } + } + + // OPCR + if a.HasOPCR { + if a.OPCR, err = parsePCR(i); err != nil { + err = fmt.Errorf("astits: parsing PCR failed: %w", err) + return + } + } + + // Splicing countdown + if a.HasSplicingCountdown { + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + a.SpliceCountdown = int(b) + } + + // Transport private data + if a.HasTransportPrivateData { + // Length + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + a.TransportPrivateDataLength = int(b) + + // Data + if a.TransportPrivateDataLength > 0 { + if a.TransportPrivateData, err = i.NextBytes(a.TransportPrivateDataLength); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + 
return + } + } + } + + // Adaptation extension + if a.HasAdaptationExtensionField { + // Create extension field + a.AdaptationExtensionField = &PacketAdaptationExtensionField{} + + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Length + a.AdaptationExtensionField.Length = int(b) + if a.AdaptationExtensionField.Length > 0 { + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Basic + a.AdaptationExtensionField.HasLegalTimeWindow = b&0x80 > 0 + a.AdaptationExtensionField.HasPiecewiseRate = b&0x40 > 0 + a.AdaptationExtensionField.HasSeamlessSplice = b&0x20 > 0 + + // Legal time window + if a.AdaptationExtensionField.HasLegalTimeWindow { + var bs []byte + if bs, err = i.NextBytesNoCopy(2); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + a.AdaptationExtensionField.LegalTimeWindowIsValid = bs[0]&0x80 > 0 + a.AdaptationExtensionField.LegalTimeWindowOffset = uint16(bs[0]&0x7f)<<8 | uint16(bs[1]) + } + + // Piecewise rate + if a.AdaptationExtensionField.HasPiecewiseRate { + var bs []byte + if bs, err = i.NextBytesNoCopy(3); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + a.AdaptationExtensionField.PiecewiseRate = uint32(bs[0]&0x3f)<<16 | uint32(bs[1])<<8 | uint32(bs[2]) + } + + // Seamless splice + if a.AdaptationExtensionField.HasSeamlessSplice { + // Get next byte + if b, err = i.NextByte(); err != nil { + err = fmt.Errorf("astits: fetching next byte failed: %w", err) + return + } + + // Splice type + a.AdaptationExtensionField.SpliceType = uint8(b&0xf0) >> 4 + + // We need to rewind since the current byte is used by the DTS next access unit as well + i.Skip(-1) + + // DTS Next access unit + if a.AdaptationExtensionField.DTSNextAccessUnit, err = parsePTSOrDTS(i); err != nil { + err = 
fmt.Errorf("astits: parsing DTS failed: %w", err) + return + } + } + } + } + } + + a.StuffingLength = a.Length - (i.Offset() - afStartOffset) + + return +} + +// parsePCR parses a Program Clock Reference +// Program clock reference, stored as 33 bits base, 6 bits reserved, 9 bits extension. +func parsePCR(i *astikit.BytesIterator) (cr *ClockReference, err error) { + var bs []byte + if bs, err = i.NextBytesNoCopy(6); err != nil { + err = fmt.Errorf("astits: fetching next bytes failed: %w", err) + return + } + pcr := uint64(bs[0])<<40 | uint64(bs[1])<<32 | uint64(bs[2])<<24 | uint64(bs[3])<<16 | uint64(bs[4])<<8 | uint64(bs[5]) + cr = newClockReference(int64(pcr>>15), int64(pcr&0x1ff)) + return +} + +func writePacket(w *astikit.BitsWriter, p *Packet, targetPacketSize int) (written int, retErr error) { + if retErr = w.Write(uint8(syncByte)); retErr != nil { + return + } + written += 1 + + n, retErr := writePacketHeader(w, p.Header) + if retErr != nil { + return + } + written += n + + if p.Header.HasAdaptationField { + n, retErr = writePacketAdaptationField(w, p.AdaptationField) + if retErr != nil { + return + } + written += n + } + + if targetPacketSize-written < len(p.Payload) { + return 0, fmt.Errorf( + "writePacket: can't write %d bytes of payload: only %d is available", + len(p.Payload), + targetPacketSize-written, + ) + } + + if p.Header.HasPayload { + retErr = w.Write(p.Payload) + if retErr != nil { + return + } + written += len(p.Payload) + } + + for written < targetPacketSize { + if retErr = w.Write(uint8(0xff)); retErr != nil { + return + } + written++ + } + + return written, nil +} + +func writePacketHeader(w *astikit.BitsWriter, h *PacketHeader) (written int, retErr error) { + b := astikit.NewBitsWriterBatch(w) + + b.Write(h.TransportErrorIndicator) + b.Write(h.PayloadUnitStartIndicator) + b.Write(h.TransportPriority) + b.WriteN(h.PID, 13) + b.WriteN(h.TransportScramblingControl, 2) + b.Write(h.HasAdaptationField) // adaptation_field_control higher bit + 
b.Write(h.HasPayload) // adaptation_field_control lower bit + b.WriteN(h.ContinuityCounter, 4) + + return mpegTsPacketHeaderSize, b.Err() +} + +func writePCR(w *astikit.BitsWriter, cr *ClockReference) (int, error) { + b := astikit.NewBitsWriterBatch(w) + + b.WriteN(uint64(cr.Base), 33) + b.WriteN(uint8(0xff), 6) + b.WriteN(uint64(cr.Extension), 9) + return pcrBytesSize, b.Err() +} + +func calcPacketAdaptationFieldLength(af *PacketAdaptationField) (length uint8) { + length++ + if af.HasPCR { + length += pcrBytesSize + } + if af.HasOPCR { + length += pcrBytesSize + } + if af.HasSplicingCountdown { + length++ + } + if af.HasTransportPrivateData { + length += 1 + uint8(len(af.TransportPrivateData)) + } + if af.HasAdaptationExtensionField { + length += 1 + calcPacketAdaptationFieldExtensionLength(af.AdaptationExtensionField) + } + length += uint8(af.StuffingLength) + return +} + +func writePacketAdaptationField(w *astikit.BitsWriter, af *PacketAdaptationField) (bytesWritten int, retErr error) { + b := astikit.NewBitsWriterBatch(w) + + if af.IsOneByteStuffing { + b.Write(uint8(0)) + return 1, nil + } + + length := calcPacketAdaptationFieldLength(af) + b.Write(length) + bytesWritten++ + + b.Write(af.DiscontinuityIndicator) + b.Write(af.RandomAccessIndicator) + b.Write(af.ElementaryStreamPriorityIndicator) + b.Write(af.HasPCR) + b.Write(af.HasOPCR) + b.Write(af.HasSplicingCountdown) + b.Write(af.HasTransportPrivateData) + b.Write(af.HasAdaptationExtensionField) + + bytesWritten++ + + if af.HasPCR { + n, err := writePCR(w, af.PCR) + if err != nil { + return 0, err + } + bytesWritten += n + } + + if af.HasOPCR { + n, err := writePCR(w, af.OPCR) + if err != nil { + return 0, err + } + bytesWritten += n + } + + if af.HasSplicingCountdown { + b.Write(uint8(af.SpliceCountdown)) + bytesWritten++ + } + + if af.HasTransportPrivateData { + // we can get length from TransportPrivateData itself, why do we need separate field? 
+ b.Write(uint8(af.TransportPrivateDataLength)) + bytesWritten++ + if af.TransportPrivateDataLength > 0 { + b.Write(af.TransportPrivateData) + } + bytesWritten += len(af.TransportPrivateData) + } + + if af.HasAdaptationExtensionField { + n, err := writePacketAdaptationFieldExtension(w, af.AdaptationExtensionField) + if err != nil { + return 0, err + } + bytesWritten += n + } + + // stuffing + for i := 0; i < af.StuffingLength; i++ { + b.Write(uint8(0xff)) + bytesWritten++ + } + + retErr = b.Err() + return +} + +func calcPacketAdaptationFieldExtensionLength(afe *PacketAdaptationExtensionField) (length uint8) { + length++ + if afe.HasLegalTimeWindow { + length += 2 + } + if afe.HasPiecewiseRate { + length += 3 + } + if afe.HasSeamlessSplice { + length += ptsOrDTSByteLength + } + return length +} + +func writePacketAdaptationFieldExtension(w *astikit.BitsWriter, afe *PacketAdaptationExtensionField) (bytesWritten int, retErr error) { + b := astikit.NewBitsWriterBatch(w) + + length := calcPacketAdaptationFieldExtensionLength(afe) + b.Write(length) + bytesWritten++ + + b.Write(afe.HasLegalTimeWindow) + b.Write(afe.HasPiecewiseRate) + b.Write(afe.HasSeamlessSplice) + b.WriteN(uint8(0xff), 5) // reserved + bytesWritten++ + + if afe.HasLegalTimeWindow { + b.Write(afe.LegalTimeWindowIsValid) + b.WriteN(afe.LegalTimeWindowOffset, 15) + bytesWritten += 2 + } + + if afe.HasPiecewiseRate { + b.WriteN(uint8(0xff), 2) + b.WriteN(afe.PiecewiseRate, 22) + bytesWritten += 3 + } + + if afe.HasSeamlessSplice { + n, err := writePTSOrDTS(w, afe.SpliceType, afe.DTSNextAccessUnit) + if err != nil { + return 0, err + } + bytesWritten += n + } + + retErr = b.Err() + return +} + +func newStuffingAdaptationField(bytesToStuff int) *PacketAdaptationField { + if bytesToStuff == 1 { + return &PacketAdaptationField{ + IsOneByteStuffing: true, + } + } + + return &PacketAdaptationField{ + // one byte for length and one for flags + StuffingLength: bytesToStuff - 2, + } +} diff --git 
a/vendor/github.com/asticode/go-astits/packet_buffer.go b/vendor/github.com/asticode/go-astits/packet_buffer.go new file mode 100644 index 000000000..db3cf3ade --- /dev/null +++ b/vendor/github.com/asticode/go-astits/packet_buffer.go @@ -0,0 +1,139 @@ +package astits + +import ( + "bufio" + "fmt" + "io" + + "github.com/asticode/go-astikit" +) + +// packetBuffer represents a packet buffer +type packetBuffer struct { + packetSize int + r io.Reader + packetReadBuffer []byte +} + +// newPacketBuffer creates a new packet buffer +func newPacketBuffer(r io.Reader, packetSize int) (pb *packetBuffer, err error) { + // Init + pb = &packetBuffer{ + packetSize: packetSize, + r: r, + } + + // Packet size is not set + if pb.packetSize == 0 { + // Auto detect packet size + if pb.packetSize, err = autoDetectPacketSize(r); err != nil { + err = fmt.Errorf("astits: auto detecting packet size failed: %w", err) + return + } + } + return +} + +// autoDetectPacketSize updates the packet size based on the first bytes +// Minimum packet size is 188 and is bounded by 2 sync bytes +// Assumption is made that the first byte of the reader is a sync byte +func autoDetectPacketSize(r io.Reader) (packetSize int, err error) { + // Read first bytes + const l = 193 + var b = make([]byte, l) + shouldRewind, rerr := peek(r, b) + if rerr != nil { + err = fmt.Errorf("astits: reading first %d bytes failed: %w", l, rerr) + return + } + + // Packet must start with a sync byte + if b[0] != syncByte { + err = ErrPacketMustStartWithASyncByte + return + } + + // Look for sync bytes + for idx, b := range b { + if b == syncByte && idx >= MpegTsPacketSize { + // Update packet size + packetSize = idx + + if !shouldRewind { + return + } + + // Rewind or sync reader + var n int64 + if n, err = rewind(r); err != nil { + err = fmt.Errorf("astits: rewinding failed: %w", err) + return + } else if n == -1 { + var ls = packetSize - (l - packetSize) + if _, err = r.Read(make([]byte, ls)); err != nil { + err = 
fmt.Errorf("astits: reading %d bytes to sync reader failed: %w", ls, err) + return + } + } + return + } + } + err = fmt.Errorf("astits: only one sync byte detected in first %d bytes", l) + return +} + +// bufio.Reader can't be rewinded, which leads to packet loss on packet size autodetection +// but it has handy Peek() method +// so what we do here is peeking bytes for bufio.Reader and falling back to rewinding/syncing for all other readers +func peek(r io.Reader, b []byte) (shouldRewind bool, err error) { + if br, ok := r.(*bufio.Reader); ok { + var bs []byte + bs, err = br.Peek(len(b)) + if err != nil { + return + } + copy(b, bs) + return false, nil + } + + _, err = r.Read(b) + shouldRewind = true + return +} + +// rewind rewinds the reader if possible, otherwise n = -1 +func rewind(r io.Reader) (n int64, err error) { + if s, ok := r.(io.Seeker); ok { + if n, err = s.Seek(0, 0); err != nil { + err = fmt.Errorf("astits: seeking to 0 failed: %w", err) + return + } + return + } + n = -1 + return +} + +// next fetches the next packet from the buffer +func (pb *packetBuffer) next() (p *Packet, err error) { + // Read + if pb.packetReadBuffer == nil || len(pb.packetReadBuffer) != pb.packetSize { + pb.packetReadBuffer = make([]byte, pb.packetSize) + } + + if _, err = io.ReadFull(pb.r, pb.packetReadBuffer); err != nil { + if err == io.EOF || err == io.ErrUnexpectedEOF { + err = ErrNoMorePackets + } else { + err = fmt.Errorf("astits: reading %d bytes failed: %w", pb.packetSize, err) + } + return + } + + // Parse packet + if p, err = parsePacket(astikit.NewBytesIterator(pb.packetReadBuffer)); err != nil { + err = fmt.Errorf("astits: building packet failed: %w", err) + return + } + return +} diff --git a/vendor/github.com/asticode/go-astits/packet_pool.go b/vendor/github.com/asticode/go-astits/packet_pool.go new file mode 100644 index 000000000..d38750ad7 --- /dev/null +++ b/vendor/github.com/asticode/go-astits/packet_pool.go @@ -0,0 +1,101 @@ +package astits + +import ( + 
"sort" + "sync" +) + +// packetPool represents a pool of packets +type packetPool struct { + b map[uint16][]*Packet // Indexed by PID + m *sync.Mutex +} + +// newPacketPool creates a new packet pool +func newPacketPool() *packetPool { + return &packetPool{ + b: make(map[uint16][]*Packet), + m: &sync.Mutex{}, + } +} + +// add adds a new packet to the pool +func (b *packetPool) add(p *Packet) (ps []*Packet) { + // Throw away packet if error indicator + if p.Header.TransportErrorIndicator { + return + } + + // Throw away packets that don't have a payload until we figure out what we're going to do with them + // TODO figure out what we're going to do with them :D + if !p.Header.HasPayload { + return + } + + // Lock + b.m.Lock() + defer b.m.Unlock() + + // Init buffer + var mps []*Packet + var ok bool + if mps, ok = b.b[p.Header.PID]; !ok { + mps = []*Packet{} + } + + // Empty buffer if we detect a discontinuity + if hasDiscontinuity(mps, p) { + mps = []*Packet{} + } + + // Throw away packet if it's the same as the previous one + if isSameAsPrevious(mps, p) { + return + } + + // Add packet + if len(mps) > 0 || (len(mps) == 0 && p.Header.PayloadUnitStartIndicator) { + mps = append(mps, p) + } + + // Check payload unit start indicator + if p.Header.PayloadUnitStartIndicator && len(mps) > 1 { + ps = mps[:len(mps)-1] + mps = []*Packet{p} + } + + // Assign + b.b[p.Header.PID] = mps + return +} + +// dump dumps the packet pool by looking for the first item with packets inside +func (b *packetPool) dump() (ps []*Packet) { + b.m.Lock() + defer b.m.Unlock() + var keys []int + for k := range b.b { + keys = append(keys, int(k)) + } + sort.Ints(keys) + for _, k := range keys { + ps = b.b[uint16(k)] + delete(b.b, uint16(k)) + if len(ps) > 0 { + return + } + } + return +} + +// hasDiscontinuity checks whether a packet is discontinuous with a set of packets +func hasDiscontinuity(ps []*Packet, p *Packet) bool { + return (p.Header.HasAdaptationField && 
p.AdaptationField.DiscontinuityIndicator) || + (len(ps) > 0 && p.Header.HasPayload && p.Header.ContinuityCounter != (ps[len(ps)-1].Header.ContinuityCounter+1)%16) || + (len(ps) > 0 && !p.Header.HasPayload && p.Header.ContinuityCounter != ps[len(ps)-1].Header.ContinuityCounter) +} + +// isSameAsPrevious checks whether a packet is the same as the last packet of a set of packets +func isSameAsPrevious(ps []*Packet, p *Packet) bool { + return len(ps) > 0 && p.Header.HasPayload && p.Header.ContinuityCounter == ps[len(ps)-1].Header.ContinuityCounter +} diff --git a/vendor/github.com/asticode/go-astits/program_map.go b/vendor/github.com/asticode/go-astits/program_map.go new file mode 100644 index 000000000..855f2d70f --- /dev/null +++ b/vendor/github.com/asticode/go-astits/program_map.go @@ -0,0 +1,57 @@ +package astits + +import "sync" + +// programMap represents a program ids map +type programMap struct { + m *sync.Mutex + p map[uint16]uint16 // map[ProgramMapID]ProgramNumber +} + +// newProgramMap creates a new program ids map +func newProgramMap() programMap { + return programMap{ + m: &sync.Mutex{}, + p: make(map[uint16]uint16), + } +} + +// exists checks whether the program with this pid exists +func (m programMap) exists(pid uint16) (ok bool) { + m.m.Lock() + defer m.m.Unlock() + _, ok = m.p[pid] + return +} + +// set sets a new program id +func (m programMap) set(pid, number uint16) { + m.m.Lock() + defer m.m.Unlock() + m.p[pid] = number +} + +func (m programMap) unset(pid uint16) { + m.m.Lock() + defer m.m.Unlock() + delete(m.p, pid) +} + +func (m programMap) toPATData() *PATData { + m.m.Lock() + defer m.m.Unlock() + + d := &PATData{ + Programs: []*PATProgram{}, + TransportStreamID: uint16(PSITableIDPAT), + } + + for pid, pnr := range m.p { + d.Programs = append(d.Programs, &PATProgram{ + ProgramMapID: pid, + ProgramNumber: pnr, + }) + } + + return d +} diff --git a/vendor/github.com/asticode/go-astits/wrapping_counter.go 
b/vendor/github.com/asticode/go-astits/wrapping_counter.go new file mode 100644 index 000000000..025bc4ffa --- /dev/null +++ b/vendor/github.com/asticode/go-astits/wrapping_counter.go @@ -0,0 +1,22 @@ +package astits + +type wrappingCounter struct { + wrapAt int + value int +} + +func newWrappingCounter(wrapAt int) wrappingCounter { + return wrappingCounter{ + wrapAt: wrapAt, + } +} + +// returns current counter state and increments internal value +func (c *wrappingCounter) get() int { + ret := c.value + c.value++ + if c.value > c.wrapAt { + c.value = 0 + } + return ret +} diff --git a/vendor/modules.txt b/vendor/modules.txt index 98c07fd80..050db3412 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -50,6 +50,15 @@ github.com/antchfx/htmlquery # github.com/antchfx/xpath v1.2.0 ## explicit; go 1.14 github.com/antchfx/xpath +# github.com/asticode/go-astikit v0.20.0 +## explicit; go 1.13 +github.com/asticode/go-astikit +# github.com/asticode/go-astisub v0.20.0 +## explicit; go 1.13 +github.com/asticode/go-astisub +# github.com/asticode/go-astits v1.8.0 +## explicit; go 1.13 +github.com/asticode/go-astits # github.com/chromedp/cdproto v0.0.0-20210622022015-fe1827b46b84 ## explicit; go 1.14 github.com/chromedp/cdproto