Add normalize report diagnostics

This commit is contained in:
2026-05-09 12:34:37 +00:00
parent 6c780f6293
commit 5b008e272c
3 changed files with 275 additions and 12 deletions

View File

@@ -1,12 +1,14 @@
package cli
import (
"encoding/json"
"os"
"path/filepath"
"strings"
"testing"
"gitea.maximumdirect.net/eric/seriatim/internal/config"
"gitea.maximumdirect.net/eric/seriatim/internal/report"
"gitea.maximumdirect.net/eric/seriatim/schema"
)
@@ -292,8 +294,139 @@ func TestNormalizeSelectedOutputSchemaIsHonored(t *testing.T) {
}
}
// TestNormalizeReportFileWrittenAndContainsObjectInputShape runs normalize
// against an object-shaped input and checks that the written report's audit
// event records the detected shape, segment count, default output schema,
// and selected output modules.
func TestNormalizeReportFileWrittenAndContainsObjectInputShape(t *testing.T) {
	tmp := t.TempDir()
	inputPath := writeJSONFile(t, tmp, "input.json", `{"segments":[{"start":1,"end":2,"speaker":"A","text":"one"}]}`)
	reportPath := filepath.Join(tmp, "report.json")
	if err := executeNormalize(
		"--input-file", inputPath,
		"--output-file", filepath.Join(tmp, "normalized.json"),
		"--report-file", reportPath,
	); err != nil {
		t.Fatalf("normalize failed: %v", err)
	}
	var written report.Report
	readJSON(t, reportPath, &written)
	audit := extractNormalizeAudit(t, written)
	if audit.InputShape != "object_with_segments" {
		t.Fatalf("input shape = %q, want object_with_segments", audit.InputShape)
	}
	if audit.InputSegmentCount != 1 {
		t.Fatalf("input segment count = %d, want 1", audit.InputSegmentCount)
	}
	if audit.OutputSchema != config.OutputSchemaIntermediate {
		t.Fatalf("output schema = %q, want %q", audit.OutputSchema, config.OutputSchemaIntermediate)
	}
	if len(audit.OutputModules) != 1 || audit.OutputModules[0] != "json" {
		t.Fatalf("output modules = %v, want [json]", audit.OutputModules)
	}
}
// TestNormalizeReportIncludesBareArrayShape verifies that a top-level JSON
// array input is audited with the bare_segments_array shape.
func TestNormalizeReportIncludesBareArrayShape(t *testing.T) {
	tmp := t.TempDir()
	inputPath := writeJSONFile(t, tmp, "input.json", `[{"start":1,"end":2,"speaker":"A","text":"one"}]`)
	reportPath := filepath.Join(tmp, "report.json")
	if err := executeNormalize(
		"--input-file", inputPath,
		"--output-file", filepath.Join(tmp, "normalized.json"),
		"--report-file", reportPath,
	); err != nil {
		t.Fatalf("normalize failed: %v", err)
	}
	var written report.Report
	readJSON(t, reportPath, &written)
	if audit := extractNormalizeAudit(t, written); audit.InputShape != "bare_segments_array" {
		t.Fatalf("input shape = %q, want bare_segments_array", audit.InputShape)
	}
}
// TestNormalizeReportEmptyInputEmitsWarning verifies that normalizing a
// transcript with zero segments records a normalize-stage warning event.
func TestNormalizeReportEmptyInputEmitsWarning(t *testing.T) {
	tmp := t.TempDir()
	inputPath := writeJSONFile(t, tmp, "input.json", `{"segments":[]}`)
	reportPath := filepath.Join(tmp, "report.json")
	if err := executeNormalize(
		"--input-file", inputPath,
		"--output-file", filepath.Join(tmp, "normalized.json"),
		"--report-file", reportPath,
	); err != nil {
		t.Fatalf("normalize failed: %v", err)
	}
	var written report.Report
	readJSON(t, reportPath, &written)
	// Scan the events for the empty-transcript warning.
	sawWarning := false
	for _, ev := range written.Events {
		if ev.Stage != "normalize" || ev.Module != "normalize" {
			continue
		}
		if ev.Severity == report.SeverityWarning && strings.Contains(ev.Message, "zero segments") {
			sawWarning = true
			break
		}
	}
	if !sawWarning {
		t.Fatalf("expected empty transcript warning event, got %#v", written.Events)
	}
}
// TestNormalizeReportWriteFailureReturnsClearError verifies that pointing
// --report-file at an unwritable path (a directory) yields a descriptive
// error mentioning the flag.
func TestNormalizeReportWriteFailureReturnsClearError(t *testing.T) {
	tmp := t.TempDir()
	inputPath := writeJSONFile(t, tmp, "input.json", `{"segments":[{"start":1,"end":2,"speaker":"A","text":"one"}]}`)
	// The report path is the temp directory itself, so the write must fail.
	err := executeNormalize(
		"--input-file", inputPath,
		"--output-file", filepath.Join(tmp, "normalized.json"),
		"--report-file", tmp,
	)
	if err == nil {
		t.Fatal("expected report write failure")
	}
	if !strings.Contains(err.Error(), "write --report-file") {
		t.Fatalf("unexpected error: %v", err)
	}
}
// executeNormalize runs the normalize subcommand through a fresh root
// command with the supplied flags appended, returning the execution error.
func executeNormalize(args ...string) error {
	root := NewRootCommand()
	argv := append([]string{"normalize"}, args...)
	root.SetArgs(argv)
	return root.Execute()
}
// normalizeAudit is the test-side mirror of the JSON document carried as the
// message of the "normalize-audit" report event; tests decode the event
// message into this struct to make assertions on the audited values.
type normalizeAudit struct {
	Command                string   `json:"command"`
	InputFile              string   `json:"input_file"`
	OutputFile             string   `json:"output_file"`
	InputShape             string   `json:"input_shape"`
	InputSegmentCount      int      `json:"input_segment_count"`
	OutputSchema           string   `json:"output_schema"`
	OutputModules          []string `json:"output_modules"`
	IDsReassigned          bool     `json:"ids_reassigned"`
	SortingChangedInput    bool     `json:"sorting_changed_input_order"`
	SegmentsWithCategories int      `json:"segments_with_categories"`
}
// extractNormalizeAudit locates the "normalize-audit" event in rpt and
// decodes its JSON message. It fails the test if the event is missing or
// its message does not decode.
func extractNormalizeAudit(t *testing.T, rpt report.Report) normalizeAudit {
	t.Helper()
	for _, ev := range rpt.Events {
		if ev.Stage != "normalize" || ev.Module != "normalize-audit" {
			continue
		}
		var decoded normalizeAudit
		if err := json.Unmarshal([]byte(ev.Message), &decoded); err != nil {
			t.Fatalf("decode normalize audit: %v", err)
		}
		return decoded
	}
	t.Fatalf("missing normalize-audit event: %#v", rpt.Events)
	// Unreachable after Fatalf; satisfies the compiler's return requirement.
	return normalizeAudit{}
}

View File

@@ -12,31 +12,57 @@ import (
"gitea.maximumdirect.net/eric/seriatim/schema"
)
// BuildResult contains normalize output plus deterministic transformation diagnostics.
type BuildResult struct {
Output any
SortingChanged bool
IDsReassigned bool
SegmentsWithCategories int
}
// Build converts parsed normalize input into a selected seriatim output schema.
func Build(parsed ParsedTranscript, cfg config.NormalizeConfig) (any, error) {
func Build(parsed ParsedTranscript, cfg config.NormalizeConfig) (BuildResult, error) {
ordered := sortedSegments(parsed.Segments)
sortingChanged := didSortingChangeOrder(ordered)
idsReassigned := didReassignIDs(ordered)
segmentsWithCategories := countSegmentsWithCategories(ordered)
switch cfg.OutputSchema {
case config.OutputSchemaMinimal:
output := buildMinimal(ordered)
if err := schema.ValidateMinimalTranscript(output); err != nil {
return nil, fmt.Errorf("validate normalize output: %w", err)
return BuildResult{}, fmt.Errorf("validate normalize output: %w", err)
}
return output, nil
return BuildResult{
Output: output,
SortingChanged: sortingChanged,
IDsReassigned: idsReassigned,
SegmentsWithCategories: segmentsWithCategories,
}, nil
case config.OutputSchemaIntermediate:
output := buildIntermediate(ordered)
if err := schema.ValidateIntermediateTranscript(output); err != nil {
return nil, fmt.Errorf("validate normalize output: %w", err)
return BuildResult{}, fmt.Errorf("validate normalize output: %w", err)
}
return output, nil
return BuildResult{
Output: output,
SortingChanged: sortingChanged,
IDsReassigned: idsReassigned,
SegmentsWithCategories: segmentsWithCategories,
}, nil
case config.OutputSchemaFull:
output := buildFull(ordered, cfg)
if err := schema.ValidateTranscript(output); err != nil {
return nil, fmt.Errorf("validate normalize output: %w", err)
return BuildResult{}, fmt.Errorf("validate normalize output: %w", err)
}
return output, nil
return BuildResult{
Output: output,
SortingChanged: sortingChanged,
IDsReassigned: idsReassigned,
SegmentsWithCategories: segmentsWithCategories,
}, nil
default:
return nil, fmt.Errorf("unsupported output schema %q", cfg.OutputSchema)
return BuildResult{}, fmt.Errorf("unsupported output schema %q", cfg.OutputSchema)
}
}
@@ -156,3 +182,35 @@ func copyIntPtr(value *int) *int {
copied := *value
return &copied
}
// didSortingChangeOrder reports whether any segment's position in the sorted
// slice differs from its recorded original input index.
func didSortingChangeOrder(segments []InputSegment) bool {
	for position := range segments {
		if segments[position].InputIndex != position {
			return true
		}
	}
	return false
}
// didReassignIDs reports whether assigning sequential 1-based IDs differs
// from the segments' original IDs. A missing original ID counts as a
// reassignment; an empty slice reports false.
func didReassignIDs(segments []InputSegment) bool {
	if len(segments) == 0 {
		return false
	}
	for position := range segments {
		original := segments[position].OriginalID
		if original == nil {
			return true
		}
		if *original != position+1 {
			return true
		}
	}
	return false
}
// countSegmentsWithCategories returns how many segments carry at least one
// category label.
func countSegmentsWithCategories(segments []InputSegment) int {
	total := 0
	for index := range segments {
		if len(segments[index].Categories) != 0 {
			total++
		}
	}
	return total
}

View File

@@ -5,12 +5,28 @@ import (
"encoding/json"
"fmt"
"os"
"strings"
"gitea.maximumdirect.net/eric/seriatim/internal/artifact"
"gitea.maximumdirect.net/eric/seriatim/internal/buildinfo"
"gitea.maximumdirect.net/eric/seriatim/internal/config"
"gitea.maximumdirect.net/eric/seriatim/internal/report"
)
// Run validates command wiring for normalize and will later execute
// artifact-level normalization.
// normalizeAudit is the machine-readable summary of one normalize run. It is
// JSON-encoded and attached as the message of the "normalize-audit" report
// event so tooling can parse the run's inputs, outputs, and diagnostics
// without scraping human-readable log messages.
type normalizeAudit struct {
	Command                string   `json:"command"`
	InputFile              string   `json:"input_file"`
	OutputFile             string   `json:"output_file"`
	InputShape             string   `json:"input_shape"`
	InputSegmentCount      int      `json:"input_segment_count"`
	OutputSchema           string   `json:"output_schema"`
	OutputModules          []string `json:"output_modules"`
	IDsReassigned          bool     `json:"ids_reassigned"`
	SortingChangedInput    bool     `json:"sorting_changed_input_order"`
	SegmentsWithCategories int      `json:"segments_with_categories"`
}
// Run executes artifact-level normalization.
func Run(ctx context.Context, cfg config.NormalizeConfig) error {
if err := ctx.Err(); err != nil {
return err
@@ -21,15 +37,71 @@ func Run(ctx context.Context, cfg config.NormalizeConfig) error {
return err
}
output, err := Build(parsed, cfg)
built, err := Build(parsed, cfg)
if err != nil {
return err
}
if err := writeOutputJSON(cfg.OutputFile, output); err != nil {
if err := writeOutputJSON(cfg.OutputFile, built.Output); err != nil {
return err
}
if cfg.ReportFile != "" {
audit := normalizeAudit{
Command: "normalize",
InputFile: cfg.InputFile,
OutputFile: cfg.OutputFile,
InputShape: string(parsed.Shape),
InputSegmentCount: len(parsed.Segments),
OutputSchema: cfg.OutputSchema,
OutputModules: append([]string(nil), cfg.OutputModules...),
IDsReassigned: built.IDsReassigned,
SortingChangedInput: built.SortingChanged,
SegmentsWithCategories: built.SegmentsWithCategories,
}
auditJSON, err := json.Marshal(audit)
if err != nil {
return fmt.Errorf("marshal normalize audit: %w", err)
}
events := []report.Event{
report.Info("normalize", "normalize", "started normalize command"),
report.Info("normalize", "normalize", fmt.Sprintf("input file: %s", cfg.InputFile)),
report.Info("normalize", "normalize", fmt.Sprintf("detected input shape: %s", parsed.Shape)),
report.Info("normalize", "normalize", fmt.Sprintf("input segment count: %d", len(parsed.Segments))),
report.Info("normalize", "normalize", fmt.Sprintf("selected output schema: %s", cfg.OutputSchema)),
report.Info("normalize", "normalize", fmt.Sprintf("selected output modules: %s", strings.Join(cfg.OutputModules, ","))),
report.Info("normalize", "normalize", fmt.Sprintf("output file: %s", cfg.OutputFile)),
report.Info("normalize", "normalize", fmt.Sprintf("ids reassigned: %t", built.IDsReassigned)),
report.Info("normalize", "normalize", fmt.Sprintf("sorting changed input order: %t", built.SortingChanged)),
report.Info("normalize", "normalize", fmt.Sprintf("segments with categories: %d", built.SegmentsWithCategories)),
report.Info("normalize", "normalize-audit", string(auditJSON)),
}
if len(parsed.Segments) == 0 {
events = append(events, report.Warning("normalize", "normalize", "input transcript contains zero segments"))
}
events = append(events,
report.Info("normalize", "validate-output", fmt.Sprintf("validated %d output segment(s)", len(parsed.Segments))),
report.Info("output", "json", "wrote transcript JSON"),
)
rpt := report.Report{
Metadata: report.Metadata{
Application: artifact.ApplicationName,
Version: buildinfo.Version,
InputReader: "normalize-input",
InputFiles: []string{cfg.InputFile},
PreprocessingModules: []string{},
PostprocessingModules: []string{},
OutputModules: append([]string(nil), cfg.OutputModules...),
},
Events: events,
}
if err := report.WriteJSON(cfg.ReportFile, rpt); err != nil {
return fmt.Errorf("write --report-file %q: %w", cfg.ReportFile, err)
}
}
return nil
}