Implement normalize output conversion

This commit is contained in:
2026-05-09 12:32:18 +00:00
parent c132f3fd5d
commit 6c780f6293
3 changed files with 387 additions and 8 deletions

View File

@@ -1,9 +1,13 @@
package cli
import (
"os"
"path/filepath"
"strings"
"testing"
"gitea.maximumdirect.net/eric/seriatim/internal/config"
"gitea.maximumdirect.net/eric/seriatim/schema"
)
func TestNormalizeCommandIsRecognized(t *testing.T) {
@@ -80,7 +84,171 @@ func TestNormalizeInvalidOutputModuleFails(t *testing.T) {
}
}
func TestNormalizeValidFlagsReachNotImplementedBoundary(t *testing.T) {
// TestNormalizeDefaultOutputSchemaIsIntermediate checks that running normalize
// without an explicit --output-schema produces the intermediate schema:
// segments re-sorted by time, IDs reassigned sequentially from 1, and the
// categories field carried through unchanged.
func TestNormalizeDefaultOutputSchemaIsIntermediate(t *testing.T) {
	tmp := t.TempDir()
	src := writeJSONFile(t, tmp, "input.json", `{
"segments": [
{"id": 99, "start": 5, "end": 6, "speaker": "Bob", "text": "second", "categories": ["filler"]},
{"id": 10, "start": 1, "end": 2, "speaker": "Alice", "text": "first", "categories": ["backchannel"]}
]
}`)
	dst := filepath.Join(tmp, "normalized.json")

	// No --output-schema flag: intermediate is expected as the default.
	if err := executeNormalize(
		"--input-file", src,
		"--output-file", dst,
	); err != nil {
		t.Fatalf("normalize failed: %v", err)
	}

	var got schema.IntermediateTranscript
	readJSON(t, dst, &got)

	if got.Metadata.OutputSchema != config.OutputSchemaIntermediate {
		t.Fatalf("output schema = %q, want %q", got.Metadata.OutputSchema, config.OutputSchemaIntermediate)
	}
	if len(got.Segments) != 2 {
		t.Fatalf("segment count = %d, want 2", len(got.Segments))
	}
	// Input IDs (99, 10) must be replaced with a fresh 1-based sequence.
	if got.Segments[0].ID != 1 || got.Segments[1].ID != 2 {
		t.Fatalf("segment IDs = %d,%d, want 1,2", got.Segments[0].ID, got.Segments[1].ID)
	}
	// Segments arrive out of time order; output must be sorted by start time.
	if got.Segments[0].Text != "first" || got.Segments[1].Text != "second" {
		t.Fatalf("unexpected sort order: %#v", got.Segments)
	}
	cats := got.Segments[0].Categories
	if len(cats) != 1 || cats[0] != "backchannel" {
		t.Fatalf("expected categories preserved on first segment, got %#v", cats)
	}
}
// TestNormalizeBareArrayInputToIntermediateOutput checks that a bare JSON
// array of segments (no wrapping object) is accepted as input and is sorted
// by start time in the intermediate output.
func TestNormalizeBareArrayInputToIntermediateOutput(t *testing.T) {
	tmp := t.TempDir()
	src := writeJSONFile(t, tmp, "input.json", `[
{"start": 2, "end": 3, "speaker": "Bob", "text": "second"},
{"start": 1, "end": 2, "speaker": "Alice", "text": "first"}
]`)
	dst := filepath.Join(tmp, "normalized.json")

	if err := executeNormalize(
		"--input-file", src,
		"--output-file", dst,
		"--output-schema", config.OutputSchemaIntermediate,
	); err != nil {
		t.Fatalf("normalize failed: %v", err)
	}

	var got schema.IntermediateTranscript
	readJSON(t, dst, &got)

	if len(got.Segments) != 2 {
		t.Fatalf("segment count = %d, want 2", len(got.Segments))
	}
	// Alice starts earlier than Bob, so she must come out first.
	if got.Segments[0].Speaker != "Alice" || got.Segments[1].Speaker != "Bob" {
		t.Fatalf("unexpected sorted speakers: %#v", got.Segments)
	}
}
// TestNormalizeInputIndexTieBreakerIsDeterministic checks that when two
// segments have identical start/end times, the sort preserves the original
// input order (a stable tie-break on input index) rather than reordering
// by some other key such as speaker name.
func TestNormalizeInputIndexTieBreakerIsDeterministic(t *testing.T) {
	tmp := t.TempDir()
	// Same timestamps; speaker names are chosen so an (incorrect)
	// alphabetical tie-break would flip the order.
	src := writeJSONFile(t, tmp, "input.json", `[
{"start": 1, "end": 2, "speaker": "Zulu", "text": "first in"},
{"start": 1, "end": 2, "speaker": "Alpha", "text": "second in"}
]`)
	dst := filepath.Join(tmp, "normalized.json")

	if err := executeNormalize(
		"--input-file", src,
		"--output-file", dst,
	); err != nil {
		t.Fatalf("normalize failed: %v", err)
	}

	var got schema.IntermediateTranscript
	readJSON(t, dst, &got)

	if got.Segments[0].Speaker != "Zulu" || got.Segments[1].Speaker != "Alpha" {
		t.Fatalf("tie-break order mismatch: %#v", got.Segments)
	}
}
// TestNormalizeMinimalSchemaOmitsCategories checks that the minimal output
// schema strips the categories field entirely: it must be absent from the
// serialized JSON, not merely empty.
func TestNormalizeMinimalSchemaOmitsCategories(t *testing.T) {
	tmp := t.TempDir()
	src := writeJSONFile(t, tmp, "input.json", `{
"segments": [
{"start": 1, "end": 2, "speaker": "Alice", "text": "first", "categories": ["filler"]}
]
}`)
	dst := filepath.Join(tmp, "normalized.json")

	if err := executeNormalize(
		"--input-file", src,
		"--output-file", dst,
		"--output-schema", config.OutputSchemaMinimal,
	); err != nil {
		t.Fatalf("normalize failed: %v", err)
	}

	var got schema.MinimalTranscript
	readJSON(t, dst, &got)

	if got.Metadata.OutputSchema != config.OutputSchemaMinimal {
		t.Fatalf("output schema = %q, want %q", got.Metadata.OutputSchema, config.OutputSchemaMinimal)
	}
	if len(got.Segments) != 1 || got.Segments[0].ID != 1 {
		t.Fatalf("unexpected minimal output: %#v", got.Segments)
	}

	// Inspect the raw bytes: the key must not appear in the file at all.
	raw, readErr := os.ReadFile(dst)
	if readErr != nil {
		t.Fatalf("read output: %v", readErr)
	}
	if strings.Contains(string(raw), "categories") {
		t.Fatalf("minimal output unexpectedly contains categories:\n%s", string(raw))
	}
}
// TestNormalizeFullSchemaOutputValidatesAndHasProvenanceFallback checks the
// full output schema: the result passes schema validation, provenance fields
// (source, source_segment_index) fall back to the input file's basename and
// the segment's input index when absent, explicit provenance values are kept
// as-is, and overlap_groups serializes as an empty (non-nil) array.
func TestNormalizeFullSchemaOutputValidatesAndHasProvenanceFallback(t *testing.T) {
	tmp := t.TempDir()
	// First segment has no provenance; second carries explicit values.
	src := writeJSONFile(t, tmp, "input.json", `[
{"start": 1, "end": 2, "speaker": "Alice", "text": "first"},
{"start": 3, "end": 4, "speaker": "Bob", "text": "second", "source":"custom.json", "source_segment_index": 7}
]`)
	dst := filepath.Join(tmp, "normalized.json")

	if err := executeNormalize(
		"--input-file", src,
		"--output-file", dst,
		"--output-schema", config.OutputSchemaFull,
	); err != nil {
		t.Fatalf("normalize failed: %v", err)
	}

	var got schema.Transcript
	readJSON(t, dst, &got)

	if vErr := schema.ValidateTranscript(got); vErr != nil {
		t.Fatalf("full output should validate: %v", vErr)
	}
	if len(got.Segments) != 2 {
		t.Fatalf("segment count = %d, want 2", len(got.Segments))
	}

	// Fallback provenance: input basename and zero-based input index.
	wantSource := filepath.Base(src)
	if got.Segments[0].Source != wantSource {
		t.Fatalf("source fallback = %q, want %q", got.Segments[0].Source, wantSource)
	}
	idx := got.Segments[0].SourceSegmentIndex
	if idx == nil || *idx != 0 {
		t.Fatalf("source_segment_index fallback = %v, want 0", idx)
	}

	// Explicit provenance must survive untouched.
	if got.Segments[1].Source != "custom.json" {
		t.Fatalf("explicit source preserved = %q, want custom.json", got.Segments[1].Source)
	}
	idx = got.Segments[1].SourceSegmentIndex
	if idx == nil || *idx != 7 {
		t.Fatalf("explicit source_segment_index preserved = %v, want 7", idx)
	}

	// nil would serialize as JSON null; an empty array is required.
	if got.OverlapGroups == nil || len(got.OverlapGroups) != 0 {
		t.Fatalf("overlap_groups = %#v, want empty array", got.OverlapGroups)
	}
}
func TestNormalizeEmptySegmentsArrayProducesValidOutput(t *testing.T) {
dir := t.TempDir()
input := writeJSONFile(t, dir, "input.json", `{"segments":[]}`)
output := filepath.Join(dir, "normalized.json")
@@ -89,11 +257,38 @@ func TestNormalizeValidFlagsReachNotImplementedBoundary(t *testing.T) {
"--input-file", input,
"--output-file", output,
)
if err == nil {
t.Fatal("expected not implemented error")
if err != nil {
t.Fatalf("normalize failed: %v", err)
}
if !strings.Contains(err.Error(), "not implemented") {
t.Fatalf("unexpected error: %v", err)
var transcript schema.IntermediateTranscript
readJSON(t, output, &transcript)
if len(transcript.Segments) != 0 {
t.Fatalf("segment count = %d, want 0", len(transcript.Segments))
}
if err := schema.ValidateIntermediateTranscript(transcript); err != nil {
t.Fatalf("intermediate output should validate: %v", err)
}
}
// TestNormalizeSelectedOutputSchemaIsHonored checks that an explicit
// --output-schema flag overrides the default and is recorded in the
// output's metadata.
func TestNormalizeSelectedOutputSchemaIsHonored(t *testing.T) {
	tmp := t.TempDir()
	src := writeJSONFile(t, tmp, "input.json", `{"segments":[{"start":1,"end":2,"speaker":"A","text":"one"}]}`)
	dst := filepath.Join(tmp, "normalized.json")

	if err := executeNormalize(
		"--input-file", src,
		"--output-file", dst,
		"--output-schema", config.OutputSchemaMinimal,
	); err != nil {
		t.Fatalf("normalize failed: %v", err)
	}

	var got schema.MinimalTranscript
	readJSON(t, dst, &got)

	if got.Metadata.OutputSchema != config.OutputSchemaMinimal {
		t.Fatalf("output schema = %q, want %q", got.Metadata.OutputSchema, config.OutputSchemaMinimal)
	}
}