3 Commits

Author SHA1 Message Date
9ddcf5e0df Document the PostgreSQL schema contract in doc.go
All checks were successful
ci/woodpecker/push/build-image Pipeline was successful
2026-03-17 09:33:07 -05:00
d0b58a4734 Updates to track feedkit v0.7.2 and to add a dedupe processor
All checks were successful
ci/woodpecker/push/build-image Pipeline was successful
2026-03-16 18:35:44 -05:00
6cd823f528 Update go.mod
All checks were successful
ci/woodpecker/push/build-image Pipeline was successful
2026-03-16 15:37:31 -05:00
11 changed files with 284 additions and 37 deletions

View File

@@ -15,9 +15,10 @@ import (
"gitea.maximumdirect.net/ejr/feedkit/config" "gitea.maximumdirect.net/ejr/feedkit/config"
fkdispatch "gitea.maximumdirect.net/ejr/feedkit/dispatch" fkdispatch "gitea.maximumdirect.net/ejr/feedkit/dispatch"
fkevent "gitea.maximumdirect.net/ejr/feedkit/event" fkevent "gitea.maximumdirect.net/ejr/feedkit/event"
fknormalize "gitea.maximumdirect.net/ejr/feedkit/normalize"
fkpipeline "gitea.maximumdirect.net/ejr/feedkit/pipeline" fkpipeline "gitea.maximumdirect.net/ejr/feedkit/pipeline"
fkprocessors "gitea.maximumdirect.net/ejr/feedkit/processors" fkprocessors "gitea.maximumdirect.net/ejr/feedkit/processors"
fkdedupe "gitea.maximumdirect.net/ejr/feedkit/processors/dedupe"
fknormalize "gitea.maximumdirect.net/ejr/feedkit/processors/normalize"
fkscheduler "gitea.maximumdirect.net/ejr/feedkit/scheduler" fkscheduler "gitea.maximumdirect.net/ejr/feedkit/scheduler"
fksinks "gitea.maximumdirect.net/ejr/feedkit/sinks" fksinks "gitea.maximumdirect.net/ejr/feedkit/sinks"
fksources "gitea.maximumdirect.net/ejr/feedkit/sources" fksources "gitea.maximumdirect.net/ejr/feedkit/sources"
@@ -27,6 +28,8 @@ import (
wfsources "gitea.maximumdirect.net/ejr/weatherfeeder/internal/sources" wfsources "gitea.maximumdirect.net/ejr/weatherfeeder/internal/sources"
) )
const dedupeMaxEntries = 2048
func main() { func main() {
log.SetFlags(log.LstdFlags | log.Lmicroseconds) log.SetFlags(log.LstdFlags | log.Lmicroseconds)
@@ -103,9 +106,9 @@ func main() {
events := make(chan fkevent.Event, 256) events := make(chan fkevent.Event, 256)
// --- Normalization (optional) --- // --- Processors ---
// //
// We install feedkit's normalize.Processor even before any normalizers exist. // We install feedkit's processors/normalize.Processor even before any normalizers exist.
// With an empty normalizer list and RequireMatch=false, this is a no-op passthrough. // With an empty normalizer list and RequireMatch=false, this is a no-op passthrough.
// It will begin transforming events as soon as: // It will begin transforming events as soon as:
// 1) sources emit raw schemas (raw.*), and // 1) sources emit raw schemas (raw.*), and
@@ -116,8 +119,9 @@ func main() {
procReg.Register("normalize", func() (fkprocessors.Processor, error) { procReg.Register("normalize", func() (fkprocessors.Processor, error) {
return fknormalize.NewProcessor(normalizers, false), nil return fknormalize.NewProcessor(normalizers, false), nil
}) })
procReg.Register("dedupe", fkdedupe.Factory(dedupeMaxEntries))
chain, err := procReg.BuildChain([]string{"normalize"}) chain, err := procReg.BuildChain([]string{"normalize", "dedupe"})
if err != nil { if err != nil {
log.Fatalf("build processor chain failed: %v", err) log.Fatalf("build processor chain failed: %v", err)
} }

View File

@@ -9,9 +9,10 @@ import (
"gitea.maximumdirect.net/ejr/feedkit/config" "gitea.maximumdirect.net/ejr/feedkit/config"
fkevent "gitea.maximumdirect.net/ejr/feedkit/event" fkevent "gitea.maximumdirect.net/ejr/feedkit/event"
fknormalize "gitea.maximumdirect.net/ejr/feedkit/normalize"
fkpipeline "gitea.maximumdirect.net/ejr/feedkit/pipeline" fkpipeline "gitea.maximumdirect.net/ejr/feedkit/pipeline"
fkprocessors "gitea.maximumdirect.net/ejr/feedkit/processors" fkprocessors "gitea.maximumdirect.net/ejr/feedkit/processors"
fkdedupe "gitea.maximumdirect.net/ejr/feedkit/processors/dedupe"
fknormalize "gitea.maximumdirect.net/ejr/feedkit/processors/normalize"
wfnormalizers "gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers" wfnormalizers "gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers"
) )
@@ -97,39 +98,23 @@ func TestExampleConfigLoads(t *testing.T) {
} }
} }
func TestProcessorRegistryBuildsNormalizeChain(t *testing.T) { func TestProcessorRegistryBuildsNormalizeThenDedupeChain(t *testing.T) {
normalizers := wfnormalizers.RegisterBuiltins(nil) chain, err := buildProcessorChainForTests()
if len(normalizers) == 0 {
t.Fatalf("RegisterBuiltins() returned no normalizers")
}
procReg := fkprocessors.NewRegistry()
procReg.Register("normalize", func() (fkprocessors.Processor, error) {
return fknormalize.NewProcessor(normalizers, false), nil
})
chain, err := procReg.BuildChain([]string{"normalize"})
if err != nil { if err != nil {
t.Fatalf("BuildChain() unexpected error: %v", err) t.Fatalf("BuildChain() unexpected error: %v", err)
} }
if len(chain) != 1 { if len(chain) != 2 {
t.Fatalf("BuildChain() expected 1 processor, got %d", len(chain)) t.Fatalf("BuildChain() expected 2 processors, got %d", len(chain))
} }
pl := &fkpipeline.Pipeline{Processors: chain} pl := &fkpipeline.Pipeline{Processors: chain}
if len(pl.Processors) != 1 { if len(pl.Processors) != 2 {
t.Fatalf("pipeline expected 1 processor, got %d", len(pl.Processors)) t.Fatalf("pipeline expected 2 processors, got %d", len(pl.Processors))
} }
} }
func TestNormalizeNoMatchPassThrough(t *testing.T) { func TestNormalizeNoMatchPassThrough(t *testing.T) {
normalizers := wfnormalizers.RegisterBuiltins(nil) chain, err := buildProcessorChainForTests()
procReg := fkprocessors.NewRegistry()
procReg.Register("normalize", func() (fkprocessors.Processor, error) {
return fknormalize.NewProcessor(normalizers, false), nil
})
chain, err := procReg.BuildChain([]string{"normalize"})
if err != nil { if err != nil {
t.Fatalf("BuildChain() unexpected error: %v", err) t.Fatalf("BuildChain() unexpected error: %v", err)
} }
@@ -157,3 +142,50 @@ func TestNormalizeNoMatchPassThrough(t *testing.T) {
t.Fatalf("Pipeline.Process() expected passthrough output, got %#v", *out) t.Fatalf("Pipeline.Process() expected passthrough output, got %#v", *out)
} }
} }
// TestDedupeDropsSecondEventWithSameID verifies that the normalize+dedupe
// chain passes a never-seen event through and drops an exact re-submission
// (same event ID) on the second pass.
func TestDedupeDropsSecondEventWithSameID(t *testing.T) {
	chain, err := buildProcessorChainForTests()
	if err != nil {
		t.Fatalf("BuildChain() unexpected error: %v", err)
	}
	pipe := &fkpipeline.Pipeline{Processors: chain}

	evt := fkevent.Event{
		ID:        "evt-dedupe-1",
		Kind:      fkevent.Kind("observation"),
		Source:    "test",
		EmittedAt: time.Now().UTC(),
		Schema:    "raw.weatherfeeder.unknown.v1",
		Payload:   map[string]any{"ok": true},
	}
	ctx := context.Background()

	// First pass: the ID is unseen, so dedupe must let the event through.
	got, err := pipe.Process(ctx, evt)
	if err != nil {
		t.Fatalf("first Pipeline.Process() unexpected error: %v", err)
	}
	if got == nil {
		t.Fatalf("first Pipeline.Process() unexpectedly dropped event")
	}

	// Second pass with the identical event: dedupe must drop it (nil result, no error).
	dup, err := pipe.Process(ctx, evt)
	if err != nil {
		t.Fatalf("second Pipeline.Process() unexpected error: %v", err)
	}
	if dup != nil {
		t.Fatalf("second Pipeline.Process() expected dedupe drop, got %#v", *dup)
	}
}
// buildProcessorChainForTests mirrors main's processor wiring for tests:
// the builtin normalizers run first (RequireMatch=false, so unmatched raw
// events pass through), followed by the dedupe processor.
func buildProcessorChainForTests() ([]fkprocessors.Processor, error) {
	builtins := wfnormalizers.RegisterBuiltins(nil)

	reg := fkprocessors.NewRegistry()
	reg.Register("normalize", func() (fkprocessors.Processor, error) {
		return fknormalize.NewProcessor(builtins, false), nil
	})
	reg.Register("dedupe", fkdedupe.Factory(dedupeMaxEntries))

	return reg.BuildChain([]string{"normalize", "dedupe"})
}

3
go.mod
View File

@@ -2,10 +2,11 @@ module gitea.maximumdirect.net/ejr/weatherfeeder
go 1.25 go 1.25
require gitea.maximumdirect.net/ejr/feedkit v0.7.0 require gitea.maximumdirect.net/ejr/feedkit v0.7.2
require ( require (
github.com/klauspost/compress v1.17.2 // indirect github.com/klauspost/compress v1.17.2 // indirect
github.com/lib/pq v1.10.9 // indirect
github.com/nats-io/nats.go v1.34.0 // indirect github.com/nats-io/nats.go v1.34.0 // indirect
github.com/nats-io/nkeys v0.4.7 // indirect github.com/nats-io/nkeys v0.4.7 // indirect
github.com/nats-io/nuid v1.0.1 // indirect github.com/nats-io/nuid v1.0.1 // indirect

6
go.sum
View File

@@ -1,7 +1,9 @@
gitea.maximumdirect.net/ejr/feedkit v0.7.0 h1:qXbsD30BH1HkKf579B4Qu3pDiT9mr+8DmDwzd3IXUoo= gitea.maximumdirect.net/ejr/feedkit v0.7.2 h1:hTg302SgSi7tw11lNzuc+3g7MvHT6jQQziuo2NoARt8=
gitea.maximumdirect.net/ejr/feedkit v0.7.0/go.mod h1:wYtA10GouvSe7L/8e1UEC+tqcp32HJofExIo1k+Wjls= gitea.maximumdirect.net/ejr/feedkit v0.7.2/go.mod h1:U6xC9xZLN3cL4yi7YBVyzGoHYRLJXusFCAKlj2kdYYQ=
github.com/klauspost/compress v1.17.2 h1:RlWWUY/Dr4fL8qk9YG7DTZ7PDgME2V4csBXA8L/ixi4= github.com/klauspost/compress v1.17.2 h1:RlWWUY/Dr4fL8qk9YG7DTZ7PDgME2V4csBXA8L/ixi4=
github.com/klauspost/compress v1.17.2/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= github.com/klauspost/compress v1.17.2/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/nats-io/nats.go v1.34.0 h1:fnxnPCNiwIG5w08rlMcEKTUw4AV/nKyGCOJE8TdhSPk= github.com/nats-io/nats.go v1.34.0 h1:fnxnPCNiwIG5w08rlMcEKTUw4AV/nKyGCOJE8TdhSPk=
github.com/nats-io/nats.go v1.34.0/go.mod h1:Ubdu4Nh9exXdSz0RVWRFBbRfrbSxOYd26oF0wkWclB8= github.com/nats-io/nats.go v1.34.0/go.mod h1:Ubdu4Nh9exXdSz0RVWRFBbRfrbSxOYd26oF0wkWclB8=
github.com/nats-io/nkeys v0.4.7 h1:RwNJbbIdYCoClSDNY7QVKZlyb/wfT6ugvFCiKy6vDvI= github.com/nats-io/nkeys v0.4.7 h1:RwNJbbIdYCoClSDNY7QVKZlyb/wfT6ugvFCiKy6vDvI=

View File

@@ -2,7 +2,7 @@
package normalizers package normalizers
import ( import (
fknormalize "gitea.maximumdirect.net/ejr/feedkit/normalize" fknormalize "gitea.maximumdirect.net/ejr/feedkit/processors/normalize"
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers/nws" "gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers/nws"
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers/openmeteo" "gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers/openmeteo"

View File

@@ -4,7 +4,7 @@ import (
"reflect" "reflect"
"testing" "testing"
fknormalize "gitea.maximumdirect.net/ejr/feedkit/normalize" fknormalize "gitea.maximumdirect.net/ejr/feedkit/processors/normalize"
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers/nws" "gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers/nws"
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers/openmeteo" "gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers/openmeteo"
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers/openweather" "gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers/openweather"

View File

@@ -29,7 +29,7 @@
// //
// 1. One normalizer per file. // 1. One normalizer per file.
// Each file contains exactly one Normalizer implementation (one type that // Each file contains exactly one Normalizer implementation (one type that
// satisfies feedkit/normalize.Normalizer). // satisfies feedkit/processors/normalize.Normalizer).
// Helper files are encouraged (types.go, common.go, mapping.go, etc.) as long // Helper files are encouraged (types.go, common.go, mapping.go, etc.) as long
// as they do not define additional Normalizer types. // as they do not define additional Normalizer types.
// //

View File

@@ -2,7 +2,7 @@
package nws package nws
import ( import (
fknormalize "gitea.maximumdirect.net/ejr/feedkit/normalize" fknormalize "gitea.maximumdirect.net/ejr/feedkit/processors/normalize"
) )
// Register appends NWS normalizers in stable order. // Register appends NWS normalizers in stable order.

View File

@@ -2,7 +2,7 @@
package openmeteo package openmeteo
import ( import (
fknormalize "gitea.maximumdirect.net/ejr/feedkit/normalize" fknormalize "gitea.maximumdirect.net/ejr/feedkit/processors/normalize"
) )
// Register appends Open-Meteo normalizers in stable order. // Register appends Open-Meteo normalizers in stable order.

View File

@@ -2,7 +2,7 @@
package openweather package openweather
import ( import (
fknormalize "gitea.maximumdirect.net/ejr/feedkit/normalize" fknormalize "gitea.maximumdirect.net/ejr/feedkit/processors/normalize"
) )
// Register appends OpenWeather normalizers in stable order. // Register appends OpenWeather normalizers in stable order.

View File

@@ -0,0 +1,208 @@
// Package postgres documents weatherfeeder's PostgreSQL sink contract for
// downstream SQL consumers.
//
// This package wires weatherfeeder canonical events into normalized relational
// tables. By joining parent/child tables as described below, downstream
// consumers can reconstruct the same canonical JSON objects that were written.
//
// Canonical input schemas:
// - weather.observation.v1 -> model.WeatherObservation
// - weather.forecast.v1 -> model.WeatherForecastRun
// - weather.alert.v1 -> model.WeatherAlertRun
//
// Parent/child relationships:
// - observations.event_id -> observation_cloud_layers.event_id
// - observations.event_id -> observation_present_weather.event_id
// - forecasts.event_id -> forecast_periods.run_event_id
// - alert_runs.event_id -> alerts.run_event_id
// - alerts.(run_event_id, alert_index) -> alert_references.(run_event_id, alert_index)
//
// Dedupe and retention behavior:
// - Parent primary keys (event_id): observations, forecasts, alert_runs.
// - Child primary keys use positional indexes to preserve payload order.
// - Prune columns:
// - observations.observed_at
// - observation_cloud_layers.observed_at
// - observation_present_weather.observed_at
// - forecasts.issued_at
// - forecast_periods.issued_at
// - alert_runs.as_of
// - alerts.as_of
// - alert_references.as_of
//
// Envelope field mapping (shared parent columns)
//
// These columns exist on observations, forecasts, and alert_runs:
// - event_id TEXT -> event.id
// - event_kind TEXT -> event.kind
// - event_source TEXT -> event.source
// - event_schema TEXT -> event.schema
// - event_emitted_at TIMESTAMPTZ -> event.emitted_at
// - event_effective_at TIMESTAMPTZ NULL -> event.effective_at
//
// Table contract
//
// 1. observations (PK: event_id)
//
// - event_id TEXT -> event.id
// - event_kind TEXT -> event.kind
// - event_source TEXT -> event.source
// - event_schema TEXT -> event.schema
// - event_emitted_at TIMESTAMPTZ -> event.emitted_at
// - event_effective_at TIMESTAMPTZ NULL -> event.effective_at
// - station_id TEXT NULL -> payload.stationId
// - station_name TEXT NULL -> payload.stationName
// - observed_at TIMESTAMPTZ -> payload.timestamp
// - condition_code INTEGER -> payload.conditionCode
// - condition_text TEXT NULL -> payload.conditionText
// - is_day BOOLEAN NULL -> payload.isDay
// - provider_raw_description TEXT NULL -> payload.providerRawDescription
// - text_description TEXT NULL -> payload.textDescription
// - icon_url TEXT NULL -> payload.iconUrl
// - temperature_c DOUBLE PRECISION NULL -> payload.temperatureC
// - dewpoint_c DOUBLE PRECISION NULL -> payload.dewpointC
// - wind_direction_degrees DOUBLE PRECISION NULL -> payload.windDirectionDegrees
// - wind_speed_kmh DOUBLE PRECISION NULL -> payload.windSpeedKmh
// - wind_gust_kmh DOUBLE PRECISION NULL -> payload.windGustKmh
// - barometric_pressure_pa DOUBLE PRECISION NULL -> payload.barometricPressurePa
// - sea_level_pressure_pa DOUBLE PRECISION NULL -> payload.seaLevelPressurePa
// - visibility_meters DOUBLE PRECISION NULL -> payload.visibilityMeters
// - relative_humidity_percent DOUBLE PRECISION NULL -> payload.relativeHumidityPercent
// - apparent_temperature_c DOUBLE PRECISION NULL -> payload.apparentTemperatureC
// - elevation_meters DOUBLE PRECISION NULL -> payload.elevationMeters
// - raw_message TEXT NULL -> payload.rawMessage
//
// 2. observation_cloud_layers (PK: event_id, layer_index)
//
// - event_id TEXT -> observations.event_id / payload.cloudLayers[i]
// - layer_index INTEGER -> i (array position in payload.cloudLayers)
// - observed_at TIMESTAMPTZ -> payload.timestamp
// - base_meters DOUBLE PRECISION NULL -> payload.cloudLayers[i].baseMeters
// - amount TEXT NULL -> payload.cloudLayers[i].amount
//
// 3. observation_present_weather (PK: event_id, weather_index)
//
// - event_id TEXT -> observations.event_id / payload.presentWeather[i]
// - weather_index INTEGER -> i (array position in payload.presentWeather)
// - observed_at TIMESTAMPTZ -> payload.timestamp
// - raw_text TEXT NULL -> JSON-encoded payload.presentWeather[i].raw
//
// Note: raw_text stores compact JSON text. Consumers that need the original
// object should parse raw_text as JSON.
//
// 4. forecasts (PK: event_id)
//
// - event_id TEXT -> event.id
// - event_kind TEXT -> event.kind
// - event_source TEXT -> event.source
// - event_schema TEXT -> event.schema
// - event_emitted_at TIMESTAMPTZ -> event.emitted_at
// - event_effective_at TIMESTAMPTZ NULL -> event.effective_at
// - location_id TEXT NULL -> payload.locationId
// - location_name TEXT NULL -> payload.locationName
// - issued_at TIMESTAMPTZ -> payload.issuedAt
// - updated_at TIMESTAMPTZ NULL -> payload.updatedAt
// - product TEXT -> payload.product
// - latitude DOUBLE PRECISION NULL -> payload.latitude
// - longitude DOUBLE PRECISION NULL -> payload.longitude
// - elevation_meters DOUBLE PRECISION NULL -> payload.elevationMeters
// - period_count INTEGER -> len(payload.periods)
//
// 5. forecast_periods (PK: run_event_id, period_index)
//
// - run_event_id TEXT -> forecasts.event_id / payload.periods[i]
// - period_index INTEGER -> i (array position in payload.periods)
// - issued_at TIMESTAMPTZ -> payload.issuedAt (copied from parent)
// - start_time TIMESTAMPTZ -> payload.periods[i].startTime
// - end_time TIMESTAMPTZ -> payload.periods[i].endTime
// - name TEXT NULL -> payload.periods[i].name
// - is_day BOOLEAN NULL -> payload.periods[i].isDay
// - condition_code INTEGER -> payload.periods[i].conditionCode
// - condition_text TEXT NULL -> payload.periods[i].conditionText
// - provider_raw_description TEXT NULL -> payload.periods[i].providerRawDescription
// - text_description TEXT NULL -> payload.periods[i].textDescription
// - detailed_text TEXT NULL -> payload.periods[i].detailedText
// - icon_url TEXT NULL -> payload.periods[i].iconUrl
// - temperature_c DOUBLE PRECISION NULL -> payload.periods[i].temperatureC
// - temperature_c_min DOUBLE PRECISION NULL -> payload.periods[i].temperatureCMin
// - temperature_c_max DOUBLE PRECISION NULL -> payload.periods[i].temperatureCMax
// - dewpoint_c DOUBLE PRECISION NULL -> payload.periods[i].dewpointC
// - relative_humidity_percent DOUBLE PRECISION NULL -> payload.periods[i].relativeHumidityPercent
// - wind_direction_degrees DOUBLE PRECISION NULL -> payload.periods[i].windDirectionDegrees
// - wind_speed_kmh DOUBLE PRECISION NULL -> payload.periods[i].windSpeedKmh
// - wind_gust_kmh DOUBLE PRECISION NULL -> payload.periods[i].windGustKmh
// - barometric_pressure_pa DOUBLE PRECISION NULL -> payload.periods[i].barometricPressurePa
// - visibility_meters DOUBLE PRECISION NULL -> payload.periods[i].visibilityMeters
// - apparent_temperature_c DOUBLE PRECISION NULL -> payload.periods[i].apparentTemperatureC
// - cloud_cover_percent DOUBLE PRECISION NULL -> payload.periods[i].cloudCoverPercent
// - probability_of_precipitation_percent DOUBLE PRECISION NULL -> payload.periods[i].probabilityOfPrecipitationPercent
// - precipitation_amount_mm DOUBLE PRECISION NULL -> payload.periods[i].precipitationAmountMm
// - snowfall_depth_mm DOUBLE PRECISION NULL -> payload.periods[i].snowfallDepthMm
// - uv_index DOUBLE PRECISION NULL -> payload.periods[i].uvIndex
//
// 6. alert_runs (PK: event_id)
//
// - event_id TEXT -> event.id
// - event_kind TEXT -> event.kind
// - event_source TEXT -> event.source
// - event_schema TEXT -> event.schema
// - event_emitted_at TIMESTAMPTZ -> event.emitted_at
// - event_effective_at TIMESTAMPTZ NULL -> event.effective_at
// - location_id TEXT NULL -> payload.locationId
// - location_name TEXT NULL -> payload.locationName
// - as_of TIMESTAMPTZ -> payload.asOf
// - latitude DOUBLE PRECISION NULL -> payload.latitude
// - longitude DOUBLE PRECISION NULL -> payload.longitude
// - alert_count INTEGER -> len(payload.alerts)
//
// 7. alerts (PK: run_event_id, alert_index)
//
// - run_event_id TEXT -> alert_runs.event_id / payload.alerts[i]
// - alert_index INTEGER -> i (array position in payload.alerts)
// - as_of TIMESTAMPTZ -> payload.asOf (copied from parent)
// - alert_id TEXT -> payload.alerts[i].id
// - event TEXT NULL -> payload.alerts[i].event
// - headline TEXT NULL -> payload.alerts[i].headline
// - severity TEXT NULL -> payload.alerts[i].severity
// - urgency TEXT NULL -> payload.alerts[i].urgency
// - certainty TEXT NULL -> payload.alerts[i].certainty
// - status TEXT NULL -> payload.alerts[i].status
// - message_type TEXT NULL -> payload.alerts[i].messageType
// - category TEXT NULL -> payload.alerts[i].category
// - response TEXT NULL -> payload.alerts[i].response
// - description TEXT NULL -> payload.alerts[i].description
// - instruction TEXT NULL -> payload.alerts[i].instruction
// - sent TIMESTAMPTZ NULL -> payload.alerts[i].sent
// - effective TIMESTAMPTZ NULL -> payload.alerts[i].effective
// - onset TIMESTAMPTZ NULL -> payload.alerts[i].onset
// - expires TIMESTAMPTZ NULL -> payload.alerts[i].expires
// - area_description TEXT NULL -> payload.alerts[i].areaDescription
// - sender_name TEXT NULL -> payload.alerts[i].senderName
// - reference_count INTEGER -> len(payload.alerts[i].references)
//
// 8. alert_references (PK: run_event_id, alert_index, reference_index)
//
// - run_event_id TEXT -> alert_runs.event_id / payload.alerts[i].references[j]
// - alert_index INTEGER -> i (array position in payload.alerts)
// - reference_index INTEGER -> j (array position in payload.alerts[i].references)
// - as_of TIMESTAMPTZ -> payload.asOf (copied from parent)
// - id TEXT NULL -> payload.alerts[i].references[j].id
// - identifier TEXT NULL -> payload.alerts[i].references[j].identifier
// - sender TEXT NULL -> payload.alerts[i].references[j].sender
// - sent TIMESTAMPTZ NULL -> payload.alerts[i].references[j].sent
//
// Reconstructing canonical JSON payloads
//
// - WeatherObservation:
// read one row from observations, then join child rows by event_id ordered by
// layer_index / weather_index to rebuild cloudLayers and presentWeather arrays.
//
// - WeatherForecastRun:
// read one row from forecasts, then join forecast_periods by run_event_id
// ordered by period_index to rebuild periods.
//
// - WeatherAlertRun:
// read one row from alert_runs, join alerts by run_event_id ordered by
// alert_index, then join alert_references by (run_event_id, alert_index)
// ordered by reference_index to rebuild references per alert.
package postgres