Compare commits
12 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| eb27486466 | |||
| de5add59fd | |||
| 356c3be648 | |||
| dbaebbbd7a | |||
| 88d5727a84 | |||
| 129cebd94d | |||
| e42f2bc9de | |||
| 9ddcf5e0df | |||
| d0b58a4734 | |||
| 6cd823f528 | |||
| 84da2bb689 | |||
| 859ee9dd5c |
28
API.md
28
API.md
@@ -80,34 +80,18 @@ A `WeatherObservation` represents a point-in-time observation for a station/loca
|
|||||||
| `stationName` | string | no | Human station name |
|
| `stationName` | string | no | Human station name |
|
||||||
| `timestamp` | timestamp string | yes | Observation timestamp |
|
| `timestamp` | timestamp string | yes | Observation timestamp |
|
||||||
| `conditionCode` | int | yes | WMO code (`-1` unknown) |
|
| `conditionCode` | int | yes | WMO code (`-1` unknown) |
|
||||||
| `conditionText` | string | no | Canonical short condition text |
|
|
||||||
| `isDay` | bool | no | Day/night hint |
|
| `isDay` | bool | no | Day/night hint |
|
||||||
| `providerRawDescription` | string | no | Provider-specific evidence text |
|
| `textDescription` | string | no | Human-facing short description |
|
||||||
| `textDescription` | string | no | Legacy/transitional text description |
|
|
||||||
| `iconUrl` | string | no | Legacy/transitional icon URL |
|
|
||||||
| `temperatureC` | number | no | Celsius |
|
| `temperatureC` | number | no | Celsius |
|
||||||
| `dewpointC` | number | no | Celsius |
|
| `dewpointC` | number | no | Celsius |
|
||||||
| `windDirectionDegrees` | number | no | Degrees |
|
| `windDirectionDegrees` | number | no | Degrees |
|
||||||
| `windSpeedKmh` | number | no | km/h |
|
| `windSpeedKmh` | number | no | km/h |
|
||||||
| `windGustKmh` | number | no | km/h |
|
| `windGustKmh` | number | no | km/h |
|
||||||
| `barometricPressurePa` | number | no | Pascals |
|
| `barometricPressurePa` | number | no | Pascals |
|
||||||
| `seaLevelPressurePa` | number | no | Pascals |
|
|
||||||
| `visibilityMeters` | number | no | Meters |
|
| `visibilityMeters` | number | no | Meters |
|
||||||
| `relativeHumidityPercent` | number | no | Percent |
|
| `relativeHumidityPercent` | number | no | Percent |
|
||||||
| `apparentTemperatureC` | number | no | Celsius |
|
| `apparentTemperatureC` | number | no | Celsius |
|
||||||
| `elevationMeters` | number | no | Meters |
|
|
||||||
| `rawMessage` | string | no | Provider raw message (for example METAR) |
|
|
||||||
| `presentWeather` | array | no | Provider-specific structured weather fragments |
|
| `presentWeather` | array | no | Provider-specific structured weather fragments |
|
||||||
| `cloudLayers` | array | no | Cloud layer details |
|
|
||||||
|
|
||||||
### Nested: `cloudLayers[]`
|
|
||||||
|
|
||||||
Each `cloudLayers[]` element:
|
|
||||||
|
|
||||||
| Field | Type | Required | Notes |
|
|
||||||
|---|---:|:---:|---|
|
|
||||||
| `baseMeters` | number | no | Cloud base altitude in meters |
|
|
||||||
| `amount` | string | no | Provider string (e.g. FEW/SCT/BKN/OVC) |
|
|
||||||
|
|
||||||
### Nested: `presentWeather[]`
|
### Nested: `presentWeather[]`
|
||||||
|
|
||||||
@@ -159,11 +143,7 @@ A `WeatherForecastPeriod` is valid for `[startTime, endTime)`.
|
|||||||
| `name` | string | no | Human label (often empty for hourly) |
|
| `name` | string | no | Human label (often empty for hourly) |
|
||||||
| `isDay` | bool | no | Day/night hint |
|
| `isDay` | bool | no | Day/night hint |
|
||||||
| `conditionCode` | int | yes | WMO code (`-1` for unknown) |
|
| `conditionCode` | int | yes | WMO code (`-1` for unknown) |
|
||||||
| `conditionText` | string | no | Canonical short text |
|
|
||||||
| `providerRawDescription` | string | no | Provider-specific “evidence” text |
|
|
||||||
| `textDescription` | string | no | Human-facing short phrase |
|
| `textDescription` | string | no | Human-facing short phrase |
|
||||||
| `detailedText` | string | no | Longer narrative |
|
|
||||||
| `iconUrl` | string | no | Legacy/transitional |
|
|
||||||
| `temperatureC` | number | no | °C |
|
| `temperatureC` | number | no | °C |
|
||||||
| `temperatureCMin` | number | no | °C (aggregated products) |
|
| `temperatureCMin` | number | no | °C (aggregated products) |
|
||||||
| `temperatureCMax` | number | no | °C (aggregated products) |
|
| `temperatureCMax` | number | no | °C (aggregated products) |
|
||||||
@@ -241,7 +221,7 @@ A run may contain zero, one, or many alerts.
|
|||||||
|
|
||||||
- Consumers **must** ignore unknown fields.
|
- Consumers **must** ignore unknown fields.
|
||||||
- Producers (weatherfeeder) prefer **additive changes** within a schema version.
|
- Producers (weatherfeeder) prefer **additive changes** within a schema version.
|
||||||
- Renames/removals/semantic breaks require a **schema version bump** (`weather.*.v2`).
|
- Renames/removals/semantic breaks normally require a **schema version bump** (`weather.*.v2`); pre-1.0 projects may choose in-place changes.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -259,7 +239,7 @@ A run may contain zero, one, or many alerts.
|
|||||||
"stationId": "KSTL",
|
"stationId": "KSTL",
|
||||||
"timestamp": "2026-01-17T14:00:00Z",
|
"timestamp": "2026-01-17T14:00:00Z",
|
||||||
"conditionCode": 1,
|
"conditionCode": 1,
|
||||||
"conditionText": "Mainly Sunny",
|
"textDescription": "Mainly Sunny",
|
||||||
"temperatureC": 3.25,
|
"temperatureC": 3.25,
|
||||||
"windSpeedKmh": 18.5
|
"windSpeedKmh": 18.5
|
||||||
}
|
}
|
||||||
@@ -285,7 +265,7 @@ A run may contain zero, one, or many alerts.
|
|||||||
"startTime": "2026-01-17T14:00:00Z",
|
"startTime": "2026-01-17T14:00:00Z",
|
||||||
"endTime": "2026-01-17T15:00:00Z",
|
"endTime": "2026-01-17T15:00:00Z",
|
||||||
"conditionCode": 2,
|
"conditionCode": 2,
|
||||||
"conditionText": "Partly Cloudy",
|
"textDescription": "Partly Cloudy",
|
||||||
"temperatureC": 3.5,
|
"temperatureC": 3.5,
|
||||||
"probabilityOfPrecipitationPercent": 10
|
"probabilityOfPrecipitationPercent": 10
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -22,7 +22,7 @@ For the complete wire contract (event envelope + payload schemas, fields, units,
|
|||||||
|
|
||||||
## Upstream providers (current MVP)
|
## Upstream providers (current MVP)
|
||||||
|
|
||||||
- NWS: observations, hourly forecasts, alerts
|
- NWS: observations, hourly forecasts, narrative forecasts, alerts
|
||||||
- Open-Meteo: observations, hourly forecasts
|
- Open-Meteo: observations, hourly forecasts
|
||||||
- OpenWeather: observations
|
- OpenWeather: observations
|
||||||
|
|
||||||
|
|||||||
@@ -48,12 +48,21 @@ sources:
|
|||||||
- name: NWSHourlyForecastSTL
|
- name: NWSHourlyForecastSTL
|
||||||
mode: poll
|
mode: poll
|
||||||
kinds: ["forecast"]
|
kinds: ["forecast"]
|
||||||
driver: nws_forecast
|
driver: nws_forecast_hourly
|
||||||
every: 45m
|
every: 45m
|
||||||
params:
|
params:
|
||||||
url: "https://api.weather.gov/gridpoints/LSX/90,74/forecast/hourly"
|
url: "https://api.weather.gov/gridpoints/LSX/90,74/forecast/hourly"
|
||||||
user_agent: "HomeOps (eric@maximumdirect.net)"
|
user_agent: "HomeOps (eric@maximumdirect.net)"
|
||||||
|
|
||||||
|
- name: NWSNarrativeForecastSTL
|
||||||
|
mode: poll
|
||||||
|
kinds: ["forecast"]
|
||||||
|
driver: nws_forecast_narrative
|
||||||
|
every: 45m
|
||||||
|
params:
|
||||||
|
url: "https://api.weather.gov/gridpoints/LSX/90,74/forecast?units=us"
|
||||||
|
user_agent: "HomeOps (eric@maximumdirect.net)"
|
||||||
|
|
||||||
- name: OpenMeteoHourlyForecastSTL
|
- name: OpenMeteoHourlyForecastSTL
|
||||||
mode: poll
|
mode: poll
|
||||||
kinds: ["forecast"]
|
kinds: ["forecast"]
|
||||||
@@ -83,6 +92,15 @@ sinks:
|
|||||||
url: nats://nats:4222
|
url: nats://nats:4222
|
||||||
exchange: weatherfeeder
|
exchange: weatherfeeder
|
||||||
|
|
||||||
|
# - name: pg_weatherfeeder
|
||||||
|
# driver: postgres
|
||||||
|
# params:
|
||||||
|
# uri: postgres://weatherdb:5432/weatherdb?sslmode=disable
|
||||||
|
# username: weatherdb
|
||||||
|
# password: weatherdb
|
||||||
|
# prune: 3d
|
||||||
|
# # Prunes rows older than now-3d on each write transaction.
|
||||||
|
|
||||||
# - name: logfile
|
# - name: logfile
|
||||||
# driver: file
|
# driver: file
|
||||||
# params:
|
# params:
|
||||||
@@ -95,5 +113,8 @@ routes:
|
|||||||
- sink: nats_weatherfeeder
|
- sink: nats_weatherfeeder
|
||||||
kinds: ["observation", "forecast", "alert"]
|
kinds: ["observation", "forecast", "alert"]
|
||||||
|
|
||||||
|
# - sink: pg_weatherfeeder
|
||||||
|
# kinds: ["observation", "forecast", "alert"]
|
||||||
|
|
||||||
# - sink: logfile
|
# - sink: logfile
|
||||||
# kinds: ["observation", "alert", "forecast"]
|
# kinds: ["observation", "alert", "forecast"]
|
||||||
|
|||||||
@@ -15,17 +15,21 @@ import (
|
|||||||
"gitea.maximumdirect.net/ejr/feedkit/config"
|
"gitea.maximumdirect.net/ejr/feedkit/config"
|
||||||
fkdispatch "gitea.maximumdirect.net/ejr/feedkit/dispatch"
|
fkdispatch "gitea.maximumdirect.net/ejr/feedkit/dispatch"
|
||||||
fkevent "gitea.maximumdirect.net/ejr/feedkit/event"
|
fkevent "gitea.maximumdirect.net/ejr/feedkit/event"
|
||||||
fknormalize "gitea.maximumdirect.net/ejr/feedkit/normalize"
|
|
||||||
fkpipeline "gitea.maximumdirect.net/ejr/feedkit/pipeline"
|
fkpipeline "gitea.maximumdirect.net/ejr/feedkit/pipeline"
|
||||||
fkprocessors "gitea.maximumdirect.net/ejr/feedkit/processors"
|
fkprocessors "gitea.maximumdirect.net/ejr/feedkit/processors"
|
||||||
|
fkdedupe "gitea.maximumdirect.net/ejr/feedkit/processors/dedupe"
|
||||||
|
fknormalize "gitea.maximumdirect.net/ejr/feedkit/processors/normalize"
|
||||||
fkscheduler "gitea.maximumdirect.net/ejr/feedkit/scheduler"
|
fkscheduler "gitea.maximumdirect.net/ejr/feedkit/scheduler"
|
||||||
fksinks "gitea.maximumdirect.net/ejr/feedkit/sinks"
|
fksinks "gitea.maximumdirect.net/ejr/feedkit/sinks"
|
||||||
fksources "gitea.maximumdirect.net/ejr/feedkit/sources"
|
fksources "gitea.maximumdirect.net/ejr/feedkit/sources"
|
||||||
|
|
||||||
wfnormalizers "gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers"
|
wfnormalizers "gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers"
|
||||||
|
wfpgsink "gitea.maximumdirect.net/ejr/weatherfeeder/internal/sinks/postgres"
|
||||||
wfsources "gitea.maximumdirect.net/ejr/weatherfeeder/internal/sources"
|
wfsources "gitea.maximumdirect.net/ejr/weatherfeeder/internal/sources"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
const dedupeMaxEntries = 2048
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
log.SetFlags(log.LstdFlags | log.Lmicroseconds)
|
log.SetFlags(log.LstdFlags | log.Lmicroseconds)
|
||||||
|
|
||||||
@@ -37,6 +41,9 @@ func main() {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("config load failed: %v", err)
|
log.Fatalf("config load failed: %v", err)
|
||||||
}
|
}
|
||||||
|
if err := wfpgsink.RegisterPostgresSchemas(cfg); err != nil {
|
||||||
|
log.Fatalf("postgres schema registration failed: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
// --- Registries ---
|
// --- Registries ---
|
||||||
srcReg := fksources.NewRegistry()
|
srcReg := fksources.NewRegistry()
|
||||||
@@ -99,9 +106,9 @@ func main() {
|
|||||||
|
|
||||||
events := make(chan fkevent.Event, 256)
|
events := make(chan fkevent.Event, 256)
|
||||||
|
|
||||||
// --- Normalization (optional) ---
|
// --- Processors ---
|
||||||
//
|
//
|
||||||
// We install feedkit's normalize.Processor even before any normalizers exist.
|
// We install feedkit's processors/normalize.Processor even before any normalizers exist.
|
||||||
// With an empty normalizer list and RequireMatch=false, this is a no-op passthrough.
|
// With an empty normalizer list and RequireMatch=false, this is a no-op passthrough.
|
||||||
// It will begin transforming events as soon as:
|
// It will begin transforming events as soon as:
|
||||||
// 1) sources emit raw schemas (raw.*), and
|
// 1) sources emit raw schemas (raw.*), and
|
||||||
@@ -112,8 +119,9 @@ func main() {
|
|||||||
procReg.Register("normalize", func() (fkprocessors.Processor, error) {
|
procReg.Register("normalize", func() (fkprocessors.Processor, error) {
|
||||||
return fknormalize.NewProcessor(normalizers, false), nil
|
return fknormalize.NewProcessor(normalizers, false), nil
|
||||||
})
|
})
|
||||||
|
procReg.Register("dedupe", fkdedupe.Factory(dedupeMaxEntries))
|
||||||
|
|
||||||
chain, err := procReg.BuildChain([]string{"normalize"})
|
chain, err := procReg.BuildChain([]string{"normalize", "dedupe"})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("build processor chain failed: %v", err)
|
log.Fatalf("build processor chain failed: %v", err)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -9,9 +9,10 @@ import (
|
|||||||
|
|
||||||
"gitea.maximumdirect.net/ejr/feedkit/config"
|
"gitea.maximumdirect.net/ejr/feedkit/config"
|
||||||
fkevent "gitea.maximumdirect.net/ejr/feedkit/event"
|
fkevent "gitea.maximumdirect.net/ejr/feedkit/event"
|
||||||
fknormalize "gitea.maximumdirect.net/ejr/feedkit/normalize"
|
|
||||||
fkpipeline "gitea.maximumdirect.net/ejr/feedkit/pipeline"
|
fkpipeline "gitea.maximumdirect.net/ejr/feedkit/pipeline"
|
||||||
fkprocessors "gitea.maximumdirect.net/ejr/feedkit/processors"
|
fkprocessors "gitea.maximumdirect.net/ejr/feedkit/processors"
|
||||||
|
fkdedupe "gitea.maximumdirect.net/ejr/feedkit/processors/dedupe"
|
||||||
|
fknormalize "gitea.maximumdirect.net/ejr/feedkit/processors/normalize"
|
||||||
|
|
||||||
wfnormalizers "gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers"
|
wfnormalizers "gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers"
|
||||||
)
|
)
|
||||||
@@ -97,39 +98,23 @@ func TestExampleConfigLoads(t *testing.T) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestProcessorRegistryBuildsNormalizeChain(t *testing.T) {
|
func TestProcessorRegistryBuildsNormalizeThenDedupeChain(t *testing.T) {
|
||||||
normalizers := wfnormalizers.RegisterBuiltins(nil)
|
chain, err := buildProcessorChainForTests()
|
||||||
if len(normalizers) == 0 {
|
|
||||||
t.Fatalf("RegisterBuiltins() returned no normalizers")
|
|
||||||
}
|
|
||||||
|
|
||||||
procReg := fkprocessors.NewRegistry()
|
|
||||||
procReg.Register("normalize", func() (fkprocessors.Processor, error) {
|
|
||||||
return fknormalize.NewProcessor(normalizers, false), nil
|
|
||||||
})
|
|
||||||
|
|
||||||
chain, err := procReg.BuildChain([]string{"normalize"})
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("BuildChain() unexpected error: %v", err)
|
t.Fatalf("BuildChain() unexpected error: %v", err)
|
||||||
}
|
}
|
||||||
if len(chain) != 1 {
|
if len(chain) != 2 {
|
||||||
t.Fatalf("BuildChain() expected 1 processor, got %d", len(chain))
|
t.Fatalf("BuildChain() expected 2 processors, got %d", len(chain))
|
||||||
}
|
}
|
||||||
|
|
||||||
pl := &fkpipeline.Pipeline{Processors: chain}
|
pl := &fkpipeline.Pipeline{Processors: chain}
|
||||||
if len(pl.Processors) != 1 {
|
if len(pl.Processors) != 2 {
|
||||||
t.Fatalf("pipeline expected 1 processor, got %d", len(pl.Processors))
|
t.Fatalf("pipeline expected 2 processors, got %d", len(pl.Processors))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestNormalizeNoMatchPassThrough(t *testing.T) {
|
func TestNormalizeNoMatchPassThrough(t *testing.T) {
|
||||||
normalizers := wfnormalizers.RegisterBuiltins(nil)
|
chain, err := buildProcessorChainForTests()
|
||||||
procReg := fkprocessors.NewRegistry()
|
|
||||||
procReg.Register("normalize", func() (fkprocessors.Processor, error) {
|
|
||||||
return fknormalize.NewProcessor(normalizers, false), nil
|
|
||||||
})
|
|
||||||
|
|
||||||
chain, err := procReg.BuildChain([]string{"normalize"})
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("BuildChain() unexpected error: %v", err)
|
t.Fatalf("BuildChain() unexpected error: %v", err)
|
||||||
}
|
}
|
||||||
@@ -157,3 +142,50 @@ func TestNormalizeNoMatchPassThrough(t *testing.T) {
|
|||||||
t.Fatalf("Pipeline.Process() expected passthrough output, got %#v", *out)
|
t.Fatalf("Pipeline.Process() expected passthrough output, got %#v", *out)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestDedupeDropsSecondEventWithSameID(t *testing.T) {
|
||||||
|
chain, err := buildProcessorChainForTests()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("BuildChain() unexpected error: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
pl := &fkpipeline.Pipeline{Processors: chain}
|
||||||
|
in := fkevent.Event{
|
||||||
|
ID: "evt-dedupe-1",
|
||||||
|
Kind: fkevent.Kind("observation"),
|
||||||
|
Source: "test",
|
||||||
|
EmittedAt: time.Now().UTC(),
|
||||||
|
Schema: "raw.weatherfeeder.unknown.v1",
|
||||||
|
Payload: map[string]any{
|
||||||
|
"ok": true,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
first, err := pl.Process(context.Background(), in)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("first Pipeline.Process() unexpected error: %v", err)
|
||||||
|
}
|
||||||
|
if first == nil {
|
||||||
|
t.Fatalf("first Pipeline.Process() unexpectedly dropped event")
|
||||||
|
}
|
||||||
|
|
||||||
|
second, err := pl.Process(context.Background(), in)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("second Pipeline.Process() unexpected error: %v", err)
|
||||||
|
}
|
||||||
|
if second != nil {
|
||||||
|
t.Fatalf("second Pipeline.Process() expected dedupe drop, got %#v", *second)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func buildProcessorChainForTests() ([]fkprocessors.Processor, error) {
|
||||||
|
normalizers := wfnormalizers.RegisterBuiltins(nil)
|
||||||
|
|
||||||
|
procReg := fkprocessors.NewRegistry()
|
||||||
|
procReg.Register("normalize", func() (fkprocessors.Processor, error) {
|
||||||
|
return fknormalize.NewProcessor(normalizers, false), nil
|
||||||
|
})
|
||||||
|
procReg.Register("dedupe", fkdedupe.Factory(dedupeMaxEntries))
|
||||||
|
|
||||||
|
return procReg.BuildChain([]string{"normalize", "dedupe"})
|
||||||
|
}
|
||||||
|
|||||||
3
go.mod
3
go.mod
@@ -2,10 +2,11 @@ module gitea.maximumdirect.net/ejr/weatherfeeder
|
|||||||
|
|
||||||
go 1.25
|
go 1.25
|
||||||
|
|
||||||
require gitea.maximumdirect.net/ejr/feedkit v0.6.0
|
require gitea.maximumdirect.net/ejr/feedkit v0.8.0
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/klauspost/compress v1.17.2 // indirect
|
github.com/klauspost/compress v1.17.2 // indirect
|
||||||
|
github.com/lib/pq v1.10.9 // indirect
|
||||||
github.com/nats-io/nats.go v1.34.0 // indirect
|
github.com/nats-io/nats.go v1.34.0 // indirect
|
||||||
github.com/nats-io/nkeys v0.4.7 // indirect
|
github.com/nats-io/nkeys v0.4.7 // indirect
|
||||||
github.com/nats-io/nuid v1.0.1 // indirect
|
github.com/nats-io/nuid v1.0.1 // indirect
|
||||||
|
|||||||
6
go.sum
6
go.sum
@@ -1,7 +1,9 @@
|
|||||||
gitea.maximumdirect.net/ejr/feedkit v0.6.0 h1:GXwyNKvPp1sWN8TS5E5NDGFgimpyHlzerO5E+/qoTXg=
|
gitea.maximumdirect.net/ejr/feedkit v0.8.0 h1:JdEEy6T3AQ97alLNYcQ3crN3tOEZPLMBD0Qr/MH5/dw=
|
||||||
gitea.maximumdirect.net/ejr/feedkit v0.6.0/go.mod h1:wYtA10GouvSe7L/8e1UEC+tqcp32HJofExIo1k+Wjls=
|
gitea.maximumdirect.net/ejr/feedkit v0.8.0/go.mod h1:U6xC9xZLN3cL4yi7YBVyzGoHYRLJXusFCAKlj2kdYYQ=
|
||||||
github.com/klauspost/compress v1.17.2 h1:RlWWUY/Dr4fL8qk9YG7DTZ7PDgME2V4csBXA8L/ixi4=
|
github.com/klauspost/compress v1.17.2 h1:RlWWUY/Dr4fL8qk9YG7DTZ7PDgME2V4csBXA8L/ixi4=
|
||||||
github.com/klauspost/compress v1.17.2/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
|
github.com/klauspost/compress v1.17.2/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
|
||||||
|
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
|
||||||
|
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
|
||||||
github.com/nats-io/nats.go v1.34.0 h1:fnxnPCNiwIG5w08rlMcEKTUw4AV/nKyGCOJE8TdhSPk=
|
github.com/nats-io/nats.go v1.34.0 h1:fnxnPCNiwIG5w08rlMcEKTUw4AV/nKyGCOJE8TdhSPk=
|
||||||
github.com/nats-io/nats.go v1.34.0/go.mod h1:Ubdu4Nh9exXdSz0RVWRFBbRfrbSxOYd26oF0wkWclB8=
|
github.com/nats-io/nats.go v1.34.0/go.mod h1:Ubdu4Nh9exXdSz0RVWRFBbRfrbSxOYd26oF0wkWclB8=
|
||||||
github.com/nats-io/nkeys v0.4.7 h1:RwNJbbIdYCoClSDNY7QVKZlyb/wfT6ugvFCiKy6vDvI=
|
github.com/nats-io/nkeys v0.4.7 h1:RwNJbbIdYCoClSDNY7QVKZlyb/wfT6ugvFCiKy6vDvI=
|
||||||
|
|||||||
@@ -2,7 +2,7 @@
|
|||||||
package normalizers
|
package normalizers
|
||||||
|
|
||||||
import (
|
import (
|
||||||
fknormalize "gitea.maximumdirect.net/ejr/feedkit/normalize"
|
fknormalize "gitea.maximumdirect.net/ejr/feedkit/processors/normalize"
|
||||||
|
|
||||||
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers/nws"
|
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers/nws"
|
||||||
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers/openmeteo"
|
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers/openmeteo"
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ import (
|
|||||||
"reflect"
|
"reflect"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
fknormalize "gitea.maximumdirect.net/ejr/feedkit/normalize"
|
fknormalize "gitea.maximumdirect.net/ejr/feedkit/processors/normalize"
|
||||||
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers/nws"
|
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers/nws"
|
||||||
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers/openmeteo"
|
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers/openmeteo"
|
||||||
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers/openweather"
|
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/normalizers/openweather"
|
||||||
|
|||||||
@@ -29,7 +29,7 @@
|
|||||||
//
|
//
|
||||||
// 1. One normalizer per file.
|
// 1. One normalizer per file.
|
||||||
// Each file contains exactly one Normalizer implementation (one type that
|
// Each file contains exactly one Normalizer implementation (one type that
|
||||||
// satisfies feedkit/normalize.Normalizer).
|
// satisfies feedkit/processors/normalize.Normalizer).
|
||||||
// Helper files are encouraged (types.go, common.go, mapping.go, etc.) as long
|
// Helper files are encouraged (types.go, common.go, mapping.go, etc.) as long
|
||||||
// as they do not define additional Normalizer types.
|
// as they do not define additional Normalizer types.
|
||||||
//
|
//
|
||||||
|
|||||||
@@ -17,9 +17,10 @@ import (
|
|||||||
// ForecastNormalizer converts:
|
// ForecastNormalizer converts:
|
||||||
//
|
//
|
||||||
// standards.SchemaRawNWSHourlyForecastV1 -> standards.SchemaWeatherForecastV1
|
// standards.SchemaRawNWSHourlyForecastV1 -> standards.SchemaWeatherForecastV1
|
||||||
|
// standards.SchemaRawNWSNarrativeForecastV1 -> standards.SchemaWeatherForecastV1
|
||||||
//
|
//
|
||||||
// It interprets NWS GeoJSON gridpoint *hourly* forecast responses and maps them into
|
// It keeps one NWS forecast normalization entrypoint and dispatches to product-specific
|
||||||
// the canonical model.WeatherForecastRun representation.
|
// builders by raw schema.
|
||||||
//
|
//
|
||||||
// Caveats / policy:
|
// Caveats / policy:
|
||||||
// 1. NWS forecast periods do not include METAR presentWeather phenomena, so ConditionCode
|
// 1. NWS forecast periods do not include METAR presentWeather phenomena, so ConditionCode
|
||||||
@@ -29,81 +30,186 @@ import (
|
|||||||
type ForecastNormalizer struct{}
|
type ForecastNormalizer struct{}
|
||||||
|
|
||||||
func (ForecastNormalizer) Match(e event.Event) bool {
|
func (ForecastNormalizer) Match(e event.Event) bool {
|
||||||
s := strings.TrimSpace(e.Schema)
|
switch strings.TrimSpace(e.Schema) {
|
||||||
return s == standards.SchemaRawNWSHourlyForecastV1
|
case standards.SchemaRawNWSHourlyForecastV1:
|
||||||
|
return true
|
||||||
|
case standards.SchemaRawNWSNarrativeForecastV1:
|
||||||
|
return true
|
||||||
|
default:
|
||||||
|
return false
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (ForecastNormalizer) Normalize(ctx context.Context, in event.Event) (*event.Event, error) {
|
func (ForecastNormalizer) Normalize(ctx context.Context, in event.Event) (*event.Event, error) {
|
||||||
_ = ctx // normalization is pure/CPU; keep ctx for future expensive steps
|
_ = ctx // normalization is pure/CPU; keep ctx for future expensive steps
|
||||||
|
|
||||||
|
return normalizeForecastEventBySchema(in)
|
||||||
|
}
|
||||||
|
|
||||||
|
func normalizeForecastEventBySchema(in event.Event) (*event.Event, error) {
|
||||||
|
switch strings.TrimSpace(in.Schema) {
|
||||||
|
case standards.SchemaRawNWSHourlyForecastV1:
|
||||||
|
return normalizeHourlyForecastEvent(in)
|
||||||
|
case standards.SchemaRawNWSNarrativeForecastV1:
|
||||||
|
return normalizeNarrativeForecastEvent(in)
|
||||||
|
default:
|
||||||
|
return nil, fmt.Errorf("unsupported nws forecast schema %q", strings.TrimSpace(in.Schema))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func normalizeHourlyForecastEvent(in event.Event) (*event.Event, error) {
|
||||||
return normcommon.NormalizeJSON(
|
return normcommon.NormalizeJSON(
|
||||||
in,
|
in,
|
||||||
"nws hourly forecast",
|
"nws hourly forecast",
|
||||||
standards.SchemaWeatherForecastV1,
|
standards.SchemaWeatherForecastV1,
|
||||||
buildForecast,
|
buildHourlyForecast,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
// buildForecast contains the domain mapping logic (provider -> canonical model).
|
func normalizeNarrativeForecastEvent(in event.Event) (*event.Event, error) {
|
||||||
func buildForecast(parsed nwsForecastResponse) (model.WeatherForecastRun, time.Time, error) {
|
return normcommon.NormalizeJSON(
|
||||||
// IssuedAt is required by the canonical model.
|
in,
|
||||||
issuedStr := strings.TrimSpace(parsed.Properties.GeneratedAt)
|
"nws narrative forecast",
|
||||||
|
standards.SchemaWeatherForecastV1,
|
||||||
|
buildNarrativeForecast,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildHourlyForecast contains hourly forecast mapping logic (provider -> canonical model).
|
||||||
|
func buildHourlyForecast(parsed nwsHourlyForecastResponse) (model.WeatherForecastRun, time.Time, error) {
|
||||||
|
issuedAt, updatedAt, err := parseForecastRunTimes(parsed.Properties.GeneratedAt, parsed.Properties.UpdateTime)
|
||||||
|
if err != nil {
|
||||||
|
return model.WeatherForecastRun{}, time.Time{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Best-effort location centroid from the GeoJSON polygon (optional).
|
||||||
|
lat, lon := centroidLatLon(parsed.Geometry.Coordinates)
|
||||||
|
|
||||||
|
run := newForecastRunBase(
|
||||||
|
issuedAt,
|
||||||
|
updatedAt,
|
||||||
|
model.ForecastProductHourly,
|
||||||
|
lat,
|
||||||
|
lon,
|
||||||
|
parsed.Properties.Elevation.Value,
|
||||||
|
)
|
||||||
|
|
||||||
|
periods := make([]model.WeatherForecastPeriod, 0, len(parsed.Properties.Periods))
|
||||||
|
for i, p := range parsed.Properties.Periods {
|
||||||
|
period, err := mapHourlyForecastPeriod(i, p)
|
||||||
|
if err != nil {
|
||||||
|
return model.WeatherForecastRun{}, time.Time{}, err
|
||||||
|
}
|
||||||
|
periods = append(periods, period)
|
||||||
|
}
|
||||||
|
|
||||||
|
run.Periods = periods
|
||||||
|
|
||||||
|
// EffectiveAt policy for forecasts: treat IssuedAt as the effective time (dedupe-friendly).
|
||||||
|
return run, issuedAt, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildNarrativeForecast contains narrative forecast mapping logic (provider -> canonical model).
|
||||||
|
func buildNarrativeForecast(parsed nwsNarrativeForecastResponse) (model.WeatherForecastRun, time.Time, error) {
|
||||||
|
issuedAt, updatedAt, err := parseForecastRunTimes(parsed.Properties.GeneratedAt, parsed.Properties.UpdateTime)
|
||||||
|
if err != nil {
|
||||||
|
return model.WeatherForecastRun{}, time.Time{}, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Best-effort location centroid from the GeoJSON polygon (optional).
|
||||||
|
lat, lon := centroidLatLon(parsed.Geometry.Coordinates)
|
||||||
|
|
||||||
|
run := newForecastRunBase(
|
||||||
|
issuedAt,
|
||||||
|
updatedAt,
|
||||||
|
model.ForecastProductNarrative,
|
||||||
|
lat,
|
||||||
|
lon,
|
||||||
|
parsed.Properties.Elevation.Value,
|
||||||
|
)
|
||||||
|
|
||||||
|
periods := make([]model.WeatherForecastPeriod, 0, len(parsed.Properties.Periods))
|
||||||
|
for i, p := range parsed.Properties.Periods {
|
||||||
|
period, err := mapNarrativeForecastPeriod(i, p)
|
||||||
|
if err != nil {
|
||||||
|
return model.WeatherForecastRun{}, time.Time{}, err
|
||||||
|
}
|
||||||
|
periods = append(periods, period)
|
||||||
|
}
|
||||||
|
|
||||||
|
run.Periods = periods
|
||||||
|
|
||||||
|
// EffectiveAt policy for forecasts: treat IssuedAt as the effective time (dedupe-friendly).
|
||||||
|
return run, issuedAt, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseForecastRunTimes(generatedAt, updateTime string) (time.Time, *time.Time, error) {
|
||||||
|
issuedStr := strings.TrimSpace(generatedAt)
|
||||||
if issuedStr == "" {
|
if issuedStr == "" {
|
||||||
return model.WeatherForecastRun{}, time.Time{}, fmt.Errorf("missing properties.generatedAt")
|
return time.Time{}, nil, fmt.Errorf("missing properties.generatedAt")
|
||||||
}
|
}
|
||||||
issuedAt, err := nwscommon.ParseTime(issuedStr)
|
issuedAt, err := nwscommon.ParseTime(issuedStr)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return model.WeatherForecastRun{}, time.Time{}, fmt.Errorf("invalid properties.generatedAt %q: %w", issuedStr, err)
|
return time.Time{}, nil, fmt.Errorf("invalid properties.generatedAt %q: %w", issuedStr, err)
|
||||||
}
|
}
|
||||||
issuedAt = issuedAt.UTC()
|
issuedAt = issuedAt.UTC()
|
||||||
|
|
||||||
// UpdatedAt is optional.
|
|
||||||
var updatedAt *time.Time
|
var updatedAt *time.Time
|
||||||
if s := strings.TrimSpace(parsed.Properties.UpdateTime); s != "" {
|
if s := strings.TrimSpace(updateTime); s != "" {
|
||||||
if t, err := nwscommon.ParseTime(s); err == nil {
|
if t, err := nwscommon.ParseTime(s); err == nil {
|
||||||
tt := t.UTC()
|
tt := t.UTC()
|
||||||
updatedAt = &tt
|
updatedAt = &tt
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Best-effort location centroid from the GeoJSON polygon (optional).
|
return issuedAt, updatedAt, nil
|
||||||
lat, lon := centroidLatLon(parsed.Geometry.Coordinates)
|
}
|
||||||
|
|
||||||
// Schema is explicitly hourly, so product is not a heuristic.
|
func newForecastRunBase(
|
||||||
run := model.WeatherForecastRun{
|
issuedAt time.Time,
|
||||||
|
updatedAt *time.Time,
|
||||||
|
product model.ForecastProduct,
|
||||||
|
lat, lon, elevation *float64,
|
||||||
|
) model.WeatherForecastRun {
|
||||||
|
return model.WeatherForecastRun{
|
||||||
LocationID: "",
|
LocationID: "",
|
||||||
LocationName: "",
|
LocationName: "",
|
||||||
|
|
||||||
IssuedAt: issuedAt,
|
IssuedAt: issuedAt,
|
||||||
UpdatedAt: updatedAt,
|
UpdatedAt: updatedAt,
|
||||||
Product: model.ForecastProductHourly,
|
Product: product,
|
||||||
|
|
||||||
Latitude: lat,
|
Latitude: lat,
|
||||||
Longitude: lon,
|
Longitude: lon,
|
||||||
ElevationMeters: parsed.Properties.Elevation.Value,
|
|
||||||
|
|
||||||
|
ElevationMeters: elevation,
|
||||||
Periods: nil,
|
Periods: nil,
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
periods := make([]model.WeatherForecastPeriod, 0, len(parsed.Properties.Periods))
|
func parseForecastPeriodWindow(startStr, endStr string, idx int) (time.Time, time.Time, error) {
|
||||||
for i, p := range parsed.Properties.Periods {
|
startStr = strings.TrimSpace(startStr)
|
||||||
startStr := strings.TrimSpace(p.StartTime)
|
endStr = strings.TrimSpace(endStr)
|
||||||
endStr := strings.TrimSpace(p.EndTime)
|
|
||||||
|
|
||||||
if startStr == "" || endStr == "" {
|
if startStr == "" || endStr == "" {
|
||||||
return model.WeatherForecastRun{}, time.Time{}, fmt.Errorf("periods[%d]: missing startTime/endTime", i)
|
return time.Time{}, time.Time{}, fmt.Errorf("periods[%d]: missing startTime/endTime", idx)
|
||||||
}
|
}
|
||||||
|
|
||||||
start, err := nwscommon.ParseTime(startStr)
|
start, err := nwscommon.ParseTime(startStr)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return model.WeatherForecastRun{}, time.Time{}, fmt.Errorf("periods[%d].startTime invalid %q: %w", i, startStr, err)
|
return time.Time{}, time.Time{}, fmt.Errorf("periods[%d].startTime invalid %q: %w", idx, startStr, err)
|
||||||
}
|
}
|
||||||
end, err := nwscommon.ParseTime(endStr)
|
end, err := nwscommon.ParseTime(endStr)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return model.WeatherForecastRun{}, time.Time{}, fmt.Errorf("periods[%d].endTime invalid %q: %w", i, endStr, err)
|
return time.Time{}, time.Time{}, fmt.Errorf("periods[%d].endTime invalid %q: %w", idx, endStr, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return start.UTC(), end.UTC(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func mapHourlyForecastPeriod(idx int, p nwsHourlyForecastPeriod) (model.WeatherForecastPeriod, error) {
|
||||||
|
start, end, err := parseForecastPeriodWindow(p.StartTime, p.EndTime, idx)
|
||||||
|
if err != nil {
|
||||||
|
return model.WeatherForecastPeriod{}, err
|
||||||
}
|
}
|
||||||
start = start.UTC()
|
|
||||||
end = end.UTC()
|
|
||||||
|
|
||||||
// NWS hourly supplies isDaytime; make it a pointer to match the canonical model.
|
// NWS hourly supplies isDaytime; make it a pointer to match the canonical model.
|
||||||
var isDay *bool
|
var isDay *bool
|
||||||
@@ -118,9 +224,7 @@ func buildForecast(parsed nwsForecastResponse) (model.WeatherForecastRun, time.T
|
|||||||
providerDesc := strings.TrimSpace(p.ShortForecast)
|
providerDesc := strings.TrimSpace(p.ShortForecast)
|
||||||
wmo := wmoFromNWSForecast(providerDesc, p.Icon, tempC)
|
wmo := wmoFromNWSForecast(providerDesc, p.Icon, tempC)
|
||||||
|
|
||||||
canonicalText := standards.WMOText(wmo, isDay)
|
return model.WeatherForecastPeriod{
|
||||||
|
|
||||||
period := model.WeatherForecastPeriod{
|
|
||||||
StartTime: start,
|
StartTime: start,
|
||||||
EndTime: end,
|
EndTime: end,
|
||||||
|
|
||||||
@@ -128,15 +232,9 @@ func buildForecast(parsed nwsForecastResponse) (model.WeatherForecastRun, time.T
|
|||||||
IsDay: isDay,
|
IsDay: isDay,
|
||||||
|
|
||||||
ConditionCode: wmo,
|
ConditionCode: wmo,
|
||||||
ConditionText: canonicalText,
|
|
||||||
|
|
||||||
ProviderRawDescription: providerDesc,
|
// For forecasts, keep provider short forecast text as the human-facing description.
|
||||||
|
TextDescription: providerDesc,
|
||||||
// For forecasts, keep provider text as the human-facing description.
|
|
||||||
TextDescription: strings.TrimSpace(p.ShortForecast),
|
|
||||||
DetailedText: strings.TrimSpace(p.DetailedForecast),
|
|
||||||
|
|
||||||
IconURL: strings.TrimSpace(p.Icon),
|
|
||||||
|
|
||||||
TemperatureC: tempC,
|
TemperatureC: tempC,
|
||||||
|
|
||||||
@@ -147,13 +245,49 @@ func buildForecast(parsed nwsForecastResponse) (model.WeatherForecastRun, time.T
|
|||||||
WindSpeedKmh: parseNWSWindSpeedKmh(p.WindSpeed),
|
WindSpeedKmh: parseNWSWindSpeedKmh(p.WindSpeed),
|
||||||
|
|
||||||
ProbabilityOfPrecipitationPercent: p.ProbabilityOfPrecipitation.Value,
|
ProbabilityOfPrecipitationPercent: p.ProbabilityOfPrecipitation.Value,
|
||||||
}
|
}, nil
|
||||||
|
}
|
||||||
periods = append(periods, period)
|
|
||||||
}
|
func mapNarrativeForecastPeriod(idx int, p nwsNarrativeForecastPeriod) (model.WeatherForecastPeriod, error) {
|
||||||
|
start, end, err := parseForecastPeriodWindow(p.StartTime, p.EndTime, idx)
|
||||||
run.Periods = periods
|
if err != nil {
|
||||||
|
return model.WeatherForecastPeriod{}, err
|
||||||
// EffectiveAt policy for forecasts: treat IssuedAt as the effective time (dedupe-friendly).
|
}
|
||||||
return run, issuedAt, nil
|
|
||||||
|
// NWS narrative supplies isDaytime; make it a pointer to match the canonical model.
|
||||||
|
var isDay *bool
|
||||||
|
if p.IsDaytime != nil {
|
||||||
|
b := *p.IsDaytime
|
||||||
|
isDay = &b
|
||||||
|
}
|
||||||
|
|
||||||
|
tempC := tempCFromNWS(p.Temperature, p.TemperatureUnit)
|
||||||
|
|
||||||
|
// Infer WMO from shortForecast (and fall back to icon token).
|
||||||
|
shortForecast := strings.TrimSpace(p.ShortForecast)
|
||||||
|
wmo := wmoFromNWSForecast(shortForecast, p.Icon, tempC)
|
||||||
|
|
||||||
|
textDescription := strings.TrimSpace(p.DetailedForecast)
|
||||||
|
if textDescription == "" {
|
||||||
|
textDescription = shortForecast
|
||||||
|
}
|
||||||
|
|
||||||
|
return model.WeatherForecastPeriod{
|
||||||
|
StartTime: start,
|
||||||
|
EndTime: end,
|
||||||
|
|
||||||
|
Name: strings.TrimSpace(p.Name),
|
||||||
|
IsDay: isDay,
|
||||||
|
|
||||||
|
ConditionCode: wmo,
|
||||||
|
|
||||||
|
TextDescription: textDescription,
|
||||||
|
|
||||||
|
TemperatureC: tempC,
|
||||||
|
|
||||||
|
WindDirectionDegrees: parseNWSWindDirectionDegrees(p.WindDirection),
|
||||||
|
WindSpeedKmh: parseNWSWindSpeedKmh(p.WindSpeed),
|
||||||
|
|
||||||
|
ProbabilityOfPrecipitationPercent: p.ProbabilityOfPrecipitation.Value,
|
||||||
|
}, nil
|
||||||
}
|
}
|
||||||
|
|||||||
211
internal/normalizers/nws/forecast_test.go
Normal file
211
internal/normalizers/nws/forecast_test.go
Normal file
@@ -0,0 +1,211 @@
|
|||||||
|
package nws
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"math"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"gitea.maximumdirect.net/ejr/feedkit/event"
|
||||||
|
"gitea.maximumdirect.net/ejr/weatherfeeder/model"
|
||||||
|
"gitea.maximumdirect.net/ejr/weatherfeeder/standards"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestBuildHourlyForecastUsesShortForecastAsTextDescription(t *testing.T) {
|
||||||
|
parsed := nwsHourlyForecastResponse{}
|
||||||
|
parsed.Properties.GeneratedAt = "2026-03-16T18:00:00Z"
|
||||||
|
parsed.Properties.Periods = []nwsHourlyForecastPeriod{
|
||||||
|
{
|
||||||
|
StartTime: "2026-03-16T19:00:00Z",
|
||||||
|
EndTime: "2026-03-16T20:00:00Z",
|
||||||
|
ShortForecast: " Mostly Cloudy ",
|
||||||
|
DetailedForecast: "Clouds increasing overnight.",
|
||||||
|
Icon: "https://example.invalid/icon",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
run, effectiveAt, err := buildHourlyForecast(parsed)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("buildHourlyForecast() error = %v", err)
|
||||||
|
}
|
||||||
|
if len(run.Periods) != 1 {
|
||||||
|
t.Fatalf("periods len = %d, want 1", len(run.Periods))
|
||||||
|
}
|
||||||
|
if got, want := run.Periods[0].TextDescription, "Mostly Cloudy"; got != want {
|
||||||
|
t.Fatalf("TextDescription = %q, want %q", got, want)
|
||||||
|
}
|
||||||
|
|
||||||
|
wantIssued := time.Date(2026, 3, 16, 18, 0, 0, 0, time.UTC)
|
||||||
|
if !run.IssuedAt.Equal(wantIssued) {
|
||||||
|
t.Fatalf("IssuedAt = %s, want %s", run.IssuedAt.Format(time.RFC3339), wantIssued.Format(time.RFC3339))
|
||||||
|
}
|
||||||
|
if !effectiveAt.Equal(wantIssued) {
|
||||||
|
t.Fatalf("effectiveAt = %s, want %s", effectiveAt.Format(time.RFC3339), wantIssued.Format(time.RFC3339))
|
||||||
|
}
|
||||||
|
|
||||||
|
assertNoLegacyForecastDescriptionKeys(t, run.Periods[0])
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNormalizeForecastEventBySchemaRejectsUnsupportedSchema(t *testing.T) {
|
||||||
|
_, err := normalizeForecastEventBySchema(event.Event{
|
||||||
|
Schema: "raw.nws.daily.forecast.v1",
|
||||||
|
})
|
||||||
|
if err == nil {
|
||||||
|
t.Fatalf("normalizeForecastEventBySchema() expected unsupported schema error")
|
||||||
|
}
|
||||||
|
if !strings.Contains(err.Error(), "unsupported nws forecast schema") {
|
||||||
|
t.Fatalf("error = %q, want unsupported schema context", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNormalizeForecastEventBySchemaRoutesHourly(t *testing.T) {
|
||||||
|
_, err := normalizeForecastEventBySchema(event.Event{
|
||||||
|
Schema: standards.SchemaRawNWSHourlyForecastV1,
|
||||||
|
Payload: map[string]any{"properties": map[string]any{}},
|
||||||
|
})
|
||||||
|
if err == nil {
|
||||||
|
t.Fatalf("normalizeForecastEventBySchema() expected build error for missing generatedAt")
|
||||||
|
}
|
||||||
|
if !strings.Contains(err.Error(), "missing properties.generatedAt") {
|
||||||
|
t.Fatalf("error = %q, want missing properties.generatedAt", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNormalizeForecastEventBySchemaRoutesNarrative(t *testing.T) {
|
||||||
|
_, err := normalizeForecastEventBySchema(event.Event{
|
||||||
|
Schema: standards.SchemaRawNWSNarrativeForecastV1,
|
||||||
|
Payload: map[string]any{"properties": map[string]any{}},
|
||||||
|
})
|
||||||
|
if err == nil {
|
||||||
|
t.Fatalf("normalizeForecastEventBySchema() expected build error for missing generatedAt")
|
||||||
|
}
|
||||||
|
if !strings.Contains(err.Error(), "missing properties.generatedAt") {
|
||||||
|
t.Fatalf("error = %q, want missing properties.generatedAt", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildNarrativeForecastMapsExpectedFields(t *testing.T) {
|
||||||
|
parsed := nwsNarrativeForecastResponse{}
|
||||||
|
parsed.Properties.GeneratedAt = "2026-03-27T15:17:01Z"
|
||||||
|
isDay := true
|
||||||
|
tempF := 53.0
|
||||||
|
pop := 20.0
|
||||||
|
|
||||||
|
parsed.Properties.Periods = []nwsNarrativeForecastPeriod{
|
||||||
|
{
|
||||||
|
Name: "Today",
|
||||||
|
StartTime: "2026-03-27T10:00:00-05:00",
|
||||||
|
EndTime: "2026-03-27T18:00:00-05:00",
|
||||||
|
IsDaytime: &isDay,
|
||||||
|
Temperature: &tempF,
|
||||||
|
TemperatureUnit: "F",
|
||||||
|
WindSpeed: "10 to 14 mph",
|
||||||
|
WindDirection: "SW",
|
||||||
|
ShortForecast: "Partly Sunny",
|
||||||
|
DetailedForecast: " Partly sunny, with a high near 53. ",
|
||||||
|
ProbabilityOfPrecipitation: struct {
|
||||||
|
UnitCode string `json:"unitCode"`
|
||||||
|
Value *float64 `json:"value"`
|
||||||
|
}{
|
||||||
|
UnitCode: "wmoUnit:percent",
|
||||||
|
Value: &pop,
|
||||||
|
},
|
||||||
|
Icon: "https://api.weather.gov/icons/land/day/bkn?size=medium",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
run, effectiveAt, err := buildNarrativeForecast(parsed)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("buildNarrativeForecast() error = %v", err)
|
||||||
|
}
|
||||||
|
if got, want := run.Product, model.ForecastProductNarrative; got != want {
|
||||||
|
t.Fatalf("Product = %q, want %q", got, want)
|
||||||
|
}
|
||||||
|
if len(run.Periods) != 1 {
|
||||||
|
t.Fatalf("periods len = %d, want 1", len(run.Periods))
|
||||||
|
}
|
||||||
|
|
||||||
|
p := run.Periods[0]
|
||||||
|
if got, want := p.TextDescription, "Partly sunny, with a high near 53."; got != want {
|
||||||
|
t.Fatalf("TextDescription = %q, want %q", got, want)
|
||||||
|
}
|
||||||
|
if p.TemperatureC == nil {
|
||||||
|
t.Fatalf("TemperatureC is nil, want converted value")
|
||||||
|
}
|
||||||
|
if math.Abs(*p.TemperatureC-11.6666666667) > 0.0001 {
|
||||||
|
t.Fatalf("TemperatureC = %.6f, want ~11.6667", *p.TemperatureC)
|
||||||
|
}
|
||||||
|
if p.IsDay == nil || !*p.IsDay {
|
||||||
|
t.Fatalf("IsDay = %v, want true", p.IsDay)
|
||||||
|
}
|
||||||
|
if p.WindDirectionDegrees == nil || *p.WindDirectionDegrees != 225 {
|
||||||
|
t.Fatalf("WindDirectionDegrees = %v, want 225", p.WindDirectionDegrees)
|
||||||
|
}
|
||||||
|
if p.WindSpeedKmh == nil || math.Abs(*p.WindSpeedKmh-19.3128) > 0.001 {
|
||||||
|
t.Fatalf("WindSpeedKmh = %.6f, want ~19.3128", derefOrZero(p.WindSpeedKmh))
|
||||||
|
}
|
||||||
|
if p.ProbabilityOfPrecipitationPercent == nil || *p.ProbabilityOfPrecipitationPercent != 20 {
|
||||||
|
t.Fatalf("ProbabilityOfPrecipitationPercent = %v, want 20", p.ProbabilityOfPrecipitationPercent)
|
||||||
|
}
|
||||||
|
|
||||||
|
wantIssued := time.Date(2026, 3, 27, 15, 17, 1, 0, time.UTC)
|
||||||
|
if !run.IssuedAt.Equal(wantIssued) {
|
||||||
|
t.Fatalf("IssuedAt = %s, want %s", run.IssuedAt.Format(time.RFC3339), wantIssued.Format(time.RFC3339))
|
||||||
|
}
|
||||||
|
if !effectiveAt.Equal(wantIssued) {
|
||||||
|
t.Fatalf("effectiveAt = %s, want %s", effectiveAt.Format(time.RFC3339), wantIssued.Format(time.RFC3339))
|
||||||
|
}
|
||||||
|
|
||||||
|
assertNoLegacyForecastDescriptionKeys(t, p)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildNarrativeForecastFallsBackToShortForecastDescription(t *testing.T) {
|
||||||
|
parsed := nwsNarrativeForecastResponse{}
|
||||||
|
parsed.Properties.GeneratedAt = "2026-03-27T15:17:01Z"
|
||||||
|
parsed.Properties.Periods = []nwsNarrativeForecastPeriod{
|
||||||
|
{
|
||||||
|
StartTime: "2026-03-27T18:00:00-05:00",
|
||||||
|
EndTime: "2026-03-28T06:00:00-05:00",
|
||||||
|
ShortForecast: " Mostly Clear ",
|
||||||
|
DetailedForecast: " ",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
run, _, err := buildNarrativeForecast(parsed)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("buildNarrativeForecast() error = %v", err)
|
||||||
|
}
|
||||||
|
if len(run.Periods) != 1 {
|
||||||
|
t.Fatalf("periods len = %d, want 1", len(run.Periods))
|
||||||
|
}
|
||||||
|
if got, want := run.Periods[0].TextDescription, "Mostly Clear"; got != want {
|
||||||
|
t.Fatalf("TextDescription = %q, want %q", got, want)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func assertNoLegacyForecastDescriptionKeys(t *testing.T, period any) {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
b, err := json.Marshal(period)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("json.Marshal(period) error = %v", err)
|
||||||
|
}
|
||||||
|
var got map[string]any
|
||||||
|
if err := json.Unmarshal(b, &got); err != nil {
|
||||||
|
t.Fatalf("json.Unmarshal(period) error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, key := range []string{"conditionText", "providerRawDescription", "detailedText", "iconUrl"} {
|
||||||
|
if _, ok := got[key]; ok {
|
||||||
|
t.Fatalf("unexpected legacy key %q in marshaled period: %#v", key, got)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func derefOrZero(v *float64) float64 {
|
||||||
|
if v == nil {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
return *v
|
||||||
|
}
|
||||||
@@ -54,14 +54,6 @@ func buildObservation(parsed nwsObservationResponse) (model.WeatherObservation,
|
|||||||
ts = t.UTC()
|
ts = t.UTC()
|
||||||
}
|
}
|
||||||
|
|
||||||
cloudLayers := make([]model.CloudLayer, 0, len(parsed.Properties.CloudLayers))
|
|
||||||
for _, cl := range parsed.Properties.CloudLayers {
|
|
||||||
cloudLayers = append(cloudLayers, model.CloudLayer{
|
|
||||||
BaseMeters: cl.Base.Value,
|
|
||||||
Amount: cl.Amount,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
// Preserve raw presentWeather objects (for troubleshooting / drift analysis).
|
// Preserve raw presentWeather objects (for troubleshooting / drift analysis).
|
||||||
present := make([]model.PresentWeather, 0, len(parsed.Properties.PresentWeather))
|
present := make([]model.PresentWeather, 0, len(parsed.Properties.PresentWeather))
|
||||||
for _, pw := range parsed.Properties.PresentWeather {
|
for _, pw := range parsed.Properties.PresentWeather {
|
||||||
@@ -70,6 +62,7 @@ func buildObservation(parsed nwsObservationResponse) (model.WeatherObservation,
|
|||||||
|
|
||||||
// Decode presentWeather into typed METAR phenomena for mapping.
|
// Decode presentWeather into typed METAR phenomena for mapping.
|
||||||
phenomena := decodeMetarPhenomena(parsed.Properties.PresentWeather)
|
phenomena := decodeMetarPhenomena(parsed.Properties.PresentWeather)
|
||||||
|
cloudLayers := parsed.Properties.CloudLayers
|
||||||
|
|
||||||
providerDesc := strings.TrimSpace(parsed.Properties.TextDescription)
|
providerDesc := strings.TrimSpace(parsed.Properties.TextDescription)
|
||||||
|
|
||||||
@@ -81,9 +74,6 @@ func buildObservation(parsed nwsObservationResponse) (model.WeatherObservation,
|
|||||||
isDay = isDayFromLatLonTime(*lat, *lon, ts)
|
isDay = isDayFromLatLonTime(*lat, *lon, ts)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Canonical condition text comes from our WMO table.
|
|
||||||
canonicalText := standards.WMOText(wmo, isDay)
|
|
||||||
|
|
||||||
// Apparent temperature: prefer wind chill when both are supplied.
|
// Apparent temperature: prefer wind chill when both are supplied.
|
||||||
var apparentC *float64
|
var apparentC *float64
|
||||||
if parsed.Properties.WindChill.Value != nil {
|
if parsed.Properties.WindChill.Value != nil {
|
||||||
@@ -98,15 +88,9 @@ func buildObservation(parsed nwsObservationResponse) (model.WeatherObservation,
|
|||||||
Timestamp: ts,
|
Timestamp: ts,
|
||||||
|
|
||||||
ConditionCode: wmo,
|
ConditionCode: wmo,
|
||||||
ConditionText: canonicalText,
|
|
||||||
IsDay: isDay,
|
IsDay: isDay,
|
||||||
|
|
||||||
ProviderRawDescription: providerDesc,
|
TextDescription: providerDesc,
|
||||||
|
|
||||||
// Transitional / human-facing:
|
|
||||||
// keep output consistent by populating TextDescription from canonical text.
|
|
||||||
TextDescription: canonicalText,
|
|
||||||
IconURL: parsed.Properties.Icon,
|
|
||||||
|
|
||||||
TemperatureC: parsed.Properties.Temperature.Value,
|
TemperatureC: parsed.Properties.Temperature.Value,
|
||||||
DewpointC: parsed.Properties.Dewpoint.Value,
|
DewpointC: parsed.Properties.Dewpoint.Value,
|
||||||
@@ -115,19 +99,21 @@ func buildObservation(parsed nwsObservationResponse) (model.WeatherObservation,
|
|||||||
WindSpeedKmh: parsed.Properties.WindSpeed.Value,
|
WindSpeedKmh: parsed.Properties.WindSpeed.Value,
|
||||||
WindGustKmh: parsed.Properties.WindGust.Value,
|
WindGustKmh: parsed.Properties.WindGust.Value,
|
||||||
|
|
||||||
BarometricPressurePa: parsed.Properties.BarometricPressure.Value,
|
BarometricPressurePa: pressurePrecedenceNWS(parsed.Properties.SeaLevelPressure.Value, parsed.Properties.BarometricPressure.Value),
|
||||||
SeaLevelPressurePa: parsed.Properties.SeaLevelPressure.Value,
|
|
||||||
VisibilityMeters: parsed.Properties.Visibility.Value,
|
VisibilityMeters: parsed.Properties.Visibility.Value,
|
||||||
|
|
||||||
RelativeHumidityPercent: parsed.Properties.RelativeHumidity.Value,
|
RelativeHumidityPercent: parsed.Properties.RelativeHumidity.Value,
|
||||||
ApparentTemperatureC: apparentC,
|
ApparentTemperatureC: apparentC,
|
||||||
|
|
||||||
ElevationMeters: parsed.Properties.Elevation.Value,
|
|
||||||
RawMessage: parsed.Properties.RawMessage,
|
|
||||||
|
|
||||||
PresentWeather: present,
|
PresentWeather: present,
|
||||||
CloudLayers: cloudLayers,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return obs, ts, nil
|
return obs, ts, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func pressurePrecedenceNWS(seaLevelPa, barometricPa *float64) *float64 {
|
||||||
|
if seaLevelPa != nil {
|
||||||
|
return seaLevelPa
|
||||||
|
}
|
||||||
|
return barometricPa
|
||||||
|
}
|
||||||
|
|||||||
51
internal/normalizers/nws/observation_test.go
Normal file
51
internal/normalizers/nws/observation_test.go
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
package nws
|
||||||
|
|
||||||
|
import "testing"
|
||||||
|
|
||||||
|
func TestBuildObservationTextDescriptionAndPressurePrecedence(t *testing.T) {
|
||||||
|
barometric := 100200.0
|
||||||
|
seaLevel := 101400.0
|
||||||
|
|
||||||
|
parsed := nwsObservationResponse{}
|
||||||
|
parsed.Properties.Timestamp = "2026-03-16T19:00:00Z"
|
||||||
|
parsed.Properties.TextDescription = " Overcast "
|
||||||
|
parsed.Properties.BarometricPressure.Value = &barometric
|
||||||
|
parsed.Properties.SeaLevelPressure.Value = &seaLevel
|
||||||
|
|
||||||
|
obs, _, err := buildObservation(parsed)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("buildObservation() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if got, want := obs.TextDescription, "Overcast"; got != want {
|
||||||
|
t.Fatalf("TextDescription = %q, want %q", got, want)
|
||||||
|
}
|
||||||
|
|
||||||
|
if obs.BarometricPressurePa == nil {
|
||||||
|
t.Fatalf("BarometricPressurePa = nil, want non-nil")
|
||||||
|
}
|
||||||
|
if got, want := *obs.BarometricPressurePa, seaLevel; got != want {
|
||||||
|
t.Fatalf("BarometricPressurePa = %v, want %v", got, want)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildObservationPressureFallbackToBarometric(t *testing.T) {
|
||||||
|
barometric := 99900.0
|
||||||
|
|
||||||
|
parsed := nwsObservationResponse{}
|
||||||
|
parsed.Properties.Timestamp = "2026-03-16T19:00:00Z"
|
||||||
|
parsed.Properties.TextDescription = "Cloudy"
|
||||||
|
parsed.Properties.BarometricPressure.Value = &barometric
|
||||||
|
|
||||||
|
obs, _, err := buildObservation(parsed)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("buildObservation() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if obs.BarometricPressurePa == nil {
|
||||||
|
t.Fatalf("BarometricPressurePa = nil, want non-nil")
|
||||||
|
}
|
||||||
|
if got, want := *obs.BarometricPressurePa, barometric; got != want {
|
||||||
|
t.Fatalf("BarometricPressurePa = %v, want %v", got, want)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -2,7 +2,7 @@
|
|||||||
package nws
|
package nws
|
||||||
|
|
||||||
import (
|
import (
|
||||||
fknormalize "gitea.maximumdirect.net/ejr/feedkit/normalize"
|
fknormalize "gitea.maximumdirect.net/ejr/feedkit/processors/normalize"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Register appends NWS normalizers in stable order.
|
// Register appends NWS normalizers in stable order.
|
||||||
|
|||||||
@@ -86,22 +86,21 @@ type nwsObservationResponse struct {
|
|||||||
// We decode these as generic maps, then optionally interpret them in metar.go.
|
// We decode these as generic maps, then optionally interpret them in metar.go.
|
||||||
PresentWeather []map[string]any `json:"presentWeather"`
|
PresentWeather []map[string]any `json:"presentWeather"`
|
||||||
|
|
||||||
CloudLayers []struct {
|
CloudLayers []nwsCloudLayer `json:"cloudLayers"`
|
||||||
|
} `json:"properties"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type nwsCloudLayer struct {
|
||||||
Base struct {
|
Base struct {
|
||||||
UnitCode string `json:"unitCode"`
|
UnitCode string `json:"unitCode"`
|
||||||
Value *float64 `json:"value"`
|
Value *float64 `json:"value"`
|
||||||
} `json:"base"`
|
} `json:"base"`
|
||||||
Amount string `json:"amount"`
|
Amount string `json:"amount"`
|
||||||
} `json:"cloudLayers"`
|
|
||||||
} `json:"properties"`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// nwsForecastResponse is a minimal-but-sufficient representation of the NWS
|
// nwsHourlyForecastResponse is a minimal-but-sufficient representation of the NWS
|
||||||
// gridpoint forecast GeoJSON payload needed for mapping into model.WeatherForecastRun.
|
// gridpoint hourly forecast GeoJSON payload needed for mapping into model.WeatherForecastRun.
|
||||||
//
|
type nwsHourlyForecastResponse struct {
|
||||||
// This is currently designed to support the hourly forecast endpoint; revisions may be needed
|
|
||||||
// to accommodate other forecast endpoints in the future.
|
|
||||||
type nwsForecastResponse struct {
|
|
||||||
Geometry struct {
|
Geometry struct {
|
||||||
Type string `json:"type"`
|
Type string `json:"type"`
|
||||||
Coordinates [][][]float64 `json:"coordinates"` // GeoJSON polygon: [ring][point][lon,lat]
|
Coordinates [][][]float64 `json:"coordinates"` // GeoJSON polygon: [ring][point][lon,lat]
|
||||||
@@ -120,11 +119,11 @@ type nwsForecastResponse struct {
|
|||||||
Value *float64 `json:"value"`
|
Value *float64 `json:"value"`
|
||||||
} `json:"elevation"`
|
} `json:"elevation"`
|
||||||
|
|
||||||
Periods []nwsForecastPeriod `json:"periods"`
|
Periods []nwsHourlyForecastPeriod `json:"periods"`
|
||||||
} `json:"properties"`
|
} `json:"properties"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type nwsForecastPeriod struct {
|
type nwsHourlyForecastPeriod struct {
|
||||||
Number int `json:"number"`
|
Number int `json:"number"`
|
||||||
Name string `json:"name"`
|
Name string `json:"name"`
|
||||||
StartTime string `json:"startTime"`
|
StartTime string `json:"startTime"`
|
||||||
@@ -159,6 +158,56 @@ type nwsForecastPeriod struct {
|
|||||||
DetailedForecast string `json:"detailedForecast"`
|
DetailedForecast string `json:"detailedForecast"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// nwsNarrativeForecastResponse is a minimal-but-sufficient representation of the NWS
|
||||||
|
// gridpoint narrative forecast GeoJSON payload needed for mapping into model.WeatherForecastRun.
|
||||||
|
type nwsNarrativeForecastResponse struct {
|
||||||
|
Geometry struct {
|
||||||
|
Type string `json:"type"`
|
||||||
|
Coordinates [][][]float64 `json:"coordinates"` // GeoJSON polygon: [ring][point][lon,lat]
|
||||||
|
} `json:"geometry"`
|
||||||
|
|
||||||
|
Properties struct {
|
||||||
|
Units string `json:"units"` // "us" or "si" (often "us" for narrative)
|
||||||
|
ForecastGenerator string `json:"forecastGenerator"` // e.g. "BaselineForecastGenerator"
|
||||||
|
|
||||||
|
GeneratedAt string `json:"generatedAt"` // RFC3339-ish
|
||||||
|
UpdateTime string `json:"updateTime"` // RFC3339-ish
|
||||||
|
ValidTimes string `json:"validTimes"`
|
||||||
|
|
||||||
|
Elevation struct {
|
||||||
|
UnitCode string `json:"unitCode"`
|
||||||
|
Value *float64 `json:"value"`
|
||||||
|
} `json:"elevation"`
|
||||||
|
|
||||||
|
Periods []nwsNarrativeForecastPeriod `json:"periods"`
|
||||||
|
} `json:"properties"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type nwsNarrativeForecastPeriod struct {
|
||||||
|
Number int `json:"number"`
|
||||||
|
Name string `json:"name"`
|
||||||
|
StartTime string `json:"startTime"`
|
||||||
|
EndTime string `json:"endTime"`
|
||||||
|
|
||||||
|
IsDaytime *bool `json:"isDaytime"`
|
||||||
|
|
||||||
|
Temperature *float64 `json:"temperature"`
|
||||||
|
TemperatureUnit string `json:"temperatureUnit"` // "F" or "C"
|
||||||
|
TemperatureTrend any `json:"temperatureTrend"`
|
||||||
|
|
||||||
|
ProbabilityOfPrecipitation struct {
|
||||||
|
UnitCode string `json:"unitCode"`
|
||||||
|
Value *float64 `json:"value"`
|
||||||
|
} `json:"probabilityOfPrecipitation"`
|
||||||
|
|
||||||
|
WindSpeed string `json:"windSpeed"` // e.g. "9 mph", "10 to 15 mph"
|
||||||
|
WindDirection string `json:"windDirection"` // e.g. "W", "NW"
|
||||||
|
|
||||||
|
Icon string `json:"icon"`
|
||||||
|
ShortForecast string `json:"shortForecast"`
|
||||||
|
DetailedForecast string `json:"detailedForecast"`
|
||||||
|
}
|
||||||
|
|
||||||
// nwsAlertsResponse is a minimal-but-sufficient representation of the NWS /alerts
|
// nwsAlertsResponse is a minimal-but-sufficient representation of the NWS /alerts
|
||||||
// FeatureCollection payload needed for mapping into model.WeatherAlertRun.
|
// FeatureCollection payload needed for mapping into model.WeatherAlertRun.
|
||||||
type nwsAlertsResponse struct {
|
type nwsAlertsResponse struct {
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ import (
|
|||||||
// 1. METAR phenomena (presentWeather) — most reliable for precip/hazards
|
// 1. METAR phenomena (presentWeather) — most reliable for precip/hazards
|
||||||
// 2. textDescription keywords — weaker, but reusable across providers
|
// 2. textDescription keywords — weaker, but reusable across providers
|
||||||
// 3. cloud layers fallback — only for sky-only conditions
|
// 3. cloud layers fallback — only for sky-only conditions
|
||||||
func mapNWSToWMO(providerDesc string, cloudLayers []model.CloudLayer, phenomena []metarPhenomenon) model.WMOCode {
|
func mapNWSToWMO(providerDesc string, cloudLayers []nwsCloudLayer, phenomena []metarPhenomenon) model.WMOCode {
|
||||||
// 1) Prefer METAR phenomena if present.
|
// 1) Prefer METAR phenomena if present.
|
||||||
if code := wmoFromPhenomena(phenomena); code != model.WMOUnknown {
|
if code := wmoFromPhenomena(phenomena); code != model.WMOUnknown {
|
||||||
return code
|
return code
|
||||||
@@ -167,7 +167,7 @@ func wmoFromPhenomena(phenomena []metarPhenomenon) model.WMOCode {
|
|||||||
return model.WMOUnknown
|
return model.WMOUnknown
|
||||||
}
|
}
|
||||||
|
|
||||||
func wmoFromCloudLayers(cloudLayers []model.CloudLayer) model.WMOCode {
|
func wmoFromCloudLayers(cloudLayers []nwsCloudLayer) model.WMOCode {
|
||||||
// NWS cloud layer amount values commonly include:
|
// NWS cloud layer amount values commonly include:
|
||||||
// OVC, BKN, SCT, FEW, SKC, CLR, VV (vertical visibility / obscured sky)
|
// OVC, BKN, SCT, FEW, SKC, CLR, VV (vertical visibility / obscured sky)
|
||||||
//
|
//
|
||||||
|
|||||||
@@ -108,14 +108,7 @@ func buildForecast(parsed omForecastResponse, fallbackIssued time.Time) (model.W
|
|||||||
IsDay: isDay,
|
IsDay: isDay,
|
||||||
|
|
||||||
ConditionCode: wmo,
|
ConditionCode: wmo,
|
||||||
ConditionText: canonicalText,
|
|
||||||
|
|
||||||
ProviderRawDescription: "",
|
|
||||||
|
|
||||||
TextDescription: canonicalText,
|
TextDescription: canonicalText,
|
||||||
DetailedText: "",
|
|
||||||
|
|
||||||
IconURL: "",
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if v := floatAt(parsed.Hourly.Temperature2m, i); v != nil {
|
if v := floatAt(parsed.Hourly.Temperature2m, i); v != nil {
|
||||||
|
|||||||
71
internal/normalizers/openmeteo/forecast_test.go
Normal file
71
internal/normalizers/openmeteo/forecast_test.go
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
package openmeteo
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"gitea.maximumdirect.net/ejr/weatherfeeder/model"
|
||||||
|
"gitea.maximumdirect.net/ejr/weatherfeeder/standards"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestBuildForecastUsesCanonicalTextDescription(t *testing.T) {
|
||||||
|
weatherCode := 2
|
||||||
|
isDay := 1
|
||||||
|
|
||||||
|
parsed := omForecastResponse{
|
||||||
|
Timezone: "UTC",
|
||||||
|
UTCOffsetSeconds: 0,
|
||||||
|
Hourly: omForecastHourly{
|
||||||
|
Time: []string{"2026-03-16T19:00"},
|
||||||
|
WeatherCode: []*int{&weatherCode},
|
||||||
|
IsDay: []*int{&isDay},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
run, effectiveAt, err := buildForecast(parsed, time.Time{})
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("buildForecast() error = %v", err)
|
||||||
|
}
|
||||||
|
if len(run.Periods) != 1 {
|
||||||
|
t.Fatalf("periods len = %d, want 1", len(run.Periods))
|
||||||
|
}
|
||||||
|
|
||||||
|
expectedText := standards.WMOText(model.WMOCode(weatherCode), boolPtr(true))
|
||||||
|
if got := run.Periods[0].TextDescription; got != expectedText {
|
||||||
|
t.Fatalf("TextDescription = %q, want %q", got, expectedText)
|
||||||
|
}
|
||||||
|
|
||||||
|
wantIssued := time.Date(2026, 3, 16, 19, 0, 0, 0, time.UTC)
|
||||||
|
if !run.IssuedAt.Equal(wantIssued) {
|
||||||
|
t.Fatalf("IssuedAt = %s, want %s", run.IssuedAt.Format(time.RFC3339), wantIssued.Format(time.RFC3339))
|
||||||
|
}
|
||||||
|
if !effectiveAt.Equal(wantIssued) {
|
||||||
|
t.Fatalf("effectiveAt = %s, want %s", effectiveAt.Format(time.RFC3339), wantIssued.Format(time.RFC3339))
|
||||||
|
}
|
||||||
|
|
||||||
|
assertNoLegacyForecastDescriptionKeys(t, run.Periods[0])
|
||||||
|
}
|
||||||
|
|
||||||
|
func boolPtr(v bool) *bool {
|
||||||
|
return &v
|
||||||
|
}
|
||||||
|
|
||||||
|
func assertNoLegacyForecastDescriptionKeys(t *testing.T, period any) {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
b, err := json.Marshal(period)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("json.Marshal(period) error = %v", err)
|
||||||
|
}
|
||||||
|
var got map[string]any
|
||||||
|
if err := json.Unmarshal(b, &got); err != nil {
|
||||||
|
t.Fatalf("json.Unmarshal(period) error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, key := range []string{"conditionText", "providerRawDescription", "detailedText", "iconUrl"} {
|
||||||
|
if _, ok := got[key]; ok {
|
||||||
|
t.Fatalf("unexpected legacy key %q in marshaled period: %#v", key, got)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -87,19 +87,8 @@ func buildObservation(parsed omResponse) (model.WeatherObservation, time.Time, e
|
|||||||
Timestamp: ts,
|
Timestamp: ts,
|
||||||
|
|
||||||
ConditionCode: wmo,
|
ConditionCode: wmo,
|
||||||
ConditionText: canonicalText,
|
|
||||||
IsDay: isDay,
|
IsDay: isDay,
|
||||||
|
|
||||||
// Open-Meteo does not provide a separate human text description for "current"
|
|
||||||
// when using weather_code; we leave provider evidence empty.
|
|
||||||
ProviderRawDescription: "",
|
|
||||||
|
|
||||||
// Transitional / human-facing:
|
|
||||||
// keep output consistent by populating TextDescription from canonical text.
|
|
||||||
TextDescription: canonicalText,
|
TextDescription: canonicalText,
|
||||||
|
|
||||||
// IconURL: Open-Meteo does not provide an icon URL in this endpoint.
|
|
||||||
IconURL: "",
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Measurements (all optional; only set when present).
|
// Measurements (all optional; only set when present).
|
||||||
@@ -132,20 +121,13 @@ func buildObservation(parsed omResponse) (model.WeatherObservation, time.Time, e
|
|||||||
obs.WindGustKmh = &v
|
obs.WindGustKmh = &v
|
||||||
}
|
}
|
||||||
|
|
||||||
if parsed.Current.SurfacePressure != nil {
|
if parsed.Current.PressureMSL != nil {
|
||||||
|
v := normcommon.PressurePaFromHPa(*parsed.Current.PressureMSL)
|
||||||
|
obs.BarometricPressurePa = &v
|
||||||
|
} else if parsed.Current.SurfacePressure != nil {
|
||||||
v := normcommon.PressurePaFromHPa(*parsed.Current.SurfacePressure)
|
v := normcommon.PressurePaFromHPa(*parsed.Current.SurfacePressure)
|
||||||
obs.BarometricPressurePa = &v
|
obs.BarometricPressurePa = &v
|
||||||
}
|
}
|
||||||
|
|
||||||
if parsed.Current.PressureMSL != nil {
|
|
||||||
v := normcommon.PressurePaFromHPa(*parsed.Current.PressureMSL)
|
|
||||||
obs.SeaLevelPressurePa = &v
|
|
||||||
}
|
|
||||||
|
|
||||||
if parsed.Elevation != nil {
|
|
||||||
v := *parsed.Elevation
|
|
||||||
obs.ElevationMeters = &v
|
|
||||||
}
|
|
||||||
|
|
||||||
return obs, ts, nil
|
return obs, ts, nil
|
||||||
}
|
}
|
||||||
|
|||||||
61
internal/normalizers/openmeteo/observation_test.go
Normal file
61
internal/normalizers/openmeteo/observation_test.go
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
package openmeteo
|
||||||
|
|
||||||
|
import "testing"
|
||||||
|
|
||||||
|
func TestBuildObservationTextDescriptionAndPressurePrecedence(t *testing.T) {
|
||||||
|
weatherCode := 2
|
||||||
|
pressureMSL := 1016.0
|
||||||
|
surfacePressure := 1009.0
|
||||||
|
|
||||||
|
parsed := omResponse{
|
||||||
|
Timezone: "UTC",
|
||||||
|
UTCOffsetSeconds: 0,
|
||||||
|
Current: omCurrent{
|
||||||
|
Time: "2026-03-16T19:00",
|
||||||
|
WeatherCode: &weatherCode,
|
||||||
|
PressureMSL: &pressureMSL,
|
||||||
|
SurfacePressure: &surfacePressure,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
obs, _, err := buildObservation(parsed)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("buildObservation() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if got, want := obs.TextDescription, "Partly Cloudy"; got != want {
|
||||||
|
t.Fatalf("TextDescription = %q, want %q", got, want)
|
||||||
|
}
|
||||||
|
|
||||||
|
if obs.BarometricPressurePa == nil {
|
||||||
|
t.Fatalf("BarometricPressurePa = nil, want non-nil")
|
||||||
|
}
|
||||||
|
if got, want := *obs.BarometricPressurePa, 101600.0; got != want {
|
||||||
|
t.Fatalf("BarometricPressurePa = %v, want %v", got, want)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildObservationPressureFallbackToSurface(t *testing.T) {
|
||||||
|
surfacePressure := 1008.0
|
||||||
|
|
||||||
|
parsed := omResponse{
|
||||||
|
Timezone: "UTC",
|
||||||
|
UTCOffsetSeconds: 0,
|
||||||
|
Current: omCurrent{
|
||||||
|
Time: "2026-03-16T19:00",
|
||||||
|
SurfacePressure: &surfacePressure,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
obs, _, err := buildObservation(parsed)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("buildObservation() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if obs.BarometricPressurePa == nil {
|
||||||
|
t.Fatalf("BarometricPressurePa = nil, want non-nil")
|
||||||
|
}
|
||||||
|
if got, want := *obs.BarometricPressurePa, 100800.0; got != want {
|
||||||
|
t.Fatalf("BarometricPressurePa = %v, want %v", got, want)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -2,7 +2,7 @@
|
|||||||
package openmeteo
|
package openmeteo
|
||||||
|
|
||||||
import (
|
import (
|
||||||
fknormalize "gitea.maximumdirect.net/ejr/feedkit/normalize"
|
fknormalize "gitea.maximumdirect.net/ejr/feedkit/processors/normalize"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Register appends Open-Meteo normalizers in stable order.
|
// Register appends Open-Meteo normalizers in stable order.
|
||||||
|
|||||||
@@ -53,15 +53,6 @@ func inferIsDay(icon string, dt, sunrise, sunset int64) *bool {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// openWeatherIconURL builds the standard OpenWeather icon URL for the given icon code.
|
|
||||||
func openWeatherIconURL(icon string) string {
|
|
||||||
icon = strings.TrimSpace(icon)
|
|
||||||
if icon == "" {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
return fmt.Sprintf("https://openweathermap.org/img/wn/%s@2x.png", icon)
|
|
||||||
}
|
|
||||||
|
|
||||||
// openWeatherStationID returns a stable station identifier for the given response.
|
// openWeatherStationID returns a stable station identifier for the given response.
|
||||||
// Prefer the OpenWeather city ID when present; otherwise, fall back to coordinates.
|
// Prefer the OpenWeather city ID when present; otherwise, fall back to coordinates.
|
||||||
func openWeatherStationID(parsed owmResponse) string {
|
func openWeatherStationID(parsed owmResponse) string {
|
||||||
|
|||||||
@@ -75,10 +75,10 @@ func buildObservation(parsed owmResponse) (model.WeatherObservation, time.Time,
|
|||||||
}
|
}
|
||||||
|
|
||||||
surfacePa := normcommon.PressurePaFromHPa(parsed.Main.Pressure)
|
surfacePa := normcommon.PressurePaFromHPa(parsed.Main.Pressure)
|
||||||
var seaLevelPa *float64
|
barometricPa := &surfacePa
|
||||||
if parsed.Main.SeaLevel != nil {
|
if parsed.Main.SeaLevel != nil {
|
||||||
v := normcommon.PressurePaFromHPa(*parsed.Main.SeaLevel)
|
v := normcommon.PressurePaFromHPa(*parsed.Main.SeaLevel)
|
||||||
seaLevelPa = &v
|
barometricPa = &v
|
||||||
}
|
}
|
||||||
|
|
||||||
wsKmh := normcommon.SpeedKmhFromMps(parsed.Wind.Speed)
|
wsKmh := normcommon.SpeedKmhFromMps(parsed.Wind.Speed)
|
||||||
@@ -96,9 +96,6 @@ func buildObservation(parsed owmResponse) (model.WeatherObservation, time.Time,
|
|||||||
|
|
||||||
// Condition mapping: OpenWeather condition IDs -> canonical WMO code vocabulary.
|
// Condition mapping: OpenWeather condition IDs -> canonical WMO code vocabulary.
|
||||||
wmo := mapOpenWeatherToWMO(owmID)
|
wmo := mapOpenWeatherToWMO(owmID)
|
||||||
canonicalText := standards.WMOText(wmo, isDay)
|
|
||||||
|
|
||||||
iconURL := openWeatherIconURL(icon)
|
|
||||||
|
|
||||||
stationID := openWeatherStationID(parsed)
|
stationID := openWeatherStationID(parsed)
|
||||||
stationName := strings.TrimSpace(parsed.Name)
|
stationName := strings.TrimSpace(parsed.Name)
|
||||||
@@ -112,14 +109,8 @@ func buildObservation(parsed owmResponse) (model.WeatherObservation, time.Time,
|
|||||||
Timestamp: ts,
|
Timestamp: ts,
|
||||||
|
|
||||||
ConditionCode: wmo,
|
ConditionCode: wmo,
|
||||||
ConditionText: canonicalText,
|
|
||||||
IsDay: isDay,
|
IsDay: isDay,
|
||||||
|
TextDescription: rawDesc,
|
||||||
ProviderRawDescription: rawDesc,
|
|
||||||
|
|
||||||
// Human-facing legacy fields: populate with canonical text for consistency.
|
|
||||||
TextDescription: canonicalText,
|
|
||||||
IconURL: iconURL,
|
|
||||||
|
|
||||||
TemperatureC: &tempC,
|
TemperatureC: &tempC,
|
||||||
ApparentTemperatureC: apparentC,
|
ApparentTemperatureC: apparentC,
|
||||||
@@ -128,8 +119,7 @@ func buildObservation(parsed owmResponse) (model.WeatherObservation, time.Time,
|
|||||||
WindSpeedKmh: &wsKmh,
|
WindSpeedKmh: &wsKmh,
|
||||||
WindGustKmh: wgKmh,
|
WindGustKmh: wgKmh,
|
||||||
|
|
||||||
BarometricPressurePa: &surfacePa,
|
BarometricPressurePa: barometricPa,
|
||||||
SeaLevelPressurePa: seaLevelPa,
|
|
||||||
VisibilityMeters: visM,
|
VisibilityMeters: visM,
|
||||||
|
|
||||||
RelativeHumidityPercent: &rh,
|
RelativeHumidityPercent: &rh,
|
||||||
|
|||||||
58
internal/normalizers/openweather/observation_test.go
Normal file
58
internal/normalizers/openweather/observation_test.go
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
package openweather
|
||||||
|
|
||||||
|
import "testing"
|
||||||
|
|
||||||
|
func TestBuildObservationTextDescriptionAndPressurePrecedence(t *testing.T) {
|
||||||
|
seaLevel := 1018.0
|
||||||
|
|
||||||
|
parsed := owmResponse{}
|
||||||
|
parsed.Dt = 1710000000
|
||||||
|
parsed.Main.Temp = 20.0
|
||||||
|
parsed.Main.Humidity = 45.0
|
||||||
|
parsed.Main.Pressure = 1000.0
|
||||||
|
parsed.Main.SeaLevel = &seaLevel
|
||||||
|
parsed.Wind.Speed = 3.0
|
||||||
|
parsed.Weather = []owmWeather{
|
||||||
|
{ID: 801, Main: "Clouds", Description: "few clouds", Icon: "02d"},
|
||||||
|
}
|
||||||
|
|
||||||
|
obs, _, err := buildObservation(parsed)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("buildObservation() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if obs.TextDescription != "few clouds" {
|
||||||
|
t.Fatalf("TextDescription = %q, want %q", obs.TextDescription, "few clouds")
|
||||||
|
}
|
||||||
|
|
||||||
|
if obs.BarometricPressurePa == nil {
|
||||||
|
t.Fatalf("BarometricPressurePa = nil, want non-nil")
|
||||||
|
}
|
||||||
|
if got, want := *obs.BarometricPressurePa, 101800.0; got != want {
|
||||||
|
t.Fatalf("BarometricPressurePa = %v, want %v", got, want)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildObservationPressureFallbackToSurface(t *testing.T) {
|
||||||
|
parsed := owmResponse{}
|
||||||
|
parsed.Dt = 1710000000
|
||||||
|
parsed.Main.Temp = 20.0
|
||||||
|
parsed.Main.Humidity = 45.0
|
||||||
|
parsed.Main.Pressure = 1001.0
|
||||||
|
parsed.Wind.Speed = 3.0
|
||||||
|
parsed.Weather = []owmWeather{
|
||||||
|
{ID: 800, Description: "clear sky", Icon: "01d"},
|
||||||
|
}
|
||||||
|
|
||||||
|
obs, _, err := buildObservation(parsed)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("buildObservation() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if obs.BarometricPressurePa == nil {
|
||||||
|
t.Fatalf("BarometricPressurePa = nil, want non-nil")
|
||||||
|
}
|
||||||
|
if got, want := *obs.BarometricPressurePa, 100100.0; got != want {
|
||||||
|
t.Fatalf("BarometricPressurePa = %v, want %v", got, want)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -2,7 +2,7 @@
|
|||||||
package openweather
|
package openweather
|
||||||
|
|
||||||
import (
|
import (
|
||||||
fknormalize "gitea.maximumdirect.net/ejr/feedkit/normalize"
|
fknormalize "gitea.maximumdirect.net/ejr/feedkit/processors/normalize"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Register appends OpenWeather normalizers in stable order.
|
// Register appends OpenWeather normalizers in stable order.
|
||||||
|
|||||||
188
internal/sinks/postgres/doc.go
Normal file
188
internal/sinks/postgres/doc.go
Normal file
@@ -0,0 +1,188 @@
|
|||||||
|
// Package postgres documents weatherfeeder's PostgreSQL sink contract for
|
||||||
|
// downstream SQL consumers.
|
||||||
|
//
|
||||||
|
// This package wires weatherfeeder canonical events into normalized relational
|
||||||
|
// tables. Downstream consumers can reconstruct the same canonical JSON objects
|
||||||
|
// that were written by joining parent/child tables as described below.
|
||||||
|
//
|
||||||
|
// Canonical input schemas:
|
||||||
|
// - weather.observation.v1 -> model.WeatherObservation
|
||||||
|
// - weather.forecast.v1 -> model.WeatherForecastRun
|
||||||
|
// - weather.alert.v1 -> model.WeatherAlertRun
|
||||||
|
//
|
||||||
|
// Parent/child relationships:
|
||||||
|
// - observations.event_id -> observation_present_weather.event_id
|
||||||
|
// - forecasts.event_id -> forecast_periods.run_event_id
|
||||||
|
// - alert_runs.event_id -> alerts.run_event_id
|
||||||
|
// - alerts.(run_event_id, alert_index) -> alert_references.(run_event_id, alert_index)
|
||||||
|
//
|
||||||
|
// Dedupe and retention behavior:
|
||||||
|
// - Parent primary keys (event_id): observations, forecasts, alert_runs.
|
||||||
|
// - Child primary keys use positional indexes to preserve payload order.
|
||||||
|
// - Prune columns:
|
||||||
|
// - observations.observed_at
|
||||||
|
// - observation_present_weather.observed_at
|
||||||
|
// - forecasts.issued_at
|
||||||
|
// - forecast_periods.issued_at
|
||||||
|
// - alert_runs.as_of
|
||||||
|
// - alerts.as_of
|
||||||
|
// - alert_references.as_of
|
||||||
|
//
|
||||||
|
// Envelope field mapping (shared parent columns)
|
||||||
|
//
|
||||||
|
// These columns exist on observations, forecasts, and alert_runs:
|
||||||
|
// - event_id TEXT -> event.id
|
||||||
|
// - event_kind TEXT -> event.kind
|
||||||
|
// - event_source TEXT -> event.source
|
||||||
|
// - event_schema TEXT -> event.schema
|
||||||
|
// - event_emitted_at TIMESTAMPTZ -> event.emitted_at
|
||||||
|
// - event_effective_at TIMESTAMPTZ NULL -> event.effective_at
|
||||||
|
//
|
||||||
|
// Table contract
|
||||||
|
//
|
||||||
|
// 1. observations (PK: event_id)
|
||||||
|
//
|
||||||
|
// - event_id TEXT -> event.id
|
||||||
|
// - event_kind TEXT -> event.kind
|
||||||
|
// - event_source TEXT -> event.source
|
||||||
|
// - event_schema TEXT -> event.schema
|
||||||
|
// - event_emitted_at TIMESTAMPTZ -> event.emitted_at
|
||||||
|
// - event_effective_at TIMESTAMPTZ NULL -> event.effective_at
|
||||||
|
// - station_id TEXT NULL -> payload.stationId
|
||||||
|
// - station_name TEXT NULL -> payload.stationName
|
||||||
|
// - observed_at TIMESTAMPTZ -> payload.timestamp
|
||||||
|
// - condition_code INTEGER -> payload.conditionCode
|
||||||
|
// - is_day BOOLEAN NULL -> payload.isDay
|
||||||
|
// - text_description TEXT NULL -> payload.textDescription
|
||||||
|
// - temperature_c DOUBLE PRECISION NULL -> payload.temperatureC
|
||||||
|
// - dewpoint_c DOUBLE PRECISION NULL -> payload.dewpointC
|
||||||
|
// - wind_direction_degrees DOUBLE PRECISION NULL -> payload.windDirectionDegrees
|
||||||
|
// - wind_speed_kmh DOUBLE PRECISION NULL -> payload.windSpeedKmh
|
||||||
|
// - wind_gust_kmh DOUBLE PRECISION NULL -> payload.windGustKmh
|
||||||
|
// - barometric_pressure_pa DOUBLE PRECISION NULL -> payload.barometricPressurePa
|
||||||
|
// - visibility_meters DOUBLE PRECISION NULL -> payload.visibilityMeters
|
||||||
|
// - relative_humidity_percent DOUBLE PRECISION NULL -> payload.relativeHumidityPercent
|
||||||
|
// - apparent_temperature_c DOUBLE PRECISION NULL -> payload.apparentTemperatureC
|
||||||
|
//
|
||||||
|
// 2. observation_present_weather (PK: event_id, weather_index)
|
||||||
|
//
|
||||||
|
// - event_id TEXT -> observations.event_id / payload.presentWeather[i]
|
||||||
|
// - weather_index INTEGER -> i (array position in payload.presentWeather)
|
||||||
|
// - observed_at TIMESTAMPTZ -> payload.timestamp
|
||||||
|
// - raw_text TEXT NULL -> JSON-encoded payload.presentWeather[i].raw
|
||||||
|
//
|
||||||
|
// Note: raw_text stores compact JSON text. Consumers that need the original
|
||||||
|
// object should parse raw_text as JSON.
|
||||||
|
//
|
||||||
|
// 3. forecasts (PK: event_id)
|
||||||
|
//
|
||||||
|
// - event_id TEXT -> event.id
|
||||||
|
// - event_kind TEXT -> event.kind
|
||||||
|
// - event_source TEXT -> event.source
|
||||||
|
// - event_schema TEXT -> event.schema
|
||||||
|
// - event_emitted_at TIMESTAMPTZ -> event.emitted_at
|
||||||
|
// - event_effective_at TIMESTAMPTZ NULL -> event.effective_at
|
||||||
|
// - location_id TEXT NULL -> payload.locationId
|
||||||
|
// - location_name TEXT NULL -> payload.locationName
|
||||||
|
// - issued_at TIMESTAMPTZ -> payload.issuedAt
|
||||||
|
// - updated_at TIMESTAMPTZ NULL -> payload.updatedAt
|
||||||
|
// - product TEXT -> payload.product
|
||||||
|
// - latitude DOUBLE PRECISION NULL -> payload.latitude
|
||||||
|
// - longitude DOUBLE PRECISION NULL -> payload.longitude
|
||||||
|
// - elevation_meters DOUBLE PRECISION NULL -> payload.elevationMeters
|
||||||
|
// - period_count INTEGER -> len(payload.periods)
|
||||||
|
//
|
||||||
|
// 4. forecast_periods (PK: run_event_id, period_index)
|
||||||
|
//
|
||||||
|
// - run_event_id TEXT -> forecasts.event_id / payload.periods[i]
|
||||||
|
// - period_index INTEGER -> i (array position in payload.periods)
|
||||||
|
// - issued_at TIMESTAMPTZ -> payload.issuedAt (copied from parent)
|
||||||
|
// - start_time TIMESTAMPTZ -> payload.periods[i].startTime
|
||||||
|
// - end_time TIMESTAMPTZ -> payload.periods[i].endTime
|
||||||
|
// - name TEXT NULL -> payload.periods[i].name
|
||||||
|
// - is_day BOOLEAN NULL -> payload.periods[i].isDay
|
||||||
|
// - condition_code INTEGER -> payload.periods[i].conditionCode
|
||||||
|
// - text_description TEXT NULL -> payload.periods[i].textDescription
|
||||||
|
// - temperature_c DOUBLE PRECISION NULL -> payload.periods[i].temperatureC
|
||||||
|
// - temperature_c_min DOUBLE PRECISION NULL -> payload.periods[i].temperatureCMin
|
||||||
|
// - temperature_c_max DOUBLE PRECISION NULL -> payload.periods[i].temperatureCMax
|
||||||
|
// - dewpoint_c DOUBLE PRECISION NULL -> payload.periods[i].dewpointC
|
||||||
|
// - relative_humidity_percent DOUBLE PRECISION NULL -> payload.periods[i].relativeHumidityPercent
|
||||||
|
// - wind_direction_degrees DOUBLE PRECISION NULL -> payload.periods[i].windDirectionDegrees
|
||||||
|
// - wind_speed_kmh DOUBLE PRECISION NULL -> payload.periods[i].windSpeedKmh
|
||||||
|
// - wind_gust_kmh DOUBLE PRECISION NULL -> payload.periods[i].windGustKmh
|
||||||
|
// - barometric_pressure_pa DOUBLE PRECISION NULL -> payload.periods[i].barometricPressurePa
|
||||||
|
// - visibility_meters DOUBLE PRECISION NULL -> payload.periods[i].visibilityMeters
|
||||||
|
// - apparent_temperature_c DOUBLE PRECISION NULL -> payload.periods[i].apparentTemperatureC
|
||||||
|
// - cloud_cover_percent DOUBLE PRECISION NULL -> payload.periods[i].cloudCoverPercent
|
||||||
|
// - probability_of_precipitation_percent DOUBLE PRECISION NULL -> payload.periods[i].probabilityOfPrecipitationPercent
|
||||||
|
// - precipitation_amount_mm DOUBLE PRECISION NULL -> payload.periods[i].precipitationAmountMm
|
||||||
|
// - snowfall_depth_mm DOUBLE PRECISION NULL -> payload.periods[i].snowfallDepthMm
|
||||||
|
// - uv_index DOUBLE PRECISION NULL -> payload.periods[i].uvIndex
|
||||||
|
//
|
||||||
|
// 5. alert_runs (PK: event_id)
|
||||||
|
//
|
||||||
|
// - event_id TEXT -> event.id
|
||||||
|
// - event_kind TEXT -> event.kind
|
||||||
|
// - event_source TEXT -> event.source
|
||||||
|
// - event_schema TEXT -> event.schema
|
||||||
|
// - event_emitted_at TIMESTAMPTZ -> event.emitted_at
|
||||||
|
// - event_effective_at TIMESTAMPTZ NULL -> event.effective_at
|
||||||
|
// - location_id TEXT NULL -> payload.locationId
|
||||||
|
// - location_name TEXT NULL -> payload.locationName
|
||||||
|
// - as_of TIMESTAMPTZ -> payload.asOf
|
||||||
|
// - latitude DOUBLE PRECISION NULL -> payload.latitude
|
||||||
|
// - longitude DOUBLE PRECISION NULL -> payload.longitude
|
||||||
|
// - alert_count INTEGER -> len(payload.alerts)
|
||||||
|
//
|
||||||
|
// 6. alerts (PK: run_event_id, alert_index)
|
||||||
|
//
|
||||||
|
// - run_event_id TEXT -> alert_runs.event_id / payload.alerts[i]
|
||||||
|
// - alert_index INTEGER -> i (array position in payload.alerts)
|
||||||
|
// - as_of TIMESTAMPTZ -> payload.asOf (copied from parent)
|
||||||
|
// - alert_id TEXT -> payload.alerts[i].id
|
||||||
|
// - event TEXT NULL -> payload.alerts[i].event
|
||||||
|
// - headline TEXT NULL -> payload.alerts[i].headline
|
||||||
|
// - severity TEXT NULL -> payload.alerts[i].severity
|
||||||
|
// - urgency TEXT NULL -> payload.alerts[i].urgency
|
||||||
|
// - certainty TEXT NULL -> payload.alerts[i].certainty
|
||||||
|
// - status TEXT NULL -> payload.alerts[i].status
|
||||||
|
// - message_type TEXT NULL -> payload.alerts[i].messageType
|
||||||
|
// - category TEXT NULL -> payload.alerts[i].category
|
||||||
|
// - response TEXT NULL -> payload.alerts[i].response
|
||||||
|
// - description TEXT NULL -> payload.alerts[i].description
|
||||||
|
// - instruction TEXT NULL -> payload.alerts[i].instruction
|
||||||
|
// - sent TIMESTAMPTZ NULL -> payload.alerts[i].sent
|
||||||
|
// - effective TIMESTAMPTZ NULL -> payload.alerts[i].effective
|
||||||
|
// - onset TIMESTAMPTZ NULL -> payload.alerts[i].onset
|
||||||
|
// - expires TIMESTAMPTZ NULL -> payload.alerts[i].expires
|
||||||
|
// - area_description TEXT NULL -> payload.alerts[i].areaDescription
|
||||||
|
// - sender_name TEXT NULL -> payload.alerts[i].senderName
|
||||||
|
// - reference_count INTEGER -> len(payload.alerts[i].references)
|
||||||
|
//
|
||||||
|
// 7. alert_references (PK: run_event_id, alert_index, reference_index)
|
||||||
|
//
|
||||||
|
// - run_event_id TEXT -> alert_runs.event_id / payload.alerts[i].references[j]
|
||||||
|
// - alert_index INTEGER -> i (array position in payload.alerts)
|
||||||
|
// - reference_index INTEGER -> j (array position in payload.alerts[i].references)
|
||||||
|
// - as_of TIMESTAMPTZ -> payload.asOf (copied from parent)
|
||||||
|
// - id TEXT NULL -> payload.alerts[i].references[j].id
|
||||||
|
// - identifier TEXT NULL -> payload.alerts[i].references[j].identifier
|
||||||
|
// - sender TEXT NULL -> payload.alerts[i].references[j].sender
|
||||||
|
// - sent TIMESTAMPTZ NULL -> payload.alerts[i].references[j].sent
|
||||||
|
//
|
||||||
|
// Reconstructing canonical JSON payloads
|
||||||
|
//
|
||||||
|
// - WeatherObservation:
|
||||||
|
// read one row from observations, then join child rows by event_id ordered by
|
||||||
|
// weather_index to rebuild presentWeather arrays.
|
||||||
|
//
|
||||||
|
// - WeatherForecastRun:
|
||||||
|
// read one row from forecasts, then join forecast_periods by run_event_id
|
||||||
|
// ordered by period_index to rebuild periods.
|
||||||
|
//
|
||||||
|
// - WeatherAlertRun:
|
||||||
|
// read one row from alert_runs, join alerts by run_event_id ordered by
|
||||||
|
// alert_index, then join alert_references by (run_event_id, alert_index)
|
||||||
|
// ordered by reference_index to rebuild references per alert.
|
||||||
|
package postgres
|
||||||
320
internal/sinks/postgres/map.go
Normal file
320
internal/sinks/postgres/map.go
Normal file
@@ -0,0 +1,320 @@
|
|||||||
|
package postgres
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
fkevent "gitea.maximumdirect.net/ejr/feedkit/event"
|
||||||
|
fksinks "gitea.maximumdirect.net/ejr/feedkit/sinks"
|
||||||
|
"gitea.maximumdirect.net/ejr/weatherfeeder/model"
|
||||||
|
"gitea.maximumdirect.net/ejr/weatherfeeder/standards"
|
||||||
|
)
|
||||||
|
|
||||||
|
func mapPostgresEvent(_ context.Context, e fkevent.Event) ([]fksinks.PostgresWrite, error) {
|
||||||
|
schema := strings.TrimSpace(e.Schema)
|
||||||
|
switch schema {
|
||||||
|
case standards.SchemaWeatherObservationV1:
|
||||||
|
return mapObservationEvent(e)
|
||||||
|
case standards.SchemaWeatherForecastV1:
|
||||||
|
return mapForecastEvent(e)
|
||||||
|
case standards.SchemaWeatherAlertV1:
|
||||||
|
return mapAlertEvent(e)
|
||||||
|
default:
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func mapObservationEvent(e fkevent.Event) ([]fksinks.PostgresWrite, error) {
|
||||||
|
obs, err := decodePayload[model.WeatherObservation](e.Payload)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("decode observation payload: %w", err)
|
||||||
|
}
|
||||||
|
if obs.Timestamp.IsZero() {
|
||||||
|
return nil, fmt.Errorf("decode observation payload: timestamp is required")
|
||||||
|
}
|
||||||
|
|
||||||
|
observedAt := obs.Timestamp.UTC()
|
||||||
|
writes := make([]fksinks.PostgresWrite, 0, 1+len(obs.PresentWeather))
|
||||||
|
|
||||||
|
writes = append(writes, fksinks.PostgresWrite{
|
||||||
|
Table: tableObservations,
|
||||||
|
Values: map[string]any{
|
||||||
|
"event_id": e.ID,
|
||||||
|
"event_kind": string(e.Kind),
|
||||||
|
"event_source": e.Source,
|
||||||
|
"event_schema": e.Schema,
|
||||||
|
"event_emitted_at": e.EmittedAt.UTC(),
|
||||||
|
"event_effective_at": nullableTime(e.EffectiveAt),
|
||||||
|
"station_id": nullableString(obs.StationID),
|
||||||
|
"station_name": nullableString(obs.StationName),
|
||||||
|
"observed_at": observedAt,
|
||||||
|
"condition_code": int(obs.ConditionCode),
|
||||||
|
"is_day": nullableBool(obs.IsDay),
|
||||||
|
"text_description": nullableString(obs.TextDescription),
|
||||||
|
"temperature_c": nullableFloat64(obs.TemperatureC),
|
||||||
|
"dewpoint_c": nullableFloat64(obs.DewpointC),
|
||||||
|
"wind_direction_degrees": nullableFloat64(obs.WindDirectionDegrees),
|
||||||
|
"wind_speed_kmh": nullableFloat64(obs.WindSpeedKmh),
|
||||||
|
"wind_gust_kmh": nullableFloat64(obs.WindGustKmh),
|
||||||
|
"barometric_pressure_pa": nullableFloat64(obs.BarometricPressurePa),
|
||||||
|
"visibility_meters": nullableFloat64(obs.VisibilityMeters),
|
||||||
|
"relative_humidity_percent": nullableFloat64(obs.RelativeHumidityPercent),
|
||||||
|
"apparent_temperature_c": nullableFloat64(obs.ApparentTemperatureC),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
for i, pw := range obs.PresentWeather {
|
||||||
|
rawText, err := compactJSONText(pw.Raw)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("observation presentWeather[%d].raw: %w", i, err)
|
||||||
|
}
|
||||||
|
writes = append(writes, fksinks.PostgresWrite{
|
||||||
|
Table: tableObservationPresentWeather,
|
||||||
|
Values: map[string]any{
|
||||||
|
"event_id": e.ID,
|
||||||
|
"weather_index": i,
|
||||||
|
"observed_at": observedAt,
|
||||||
|
"raw_text": rawText,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return writes, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func mapForecastEvent(e fkevent.Event) ([]fksinks.PostgresWrite, error) {
|
||||||
|
run, err := decodePayload[model.WeatherForecastRun](e.Payload)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("decode forecast payload: %w", err)
|
||||||
|
}
|
||||||
|
if run.IssuedAt.IsZero() {
|
||||||
|
return nil, fmt.Errorf("decode forecast payload: issuedAt is required")
|
||||||
|
}
|
||||||
|
if strings.TrimSpace(string(run.Product)) == "" {
|
||||||
|
return nil, fmt.Errorf("decode forecast payload: product is required")
|
||||||
|
}
|
||||||
|
|
||||||
|
issuedAt := run.IssuedAt.UTC()
|
||||||
|
writes := make([]fksinks.PostgresWrite, 0, 1+len(run.Periods))
|
||||||
|
|
||||||
|
writes = append(writes, fksinks.PostgresWrite{
|
||||||
|
Table: tableForecasts,
|
||||||
|
Values: map[string]any{
|
||||||
|
"event_id": e.ID,
|
||||||
|
"event_kind": string(e.Kind),
|
||||||
|
"event_source": e.Source,
|
||||||
|
"event_schema": e.Schema,
|
||||||
|
"event_emitted_at": e.EmittedAt.UTC(),
|
||||||
|
"event_effective_at": nullableTime(e.EffectiveAt),
|
||||||
|
"location_id": nullableString(run.LocationID),
|
||||||
|
"location_name": nullableString(run.LocationName),
|
||||||
|
"issued_at": issuedAt,
|
||||||
|
"updated_at": nullableTime(run.UpdatedAt),
|
||||||
|
"product": string(run.Product),
|
||||||
|
"latitude": nullableFloat64(run.Latitude),
|
||||||
|
"longitude": nullableFloat64(run.Longitude),
|
||||||
|
"elevation_meters": nullableFloat64(run.ElevationMeters),
|
||||||
|
"period_count": len(run.Periods),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
for i, p := range run.Periods {
|
||||||
|
if p.StartTime.IsZero() || p.EndTime.IsZero() {
|
||||||
|
return nil, fmt.Errorf("decode forecast payload: periods[%d] startTime/endTime are required", i)
|
||||||
|
}
|
||||||
|
writes = append(writes, fksinks.PostgresWrite{
|
||||||
|
Table: tableForecastPeriods,
|
||||||
|
Values: map[string]any{
|
||||||
|
"run_event_id": e.ID,
|
||||||
|
"period_index": i,
|
||||||
|
"issued_at": issuedAt,
|
||||||
|
"start_time": p.StartTime.UTC(),
|
||||||
|
"end_time": p.EndTime.UTC(),
|
||||||
|
"name": nullableString(p.Name),
|
||||||
|
"is_day": nullableBool(p.IsDay),
|
||||||
|
"condition_code": int(p.ConditionCode),
|
||||||
|
"text_description": nullableString(p.TextDescription),
|
||||||
|
"temperature_c": nullableFloat64(p.TemperatureC),
|
||||||
|
"temperature_c_min": nullableFloat64(p.TemperatureCMin),
|
||||||
|
"temperature_c_max": nullableFloat64(p.TemperatureCMax),
|
||||||
|
"dewpoint_c": nullableFloat64(p.DewpointC),
|
||||||
|
"relative_humidity_percent": nullableFloat64(p.RelativeHumidityPercent),
|
||||||
|
"wind_direction_degrees": nullableFloat64(p.WindDirectionDegrees),
|
||||||
|
"wind_speed_kmh": nullableFloat64(p.WindSpeedKmh),
|
||||||
|
"wind_gust_kmh": nullableFloat64(p.WindGustKmh),
|
||||||
|
"barometric_pressure_pa": nullableFloat64(p.BarometricPressurePa),
|
||||||
|
"visibility_meters": nullableFloat64(p.VisibilityMeters),
|
||||||
|
"apparent_temperature_c": nullableFloat64(p.ApparentTemperatureC),
|
||||||
|
"cloud_cover_percent": nullableFloat64(p.CloudCoverPercent),
|
||||||
|
"probability_of_precipitation_percent": nullableFloat64(p.ProbabilityOfPrecipitationPercent),
|
||||||
|
"precipitation_amount_mm": nullableFloat64(p.PrecipitationAmountMm),
|
||||||
|
"snowfall_depth_mm": nullableFloat64(p.SnowfallDepthMM),
|
||||||
|
"uv_index": nullableFloat64(p.UVIndex),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return writes, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func mapAlertEvent(e fkevent.Event) ([]fksinks.PostgresWrite, error) {
|
||||||
|
run, err := decodePayload[model.WeatherAlertRun](e.Payload)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("decode alert payload: %w", err)
|
||||||
|
}
|
||||||
|
if run.AsOf.IsZero() {
|
||||||
|
return nil, fmt.Errorf("decode alert payload: asOf is required")
|
||||||
|
}
|
||||||
|
|
||||||
|
asOf := run.AsOf.UTC()
|
||||||
|
writes := make([]fksinks.PostgresWrite, 0, 1+len(run.Alerts)+countAlertReferences(run.Alerts))
|
||||||
|
|
||||||
|
writes = append(writes, fksinks.PostgresWrite{
|
||||||
|
Table: tableAlertRuns,
|
||||||
|
Values: map[string]any{
|
||||||
|
"event_id": e.ID,
|
||||||
|
"event_kind": string(e.Kind),
|
||||||
|
"event_source": e.Source,
|
||||||
|
"event_schema": e.Schema,
|
||||||
|
"event_emitted_at": e.EmittedAt.UTC(),
|
||||||
|
"event_effective_at": nullableTime(e.EffectiveAt),
|
||||||
|
"location_id": nullableString(run.LocationID),
|
||||||
|
"location_name": nullableString(run.LocationName),
|
||||||
|
"as_of": asOf,
|
||||||
|
"latitude": nullableFloat64(run.Latitude),
|
||||||
|
"longitude": nullableFloat64(run.Longitude),
|
||||||
|
"alert_count": len(run.Alerts),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
for i, a := range run.Alerts {
|
||||||
|
if strings.TrimSpace(a.ID) == "" {
|
||||||
|
return nil, fmt.Errorf("decode alert payload: alerts[%d].id is required", i)
|
||||||
|
}
|
||||||
|
|
||||||
|
writes = append(writes, fksinks.PostgresWrite{
|
||||||
|
Table: tableAlerts,
|
||||||
|
Values: map[string]any{
|
||||||
|
"run_event_id": e.ID,
|
||||||
|
"alert_index": i,
|
||||||
|
"as_of": asOf,
|
||||||
|
"alert_id": a.ID,
|
||||||
|
"event": nullableString(a.Event),
|
||||||
|
"headline": nullableString(a.Headline),
|
||||||
|
"severity": nullableString(a.Severity),
|
||||||
|
"urgency": nullableString(a.Urgency),
|
||||||
|
"certainty": nullableString(a.Certainty),
|
||||||
|
"status": nullableString(a.Status),
|
||||||
|
"message_type": nullableString(a.MessageType),
|
||||||
|
"category": nullableString(a.Category),
|
||||||
|
"response": nullableString(a.Response),
|
||||||
|
"description": nullableString(a.Description),
|
||||||
|
"instruction": nullableString(a.Instruction),
|
||||||
|
"sent": nullableTime(a.Sent),
|
||||||
|
"effective": nullableTime(a.Effective),
|
||||||
|
"onset": nullableTime(a.Onset),
|
||||||
|
"expires": nullableTime(a.Expires),
|
||||||
|
"area_description": nullableString(a.AreaDescription),
|
||||||
|
"sender_name": nullableString(a.SenderName),
|
||||||
|
"reference_count": len(a.References),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
for j, ref := range a.References {
|
||||||
|
writes = append(writes, fksinks.PostgresWrite{
|
||||||
|
Table: tableAlertReferences,
|
||||||
|
Values: map[string]any{
|
||||||
|
"run_event_id": e.ID,
|
||||||
|
"alert_index": i,
|
||||||
|
"reference_index": j,
|
||||||
|
"as_of": asOf,
|
||||||
|
"id": nullableString(ref.ID),
|
||||||
|
"identifier": nullableString(ref.Identifier),
|
||||||
|
"sender": nullableString(ref.Sender),
|
||||||
|
"sent": nullableTime(ref.Sent),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return writes, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// decodePayload converts an arbitrary event payload into a concrete T.
// It accepts a T directly, a non-nil *T, or any JSON-compatible value
// (e.g. map[string]any) which is round-tripped through encoding/json.
func decodePayload[T any](payload any) (T, error) {
	var zero T

	if payload == nil {
		return zero, fmt.Errorf("payload is nil")
	}

	// Fast paths: the payload already carries the requested type.
	if direct, ok := payload.(T); ok {
		return direct, nil
	}
	if ref, ok := payload.(*T); ok {
		if ref == nil {
			return zero, fmt.Errorf("payload pointer is nil")
		}
		return *ref, nil
	}

	// Slow path: round-trip through JSON so map-shaped payloads decode
	// into the struct form as well.
	raw, err := json.Marshal(payload)
	if err != nil {
		return zero, fmt.Errorf("marshal payload: %w", err)
	}
	var decoded T
	if err := json.Unmarshal(raw, &decoded); err != nil {
		return decoded, fmt.Errorf("unmarshal payload: %w", err)
	}
	return decoded, nil
}
|
||||||
|
|
||||||
|
func countAlertReferences(alerts []model.WeatherAlert) int {
|
||||||
|
total := 0
|
||||||
|
for _, a := range alerts {
|
||||||
|
total += len(a.References)
|
||||||
|
}
|
||||||
|
return total
|
||||||
|
}
|
||||||
|
|
||||||
|
// nullableString maps a blank (empty or whitespace-only) string to SQL NULL;
// otherwise it returns the original, untrimmed string.
func nullableString(s string) any {
	if strings.TrimSpace(s) != "" {
		return s
	}
	return nil
}
|
||||||
|
|
||||||
|
// nullableFloat64 dereferences v, mapping a nil pointer to SQL NULL.
func nullableFloat64(v *float64) any {
	if v != nil {
		return *v
	}
	return nil
}
|
||||||
|
|
||||||
|
// nullableBool dereferences v, mapping a nil pointer to SQL NULL.
func nullableBool(v *bool) any {
	if v != nil {
		return *v
	}
	return nil
}
|
||||||
|
|
||||||
|
func nullableTime(v *time.Time) any {
|
||||||
|
if v == nil || v.IsZero() {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return v.UTC()
|
||||||
|
}
|
||||||
|
|
||||||
|
// compactJSONText marshals v to a compact JSON string suitable for a TEXT
// column. A nil input, or any value that encodes to JSON null, maps to SQL
// NULL instead of the literal string "null".
func compactJSONText(v any) (any, error) {
	if v == nil {
		return nil, nil
	}
	encoded, err := json.Marshal(v)
	if err != nil {
		return nil, err
	}
	text := string(encoded)
	if text == "null" {
		return nil, nil
	}
	return text, nil
}
|
||||||
247
internal/sinks/postgres/map_test.go
Normal file
247
internal/sinks/postgres/map_test.go
Normal file
@@ -0,0 +1,247 @@
|
|||||||
|
package postgres
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
fkevent "gitea.maximumdirect.net/ejr/feedkit/event"
|
||||||
|
fksinks "gitea.maximumdirect.net/ejr/feedkit/sinks"
|
||||||
|
"gitea.maximumdirect.net/ejr/weatherfeeder/model"
|
||||||
|
"gitea.maximumdirect.net/ejr/weatherfeeder/standards"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestMapPostgresEventObservationStructPayload verifies that an observation
// event carrying a typed model.WeatherObservation payload maps to one
// observations row plus one present-weather child row, and that the raw
// present-weather map is compacted to deterministic JSON text.
func TestMapPostgresEventObservationStructPayload(t *testing.T) {
	isDay := true
	temp := 21.5
	obs := model.WeatherObservation{
		StationID:       "KSTL",
		StationName:     "St. Louis",
		Timestamp:       time.Date(2026, 3, 16, 19, 0, 0, 0, time.UTC),
		ConditionCode:   model.WMOCode(1),
		IsDay:           &isDay,
		TextDescription: "few clouds",
		TemperatureC:    &temp,
		PresentWeather:  []model.PresentWeather{{Raw: map[string]any{"a": 1, "b": "x"}}},
	}

	writes, err := mapPostgresEvent(context.Background(), testEvent(standards.SchemaWeatherObservationV1, "observation", obs))
	if err != nil {
		t.Fatalf("mapPostgresEvent() error = %v", err)
	}
	// Exactly one parent row and one present-weather child row.
	if len(writes) != 2 {
		t.Fatalf("mapPostgresEvent() writes len = %d, want 2", len(writes))
	}
	if writes[0].Table != tableObservations {
		t.Fatalf("writes[0].Table = %q, want %q", writes[0].Table, tableObservations)
	}
	if got := writes[0].Values["station_id"]; got != "KSTL" {
		t.Fatalf("observations station_id = %#v, want KSTL", got)
	}
	if writes[1].Table != tableObservationPresentWeather {
		t.Fatalf("writes[1].Table = %q, want %q", writes[1].Table, tableObservationPresentWeather)
	}
	// json.Marshal sorts map keys, so the compacted text is deterministic.
	if got := writes[1].Values["raw_text"]; got != `{"a":1,"b":"x"}` {
		t.Fatalf("present_weather raw_text = %#v, want compact JSON", got)
	}

	assertAllWritesIncludeAllColumns(t, writes)
}
|
||||||
|
|
||||||
|
// TestMapPostgresEventForecastStructPayload verifies that a forecast run with
// two periods maps to one forecasts row followed by two forecast_periods rows
// in period order, with period_count reflecting the period slice length.
func TestMapPostgresEventForecastStructPayload(t *testing.T) {
	isDay := true
	temp := 10.5
	run := model.WeatherForecastRun{
		LocationID:   "LOC-1",
		LocationName: "St. Louis",
		IssuedAt:     time.Date(2026, 3, 16, 18, 0, 0, 0, time.UTC),
		Product:      model.ForecastProductHourly,
		Periods: []model.WeatherForecastPeriod{
			{
				StartTime:     time.Date(2026, 3, 16, 19, 0, 0, 0, time.UTC),
				EndTime:       time.Date(2026, 3, 16, 20, 0, 0, 0, time.UTC),
				IsDay:         &isDay,
				ConditionCode: model.WMOCode(2),
				TemperatureC:  &temp,
			},
			{
				// Second period deliberately leaves optional fields unset.
				StartTime:     time.Date(2026, 3, 16, 20, 0, 0, 0, time.UTC),
				EndTime:       time.Date(2026, 3, 16, 21, 0, 0, 0, time.UTC),
				ConditionCode: model.WMOCode(3),
			},
		},
	}

	writes, err := mapPostgresEvent(context.Background(), testEvent(standards.SchemaWeatherForecastV1, "forecast", run))
	if err != nil {
		t.Fatalf("mapPostgresEvent() error = %v", err)
	}
	// One parent row plus one child row per period.
	if len(writes) != 3 {
		t.Fatalf("mapPostgresEvent() writes len = %d, want 3", len(writes))
	}

	if writes[0].Table != tableForecasts {
		t.Fatalf("writes[0].Table = %q, want %q", writes[0].Table, tableForecasts)
	}
	if got := writes[0].Values["period_count"]; got != 2 {
		t.Fatalf("forecasts period_count = %#v, want 2", got)
	}
	if writes[1].Table != tableForecastPeriods || writes[2].Table != tableForecastPeriods {
		t.Fatalf("forecast period writes not in expected order")
	}
	if got := writes[1].Values["period_index"]; got != 0 {
		t.Fatalf("first period index = %#v, want 0", got)
	}

	assertAllWritesIncludeAllColumns(t, writes)
}
|
||||||
|
|
||||||
|
// TestMapPostgresEventAlertStructPayload verifies that an alert run with two
// alerts — the first carrying two references — maps to one alert_runs row,
// two alerts rows, and two alert_references rows, and that reference_count on
// an alert row matches its reference slice length.
func TestMapPostgresEventAlertStructPayload(t *testing.T) {
	sent := time.Date(2026, 3, 16, 17, 0, 0, 0, time.UTC)
	run := model.WeatherAlertRun{
		AsOf: time.Date(2026, 3, 16, 18, 0, 0, 0, time.UTC),
		Alerts: []model.WeatherAlert{
			{
				ID:       "urn:alert:1",
				Headline: "Winter Weather Advisory",
				Severity: "Moderate",
				References: []model.AlertReference{
					{ID: "urn:ref:1", Sent: &sent},
					{Identifier: "ref-two"},
				},
			},
			{
				ID:       "urn:alert:2",
				Headline: "Second alert",
			},
		},
	}

	writes, err := mapPostgresEvent(context.Background(), testEvent(standards.SchemaWeatherAlertV1, "alert", run))
	if err != nil {
		t.Fatalf("mapPostgresEvent() error = %v", err)
	}
	// 1 run row + 2 alert rows + 2 reference rows.
	if len(writes) != 5 {
		t.Fatalf("mapPostgresEvent() writes len = %d, want 5", len(writes))
	}

	counts := map[string]int{}
	for _, w := range writes {
		counts[w.Table]++
	}
	if counts[tableAlertRuns] != 1 || counts[tableAlerts] != 2 || counts[tableAlertReferences] != 2 {
		t.Fatalf("unexpected table write counts: %#v", counts)
	}

	firstAlert, ok := firstWriteForTable(writes, tableAlerts)
	if !ok {
		t.Fatalf("missing alerts write")
	}
	if got := firstAlert.Values["reference_count"]; got != 2 {
		t.Fatalf("alerts reference_count = %#v, want 2", got)
	}

	assertAllWritesIncludeAllColumns(t, writes)
}
|
||||||
|
|
||||||
|
// TestMapPostgresEventMapPayload verifies the mapper also accepts a generic
// map[string]any payload — the shape a payload takes after JSON transport —
// by round-tripping a forecast run through encoding/json before mapping.
func TestMapPostgresEventMapPayload(t *testing.T) {
	run := model.WeatherForecastRun{
		IssuedAt: time.Date(2026, 3, 16, 18, 0, 0, 0, time.UTC),
		Product:  model.ForecastProductHourly,
		Periods: []model.WeatherForecastPeriod{
			{
				StartTime:     time.Date(2026, 3, 16, 19, 0, 0, 0, time.UTC),
				EndTime:       time.Date(2026, 3, 16, 20, 0, 0, 0, time.UTC),
				ConditionCode: model.WMOCode(2),
			},
		},
	}
	// Convert the typed run into the generic map shape.
	b, err := json.Marshal(run)
	if err != nil {
		t.Fatalf("json.Marshal() error = %v", err)
	}
	var payload map[string]any
	if err := json.Unmarshal(b, &payload); err != nil {
		t.Fatalf("json.Unmarshal() error = %v", err)
	}

	writes, err := mapPostgresEvent(context.Background(), testEvent(standards.SchemaWeatherForecastV1, "forecast", payload))
	if err != nil {
		t.Fatalf("mapPostgresEvent() error = %v", err)
	}
	// One parent row plus the single period row.
	if len(writes) != 2 {
		t.Fatalf("mapPostgresEvent() writes len = %d, want 2", len(writes))
	}
	if writes[0].Table != tableForecasts {
		t.Fatalf("writes[0].Table = %q, want %q", writes[0].Table, tableForecasts)
	}

	assertAllWritesIncludeAllColumns(t, writes)
}
|
||||||
|
|
||||||
|
// TestMapPostgresEventUnknownSchemaNoOp verifies that events with an
// unrecognized schema produce no writes and no error (silent skip).
func TestMapPostgresEventUnknownSchemaNoOp(t *testing.T) {
	writes, err := mapPostgresEvent(context.Background(), testEvent("weather.unknown.v1", "observation", map[string]any{"x": 1}))
	if err != nil {
		t.Fatalf("mapPostgresEvent() error = %v", err)
	}
	if len(writes) != 0 {
		t.Fatalf("mapPostgresEvent() writes len = %d, want 0", len(writes))
	}
}
|
||||||
|
|
||||||
|
// TestMapPostgresEventMalformedPayload verifies that a payload that cannot be
// decoded into the schema's model (here, a bare string for a forecast event)
// yields an error that names the decode step.
func TestMapPostgresEventMalformedPayload(t *testing.T) {
	_, err := mapPostgresEvent(context.Background(), testEvent(standards.SchemaWeatherForecastV1, "forecast", "bad"))
	if err == nil {
		t.Fatalf("mapPostgresEvent() expected error for malformed payload")
	}
	if !strings.Contains(err.Error(), "decode forecast payload") {
		t.Fatalf("error = %q, want decode forecast payload context", err)
	}
}
|
||||||
|
|
||||||
|
// testEvent builds a minimal feedkit event around payload with fixed
// timestamps, so mapped row values are deterministic across test runs.
func testEvent(schema string, kind fkevent.Kind, payload any) fkevent.Event {
	effectiveAt := time.Date(2026, 3, 16, 18, 30, 0, 0, time.UTC)
	return fkevent.Event{
		ID:          "evt-1",
		Kind:        kind,
		Source:      "test-source",
		Schema:      schema,
		EmittedAt:   time.Date(2026, 3, 16, 18, 31, 0, 0, time.UTC),
		EffectiveAt: &effectiveAt,
		Payload:     payload,
	}
}
|
||||||
|
|
||||||
|
func firstWriteForTable(writes []fksinks.PostgresWrite, table string) (fksinks.PostgresWrite, bool) {
|
||||||
|
for _, w := range writes {
|
||||||
|
if w.Table == table {
|
||||||
|
return w, true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return fksinks.PostgresWrite{}, false
|
||||||
|
}
|
||||||
|
|
||||||
|
func assertAllWritesIncludeAllColumns(t *testing.T, writes []fksinks.PostgresWrite) {
|
||||||
|
t.Helper()
|
||||||
|
colCounts := tableColumnCounts()
|
||||||
|
for i, w := range writes {
|
||||||
|
expectedCount, ok := colCounts[w.Table]
|
||||||
|
if !ok {
|
||||||
|
t.Fatalf("writes[%d] references unknown table %q", i, w.Table)
|
||||||
|
}
|
||||||
|
if len(w.Values) != expectedCount {
|
||||||
|
t.Fatalf("writes[%d] table=%q has %d values, want %d", i, w.Table, len(w.Values), expectedCount)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func tableColumnCounts() map[string]int {
|
||||||
|
s := weatherPostgresSchema()
|
||||||
|
m := make(map[string]int, len(s.Tables))
|
||||||
|
for _, tbl := range s.Tables {
|
||||||
|
m[tbl.Name] = len(tbl.Columns)
|
||||||
|
}
|
||||||
|
return m
|
||||||
|
}
|
||||||
238
internal/sinks/postgres/schema.go
Normal file
238
internal/sinks/postgres/schema.go
Normal file
@@ -0,0 +1,238 @@
|
|||||||
|
package postgres
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"gitea.maximumdirect.net/ejr/feedkit/config"
|
||||||
|
fksinks "gitea.maximumdirect.net/ejr/feedkit/sinks"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Table names for weatherfeeder's Postgres sink schema. observations,
// forecasts, and alert_runs are parent tables keyed by event_id; the other
// tables are children that reference their parent row (see the REFERENCES
// ... ON DELETE CASCADE column types in weatherPostgresSchema).
const (
	tableObservations              = "observations"
	tableObservationPresentWeather = "observation_present_weather"
	tableForecasts                 = "forecasts"
	tableForecastPeriods           = "forecast_periods"
	tableAlertRuns                 = "alert_runs"
	tableAlerts                    = "alerts"
	tableAlertReferences           = "alert_references"
)
|
||||||
|
|
||||||
|
// RegisterPostgresSchemas registers weatherfeeder's Postgres schema for each
// configured sink using driver=postgres. Sinks with other drivers are
// skipped. It returns an error if cfg is nil, or if feedkit rejects a
// registration (e.g. the same sink name registered twice).
func RegisterPostgresSchemas(cfg *config.Config) error {
	if cfg == nil {
		return fmt.Errorf("register postgres schemas: config is nil")
	}

	schema := weatherPostgresSchema()
	for i, sk := range cfg.Sinks {
		if !isPostgresDriver(sk.Driver) {
			continue
		}
		if err := fksinks.RegisterPostgresSchema(sk.Name, schema); err != nil {
			return fmt.Errorf("register postgres schema for sinks[%d] name=%q: %w", i, sk.Name, err)
		}
	}

	return nil
}
|
||||||
|
|
||||||
|
// isPostgresDriver reports whether driver names the Postgres sink driver,
// ignoring surrounding whitespace and letter case.
func isPostgresDriver(driver string) bool {
	name := strings.TrimSpace(driver)
	return strings.EqualFold(name, "postgres")
}
|
||||||
|
|
||||||
|
// weatherPostgresSchema returns weatherfeeder's Postgres sink schema: table
// definitions (columns, primary keys, prune columns, indexes) plus the
// MapEvent hook that converts events into row writes. Parent tables are keyed
// by event_id; child tables cascade-delete with their parent row.
func weatherPostgresSchema() fksinks.PostgresSchema {
	return fksinks.PostgresSchema{
		Tables: []fksinks.PostgresTable{
			{
				// One row per observation event.
				Name: tableObservations,
				Columns: []fksinks.PostgresColumn{
					{Name: "event_id", Type: "TEXT", Nullable: false},
					{Name: "event_kind", Type: "TEXT", Nullable: false},
					{Name: "event_source", Type: "TEXT", Nullable: false},
					{Name: "event_schema", Type: "TEXT", Nullable: false},
					{Name: "event_emitted_at", Type: "TIMESTAMPTZ", Nullable: false},
					{Name: "event_effective_at", Type: "TIMESTAMPTZ", Nullable: true},
					{Name: "station_id", Type: "TEXT", Nullable: true},
					{Name: "station_name", Type: "TEXT", Nullable: true},
					{Name: "observed_at", Type: "TIMESTAMPTZ", Nullable: false},
					{Name: "condition_code", Type: "INTEGER", Nullable: false},
					{Name: "is_day", Type: "BOOLEAN", Nullable: true},
					{Name: "text_description", Type: "TEXT", Nullable: true},
					{Name: "temperature_c", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "dewpoint_c", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "wind_direction_degrees", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "wind_speed_kmh", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "wind_gust_kmh", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "barometric_pressure_pa", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "visibility_meters", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "relative_humidity_percent", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "apparent_temperature_c", Type: "DOUBLE PRECISION", Nullable: true},
				},
				PrimaryKey:  []string{"event_id"},
				PruneColumn: "observed_at",
				Indexes: []fksinks.PostgresIndex{
					{Name: "idx_wf_obs_station_observed_at", Columns: []string{"station_id", "observed_at"}},
					{Name: "idx_wf_obs_observed_at", Columns: []string{"observed_at"}},
					{Name: "idx_wf_obs_condition_code", Columns: []string{"condition_code"}},
				},
			},
			{
				// Present-weather details: one row per entry per observation.
				Name: tableObservationPresentWeather,
				Columns: []fksinks.PostgresColumn{
					{Name: "event_id", Type: "TEXT REFERENCES observations(event_id) ON DELETE CASCADE", Nullable: false},
					{Name: "weather_index", Type: "INTEGER", Nullable: false},
					{Name: "observed_at", Type: "TIMESTAMPTZ", Nullable: false},
					{Name: "raw_text", Type: "TEXT", Nullable: true},
				},
				PrimaryKey:  []string{"event_id", "weather_index"},
				PruneColumn: "observed_at",
				Indexes: []fksinks.PostgresIndex{
					{Name: "idx_wf_obs_present_observed_at", Columns: []string{"observed_at"}},
				},
			},
			{
				// One row per forecast run; periods live in forecast_periods.
				Name: tableForecasts,
				Columns: []fksinks.PostgresColumn{
					{Name: "event_id", Type: "TEXT", Nullable: false},
					{Name: "event_kind", Type: "TEXT", Nullable: false},
					{Name: "event_source", Type: "TEXT", Nullable: false},
					{Name: "event_schema", Type: "TEXT", Nullable: false},
					{Name: "event_emitted_at", Type: "TIMESTAMPTZ", Nullable: false},
					{Name: "event_effective_at", Type: "TIMESTAMPTZ", Nullable: true},
					{Name: "location_id", Type: "TEXT", Nullable: true},
					{Name: "location_name", Type: "TEXT", Nullable: true},
					{Name: "issued_at", Type: "TIMESTAMPTZ", Nullable: false},
					{Name: "updated_at", Type: "TIMESTAMPTZ", Nullable: true},
					{Name: "product", Type: "TEXT", Nullable: false},
					{Name: "latitude", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "longitude", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "elevation_meters", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "period_count", Type: "INTEGER", Nullable: false},
				},
				PrimaryKey:  []string{"event_id"},
				PruneColumn: "issued_at",
				Indexes: []fksinks.PostgresIndex{
					{Name: "idx_wf_fc_location_product_issued_at", Columns: []string{"location_id", "product", "issued_at"}},
					{Name: "idx_wf_fc_issued_at", Columns: []string{"issued_at"}},
					{Name: "idx_wf_fc_product_issued_at", Columns: []string{"product", "issued_at"}},
				},
			},
			{
				// One row per period within a forecast run.
				Name: tableForecastPeriods,
				Columns: []fksinks.PostgresColumn{
					{Name: "run_event_id", Type: "TEXT REFERENCES forecasts(event_id) ON DELETE CASCADE", Nullable: false},
					{Name: "period_index", Type: "INTEGER", Nullable: false},
					{Name: "issued_at", Type: "TIMESTAMPTZ", Nullable: false},
					{Name: "start_time", Type: "TIMESTAMPTZ", Nullable: false},
					{Name: "end_time", Type: "TIMESTAMPTZ", Nullable: false},
					{Name: "name", Type: "TEXT", Nullable: true},
					{Name: "is_day", Type: "BOOLEAN", Nullable: true},
					{Name: "condition_code", Type: "INTEGER", Nullable: false},
					{Name: "text_description", Type: "TEXT", Nullable: true},
					{Name: "temperature_c", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "temperature_c_min", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "temperature_c_max", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "dewpoint_c", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "relative_humidity_percent", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "wind_direction_degrees", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "wind_speed_kmh", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "wind_gust_kmh", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "barometric_pressure_pa", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "visibility_meters", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "apparent_temperature_c", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "cloud_cover_percent", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "probability_of_precipitation_percent", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "precipitation_amount_mm", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "snowfall_depth_mm", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "uv_index", Type: "DOUBLE PRECISION", Nullable: true},
				},
				PrimaryKey:  []string{"run_event_id", "period_index"},
				PruneColumn: "issued_at",
				Indexes: []fksinks.PostgresIndex{
					{Name: "idx_wf_fc_period_start_time", Columns: []string{"start_time"}},
					{Name: "idx_wf_fc_period_end_time", Columns: []string{"end_time"}},
					{Name: "idx_wf_fc_period_run_start", Columns: []string{"run_event_id", "start_time"}},
				},
			},
			{
				// One row per alert-run snapshot; alerts live in alerts.
				Name: tableAlertRuns,
				Columns: []fksinks.PostgresColumn{
					{Name: "event_id", Type: "TEXT", Nullable: false},
					{Name: "event_kind", Type: "TEXT", Nullable: false},
					{Name: "event_source", Type: "TEXT", Nullable: false},
					{Name: "event_schema", Type: "TEXT", Nullable: false},
					{Name: "event_emitted_at", Type: "TIMESTAMPTZ", Nullable: false},
					{Name: "event_effective_at", Type: "TIMESTAMPTZ", Nullable: true},
					{Name: "location_id", Type: "TEXT", Nullable: true},
					{Name: "location_name", Type: "TEXT", Nullable: true},
					{Name: "as_of", Type: "TIMESTAMPTZ", Nullable: false},
					{Name: "latitude", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "longitude", Type: "DOUBLE PRECISION", Nullable: true},
					{Name: "alert_count", Type: "INTEGER", Nullable: false},
				},
				PrimaryKey:  []string{"event_id"},
				PruneColumn: "as_of",
				Indexes: []fksinks.PostgresIndex{
					{Name: "idx_wf_alert_run_location_as_of", Columns: []string{"location_id", "as_of"}},
					{Name: "idx_wf_alert_run_as_of", Columns: []string{"as_of"}},
				},
			},
			{
				// One row per alert within a run.
				Name: tableAlerts,
				Columns: []fksinks.PostgresColumn{
					{Name: "run_event_id", Type: "TEXT REFERENCES alert_runs(event_id) ON DELETE CASCADE", Nullable: false},
					{Name: "alert_index", Type: "INTEGER", Nullable: false},
					{Name: "as_of", Type: "TIMESTAMPTZ", Nullable: false},
					{Name: "alert_id", Type: "TEXT", Nullable: false},
					{Name: "event", Type: "TEXT", Nullable: true},
					{Name: "headline", Type: "TEXT", Nullable: true},
					{Name: "severity", Type: "TEXT", Nullable: true},
					{Name: "urgency", Type: "TEXT", Nullable: true},
					{Name: "certainty", Type: "TEXT", Nullable: true},
					{Name: "status", Type: "TEXT", Nullable: true},
					{Name: "message_type", Type: "TEXT", Nullable: true},
					{Name: "category", Type: "TEXT", Nullable: true},
					{Name: "response", Type: "TEXT", Nullable: true},
					{Name: "description", Type: "TEXT", Nullable: true},
					{Name: "instruction", Type: "TEXT", Nullable: true},
					{Name: "sent", Type: "TIMESTAMPTZ", Nullable: true},
					{Name: "effective", Type: "TIMESTAMPTZ", Nullable: true},
					{Name: "onset", Type: "TIMESTAMPTZ", Nullable: true},
					{Name: "expires", Type: "TIMESTAMPTZ", Nullable: true},
					{Name: "area_description", Type: "TEXT", Nullable: true},
					{Name: "sender_name", Type: "TEXT", Nullable: true},
					{Name: "reference_count", Type: "INTEGER", Nullable: false},
				},
				PrimaryKey:  []string{"run_event_id", "alert_index"},
				PruneColumn: "as_of",
				Indexes: []fksinks.PostgresIndex{
					{Name: "idx_wf_alerts_alert_id", Columns: []string{"alert_id"}},
					{Name: "idx_wf_alerts_severity_expires", Columns: []string{"severity", "expires"}},
					{Name: "idx_wf_alerts_as_of", Columns: []string{"as_of"}},
				},
			},
			{
				// One row per reference within an alert.
				Name: tableAlertReferences,
				Columns: []fksinks.PostgresColumn{
					{Name: "run_event_id", Type: "TEXT REFERENCES alert_runs(event_id) ON DELETE CASCADE", Nullable: false},
					{Name: "alert_index", Type: "INTEGER", Nullable: false},
					{Name: "reference_index", Type: "INTEGER", Nullable: false},
					{Name: "as_of", Type: "TIMESTAMPTZ", Nullable: false},
					{Name: "id", Type: "TEXT", Nullable: true},
					{Name: "identifier", Type: "TEXT", Nullable: true},
					{Name: "sender", Type: "TEXT", Nullable: true},
					{Name: "sent", Type: "TIMESTAMPTZ", Nullable: true},
				},
				PrimaryKey:  []string{"run_event_id", "alert_index", "reference_index"},
				PruneColumn: "as_of",
				Indexes: []fksinks.PostgresIndex{
					{Name: "idx_wf_alert_refs_as_of", Columns: []string{"as_of"}},
					{Name: "idx_wf_alert_refs_sent", Columns: []string{"sent"}},
				},
			},
		},
		MapEvent: mapPostgresEvent,
	}
}
|
||||||
95
internal/sinks/postgres/schema_test.go
Normal file
95
internal/sinks/postgres/schema_test.go
Normal file
@@ -0,0 +1,95 @@
|
|||||||
|
package postgres
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"gitea.maximumdirect.net/ejr/feedkit/config"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestRegisterPostgresSchemasNilConfig verifies that a nil config is rejected
// with a descriptive error rather than panicking.
func TestRegisterPostgresSchemasNilConfig(t *testing.T) {
	err := RegisterPostgresSchemas(nil)
	if err == nil {
		t.Fatalf("RegisterPostgresSchemas(nil) expected error")
	}
	if !strings.Contains(err.Error(), "config is nil") {
		t.Fatalf("error = %q, want config is nil", err)
	}
}
|
||||||
|
|
||||||
|
// TestRegisterPostgresSchemasNonPostgresNoOp verifies that sinks with
// non-postgres drivers are skipped without error.
func TestRegisterPostgresSchemasNonPostgresNoOp(t *testing.T) {
	cfg := &config.Config{
		Sinks: []config.SinkConfig{
			{Name: "stdout_only", Driver: "stdout"},
			{Name: "nats_only", Driver: "nats"},
		},
	}

	if err := RegisterPostgresSchemas(cfg); err != nil {
		t.Fatalf("RegisterPostgresSchemas(non-postgres) error = %v", err)
	}
}
|
||||||
|
|
||||||
|
// TestRegisterPostgresSchemasDuplicateRegistrationFails verifies that
// registering the same sink name twice fails. A time-based unique sink name
// is used to avoid colliding with registrations made by other tests
// (registrations appear to persist across calls within the test binary).
func TestRegisterPostgresSchemasDuplicateRegistrationFails(t *testing.T) {
	sinkName := uniqueSinkName("pg_test")
	cfg := &config.Config{
		Sinks: []config.SinkConfig{
			{Name: sinkName, Driver: "postgres"},
		},
	}

	if err := RegisterPostgresSchemas(cfg); err != nil {
		t.Fatalf("first RegisterPostgresSchemas() error = %v", err)
	}

	err := RegisterPostgresSchemas(cfg)
	if err == nil {
		t.Fatalf("second RegisterPostgresSchemas() expected duplicate error")
	}
	if !strings.Contains(err.Error(), "already registered") {
		t.Fatalf("error = %q, want already registered", err)
	}
}
|
||||||
|
|
||||||
|
// TestWeatherPostgresSchemaShape sanity-checks the schema: the event mapper
// is set, exactly the expected tables exist, every table declares a prune
// column, and index names are unique across the whole schema.
func TestWeatherPostgresSchemaShape(t *testing.T) {
	s := weatherPostgresSchema()
	if s.MapEvent == nil {
		t.Fatalf("weatherPostgresSchema().MapEvent is nil")
	}

	wantTables := map[string]bool{
		tableObservations:              true,
		tableObservationPresentWeather: true,
		tableForecasts:                 true,
		tableForecastPeriods:           true,
		tableAlertRuns:                 true,
		tableAlerts:                    true,
		tableAlertReferences:           true,
	}

	if len(s.Tables) != len(wantTables) {
		t.Fatalf("weatherPostgresSchema().Tables len = %d, want %d", len(s.Tables), len(wantTables))
	}

	// Index names must be globally unique (Postgres index names share one
	// namespace per schema).
	seenIndexes := map[string]bool{}
	for _, tbl := range s.Tables {
		if !wantTables[tbl.Name] {
			t.Fatalf("unexpected table %q in schema", tbl.Name)
		}
		if tbl.PruneColumn == "" {
			t.Fatalf("table %q missing prune column", tbl.Name)
		}
		for _, idx := range tbl.Indexes {
			if seenIndexes[idx.Name] {
				t.Fatalf("duplicate index name %q", idx.Name)
			}
			seenIndexes[idx.Name] = true
		}
	}
}
|
||||||
|
|
||||||
|
func uniqueSinkName(prefix string) string {
|
||||||
|
return fmt.Sprintf("%s_%d", prefix, time.Now().UnixNano())
|
||||||
|
}
|
||||||
@@ -19,8 +19,11 @@ func RegisterBuiltins(r *fksource.Registry) {
|
|||||||
r.RegisterPoll("nws_alerts", func(cfg config.SourceConfig) (fksource.PollSource, error) {
|
r.RegisterPoll("nws_alerts", func(cfg config.SourceConfig) (fksource.PollSource, error) {
|
||||||
return nws.NewAlertsSource(cfg)
|
return nws.NewAlertsSource(cfg)
|
||||||
})
|
})
|
||||||
r.RegisterPoll("nws_forecast", func(cfg config.SourceConfig) (fksource.PollSource, error) {
|
r.RegisterPoll("nws_forecast_hourly", func(cfg config.SourceConfig) (fksource.PollSource, error) {
|
||||||
return nws.NewForecastSource(cfg)
|
return nws.NewHourlyForecastSource(cfg)
|
||||||
|
})
|
||||||
|
r.RegisterPoll("nws_forecast_narrative", func(cfg config.SourceConfig) (fksource.PollSource, error) {
|
||||||
|
return nws.NewNarrativeForecastSource(cfg)
|
||||||
})
|
})
|
||||||
|
|
||||||
// Open-Meteo drivers
|
// Open-Meteo drivers
|
||||||
|
|||||||
60
internal/sources/builtins_test.go
Normal file
60
internal/sources/builtins_test.go
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
package sources
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"gitea.maximumdirect.net/ejr/feedkit/config"
|
||||||
|
fksource "gitea.maximumdirect.net/ejr/feedkit/sources"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestRegisterBuiltinsRegistersNWSHourlyForecastDriver verifies that the
// "nws_forecast_hourly" driver is registered and builds a poll source.
func TestRegisterBuiltinsRegistersNWSHourlyForecastDriver(t *testing.T) {
	reg := fksource.NewRegistry()
	RegisterBuiltins(reg)

	in, err := reg.BuildInput(sourceConfigForDriver("nws_forecast_hourly"))
	if err != nil {
		t.Fatalf("BuildInput(nws_forecast_hourly) error = %v", err)
	}
	if _, ok := in.(fksource.PollSource); !ok {
		t.Fatalf("BuildInput(nws_forecast_hourly) type = %T, want PollSource", in)
	}
}
|
||||||
|
|
||||||
|
// TestRegisterBuiltinsRegistersNWSNarrativeForecastDriver verifies that the
// "nws_forecast_narrative" driver is registered and builds a poll source.
func TestRegisterBuiltinsRegistersNWSNarrativeForecastDriver(t *testing.T) {
	reg := fksource.NewRegistry()
	RegisterBuiltins(reg)

	in, err := reg.BuildInput(sourceConfigForDriver("nws_forecast_narrative"))
	if err != nil {
		t.Fatalf("BuildInput(nws_forecast_narrative) error = %v", err)
	}
	if _, ok := in.(fksource.PollSource); !ok {
		t.Fatalf("BuildInput(nws_forecast_narrative) type = %T, want PollSource", in)
	}
}
|
||||||
|
|
||||||
|
// TestRegisterBuiltinsDoesNotRegisterLegacyNWSForecastDriver verifies that
// the legacy "nws_forecast" driver name is gone after the rename to the
// hourly/narrative drivers, and that building it reports an unknown driver.
func TestRegisterBuiltinsDoesNotRegisterLegacyNWSForecastDriver(t *testing.T) {
	reg := fksource.NewRegistry()
	RegisterBuiltins(reg)

	_, err := reg.BuildInput(sourceConfigForDriver("nws_forecast"))
	if err == nil {
		t.Fatalf("BuildInput(nws_forecast) expected unknown driver error")
	}
	if !strings.Contains(err.Error(), `unknown source driver: "nws_forecast"`) {
		t.Fatalf("error = %q, want unknown source driver for nws_forecast", err)
	}
}
|
||||||
|
|
||||||
|
// sourceConfigForDriver returns a minimal poll-mode source config for driver.
// url and user_agent params are supplied because source constructors require
// them; the .invalid host is never dialed by these tests (they only build
// the source).
func sourceConfigForDriver(driver string) config.SourceConfig {
	return config.SourceConfig{
		Name:   "test-source",
		Driver: driver,
		Mode:   config.SourceModePoll,
		Params: map[string]any{
			"url":        "https://example.invalid",
			"user_agent": "test-agent",
		},
	}
}
|
||||||
@@ -1,104 +0,0 @@
|
|||||||
// FILE: ./internal/sources/common/config.go
|
|
||||||
package common
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"gitea.maximumdirect.net/ejr/feedkit/config"
|
|
||||||
)
|
|
||||||
|
|
||||||
// This file centralizes small, boring config-validation patterns shared across
|
|
||||||
// weatherfeeder source drivers.
|
|
||||||
//
|
|
||||||
// Goal: keep driver constructors (New*Source) easy to read and consistent, while
|
|
||||||
// keeping driver-specific options in cfg.Params (feedkit remains domain-agnostic).
|
|
||||||
|
|
||||||
// HTTPSourceConfig is the standard "HTTP-polling source" config shape used across drivers.
|
|
||||||
type HTTPSourceConfig struct {
|
|
||||||
Name string
|
|
||||||
URL string
|
|
||||||
UserAgent string
|
|
||||||
}
|
|
||||||
|
|
||||||
// RequireHTTPSourceConfig enforces weatherfeeder's standard HTTP source config:
|
|
||||||
//
|
|
||||||
// - cfg.Name must be present
|
|
||||||
// - cfg.Params must be present
|
|
||||||
// - params.url must be present (accepts "url" or "URL")
|
|
||||||
// - params.user_agent must be present (accepts "user_agent" or "userAgent")
|
|
||||||
//
|
|
||||||
// We intentionally require a User-Agent for *all* sources, even when upstreams
|
|
||||||
// do not strictly require one. This keeps config uniform across providers.
|
|
||||||
func RequireHTTPSourceConfig(driver string, cfg config.SourceConfig) (HTTPSourceConfig, error) {
|
|
||||||
if strings.TrimSpace(cfg.Name) == "" {
|
|
||||||
return HTTPSourceConfig{}, fmt.Errorf("%s: name is required", driver)
|
|
||||||
}
|
|
||||||
if cfg.Params == nil {
|
|
||||||
return HTTPSourceConfig{}, fmt.Errorf("%s %q: params are required (need params.url and params.user_agent)", driver, cfg.Name)
|
|
||||||
}
|
|
||||||
|
|
||||||
url, ok := cfg.ParamString("url", "URL")
|
|
||||||
if !ok {
|
|
||||||
return HTTPSourceConfig{}, fmt.Errorf("%s %q: params.url is required", driver, cfg.Name)
|
|
||||||
}
|
|
||||||
|
|
||||||
ua, ok := cfg.ParamString("user_agent", "userAgent")
|
|
||||||
if !ok {
|
|
||||||
return HTTPSourceConfig{}, fmt.Errorf("%s %q: params.user_agent is required", driver, cfg.Name)
|
|
||||||
}
|
|
||||||
|
|
||||||
return HTTPSourceConfig{
|
|
||||||
Name: cfg.Name,
|
|
||||||
URL: url,
|
|
||||||
UserAgent: ua,
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// --- The helpers below remain useful for future drivers; they are not required
|
|
||||||
// --- by the observation sources after adopting RequireHTTPSourceConfig.
|
|
||||||
|
|
||||||
// RequireName ensures cfg.Name is present and non-whitespace.
|
|
||||||
func RequireName(driver string, cfg config.SourceConfig) error {
|
|
||||||
if strings.TrimSpace(cfg.Name) == "" {
|
|
||||||
return fmt.Errorf("%s: name is required", driver)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// RequireParams ensures cfg.Params is non-nil. The "want" string should be a short
|
|
||||||
// description of required keys, e.g. "need params.url and params.user_agent".
|
|
||||||
func RequireParams(driver string, cfg config.SourceConfig, want string) error {
|
|
||||||
if cfg.Params == nil {
|
|
||||||
return fmt.Errorf("%s %q: params are required (%s)", driver, cfg.Name, want)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// RequireURL returns the configured URL for a source.
|
|
||||||
// Canonical key is "url"; we also accept "URL" as a convenience.
|
|
||||||
func RequireURL(driver string, cfg config.SourceConfig) (string, error) {
|
|
||||||
if cfg.Params == nil {
|
|
||||||
return "", fmt.Errorf("%s %q: params are required (need params.url)", driver, cfg.Name)
|
|
||||||
}
|
|
||||||
|
|
||||||
u, ok := cfg.ParamString("url", "URL")
|
|
||||||
if !ok {
|
|
||||||
return "", fmt.Errorf("%s %q: params.url is required", driver, cfg.Name)
|
|
||||||
}
|
|
||||||
return u, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// RequireUserAgent returns the configured User-Agent for a source.
|
|
||||||
// Canonical key is "user_agent"; we also accept "userAgent" as a convenience.
|
|
||||||
func RequireUserAgent(driver string, cfg config.SourceConfig) (string, error) {
|
|
||||||
if cfg.Params == nil {
|
|
||||||
return "", fmt.Errorf("%s %q: params are required (need params.user_agent)", driver, cfg.Name)
|
|
||||||
}
|
|
||||||
|
|
||||||
ua, ok := cfg.ParamString("user_agent", "userAgent")
|
|
||||||
if !ok {
|
|
||||||
return "", fmt.Errorf("%s %q: params.user_agent is required", driver, cfg.Name)
|
|
||||||
}
|
|
||||||
return ua, nil
|
|
||||||
}
|
|
||||||
@@ -1,76 +0,0 @@
|
|||||||
// FILE: ./internal/sources/common/http_source.go
|
|
||||||
package common
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"net/http"
|
|
||||||
|
|
||||||
"gitea.maximumdirect.net/ejr/feedkit/config"
|
|
||||||
"gitea.maximumdirect.net/ejr/feedkit/transport"
|
|
||||||
)
|
|
||||||
|
|
||||||
// HTTPSource is a tiny, reusable "HTTP polling spine" for weatherfeeder sources.
|
|
||||||
//
|
|
||||||
// It centralizes the boring parts:
|
|
||||||
// - standard config shape (url + user_agent) via RequireHTTPSourceConfig
|
|
||||||
// - a default http.Client with timeout
|
|
||||||
// - FetchBody / headers / max-body safety limit
|
|
||||||
// - consistent error wrapping (driver + source name)
|
|
||||||
//
|
|
||||||
// Individual drivers remain responsible for:
|
|
||||||
// - decoding minimal metadata (for Event.ID / EffectiveAt)
|
|
||||||
// - constructing the event envelope (kind/schema/payload)
|
|
||||||
type HTTPSource struct {
|
|
||||||
Driver string
|
|
||||||
Name string
|
|
||||||
URL string
|
|
||||||
UserAgent string
|
|
||||||
Accept string
|
|
||||||
Client *http.Client
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewHTTPSource builds an HTTPSource using weatherfeeder's standard HTTP source
|
|
||||||
// config (params.url + params.user_agent) and a default HTTP client.
|
|
||||||
func NewHTTPSource(driver string, cfg config.SourceConfig, accept string) (*HTTPSource, error) {
|
|
||||||
c, err := RequireHTTPSourceConfig(driver, cfg)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return &HTTPSource{
|
|
||||||
Driver: driver,
|
|
||||||
Name: c.Name,
|
|
||||||
URL: c.URL,
|
|
||||||
UserAgent: c.UserAgent,
|
|
||||||
Accept: accept,
|
|
||||||
Client: transport.NewHTTPClient(transport.DefaultHTTPTimeout),
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// FetchBytes fetches the URL and returns the raw response body bytes.
|
|
||||||
func (s *HTTPSource) FetchBytes(ctx context.Context) ([]byte, error) {
|
|
||||||
client := s.Client
|
|
||||||
if client == nil {
|
|
||||||
// Defensive: allow tests or callers to nil out Client; keep behavior sane.
|
|
||||||
client = transport.NewHTTPClient(transport.DefaultHTTPTimeout)
|
|
||||||
}
|
|
||||||
|
|
||||||
b, err := transport.FetchBody(ctx, client, s.URL, s.UserAgent, s.Accept)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("%s %q: %w", s.Driver, s.Name, err)
|
|
||||||
}
|
|
||||||
return b, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// FetchJSON fetches the URL and returns the raw body as json.RawMessage.
|
|
||||||
// json.Unmarshal accepts json.RawMessage directly, so callers can decode minimal
|
|
||||||
// metadata without keeping both []byte and RawMessage in their own structs.
|
|
||||||
func (s *HTTPSource) FetchJSON(ctx context.Context) (json.RawMessage, error) {
|
|
||||||
b, err := s.FetchBytes(ctx)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return json.RawMessage(b), nil
|
|
||||||
}
|
|
||||||
@@ -9,6 +9,7 @@ import (
|
|||||||
|
|
||||||
"gitea.maximumdirect.net/ejr/feedkit/config"
|
"gitea.maximumdirect.net/ejr/feedkit/config"
|
||||||
"gitea.maximumdirect.net/ejr/feedkit/event"
|
"gitea.maximumdirect.net/ejr/feedkit/event"
|
||||||
|
fksources "gitea.maximumdirect.net/ejr/feedkit/sources"
|
||||||
nwscommon "gitea.maximumdirect.net/ejr/weatherfeeder/internal/providers/nws"
|
nwscommon "gitea.maximumdirect.net/ejr/weatherfeeder/internal/providers/nws"
|
||||||
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/sources/common"
|
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/sources/common"
|
||||||
"gitea.maximumdirect.net/ejr/weatherfeeder/standards"
|
"gitea.maximumdirect.net/ejr/weatherfeeder/standards"
|
||||||
@@ -22,14 +23,14 @@ import (
|
|||||||
// Output schema:
|
// Output schema:
|
||||||
// - standards.SchemaRawNWSAlertsV1
|
// - standards.SchemaRawNWSAlertsV1
|
||||||
type AlertsSource struct {
|
type AlertsSource struct {
|
||||||
http *common.HTTPSource
|
http *fksources.HTTPSource
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewAlertsSource(cfg config.SourceConfig) (*AlertsSource, error) {
|
func NewAlertsSource(cfg config.SourceConfig) (*AlertsSource, error) {
|
||||||
const driver = "nws_alerts"
|
const driver = "nws_alerts"
|
||||||
|
|
||||||
// NWS alerts responses are GeoJSON-ish; allow fallback to plain JSON as well.
|
// NWS alerts responses are GeoJSON-ish; allow fallback to plain JSON as well.
|
||||||
hs, err := common.NewHTTPSource(driver, cfg, "application/geo+json, application/json")
|
hs, err := fksources.NewHTTPSource(driver, cfg, "application/geo+json, application/json")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -43,10 +44,13 @@ func (s *AlertsSource) Name() string { return s.http.Name }
|
|||||||
func (s *AlertsSource) Kind() event.Kind { return event.Kind("alert") }
|
func (s *AlertsSource) Kind() event.Kind { return event.Kind("alert") }
|
||||||
|
|
||||||
func (s *AlertsSource) Poll(ctx context.Context) ([]event.Event, error) {
|
func (s *AlertsSource) Poll(ctx context.Context) ([]event.Event, error) {
|
||||||
raw, meta, err := s.fetchRaw(ctx)
|
raw, meta, changed, err := s.fetchRaw(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
if !changed {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
// EffectiveAt policy for alerts:
|
// EffectiveAt policy for alerts:
|
||||||
// Prefer the collection-level "updated" timestamp (best dedupe signal).
|
// Prefer the collection-level "updated" timestamp (best dedupe signal).
|
||||||
@@ -97,16 +101,19 @@ type alertsMeta struct {
|
|||||||
ParsedLatestFeatureTime time.Time `json:"-"`
|
ParsedLatestFeatureTime time.Time `json:"-"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *AlertsSource) fetchRaw(ctx context.Context) (json.RawMessage, alertsMeta, error) {
|
func (s *AlertsSource) fetchRaw(ctx context.Context) (json.RawMessage, alertsMeta, bool, error) {
|
||||||
raw, err := s.http.FetchJSON(ctx)
|
raw, changed, err := s.http.FetchJSONIfChanged(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, alertsMeta{}, err
|
return nil, alertsMeta{}, false, err
|
||||||
|
}
|
||||||
|
if !changed {
|
||||||
|
return nil, alertsMeta{}, false, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
var meta alertsMeta
|
var meta alertsMeta
|
||||||
if err := json.Unmarshal(raw, &meta); err != nil {
|
if err := json.Unmarshal(raw, &meta); err != nil {
|
||||||
// If metadata decode fails, still return raw; Poll will fall back to Source:EmittedAt.
|
// If metadata decode fails, still return raw; Poll will fall back to Source:EmittedAt.
|
||||||
return raw, alertsMeta{}, nil
|
return raw, alertsMeta{}, true, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Top-level updated (preferred).
|
// Top-level updated (preferred).
|
||||||
@@ -143,5 +150,5 @@ func (s *AlertsSource) fetchRaw(ctx context.Context) (json.RawMessage, alertsMet
|
|||||||
}
|
}
|
||||||
meta.ParsedLatestFeatureTime = latest
|
meta.ParsedLatestFeatureTime = latest
|
||||||
|
|
||||||
return raw, meta, nil
|
return raw, meta, true, nil
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
// FILE: internal/sources/nws/forecast.go
|
// FILE: internal/sources/nws/forecast_hourly.go
|
||||||
package nws
|
package nws
|
||||||
|
|
||||||
import (
|
import (
|
||||||
@@ -9,44 +9,48 @@ import (
|
|||||||
|
|
||||||
"gitea.maximumdirect.net/ejr/feedkit/config"
|
"gitea.maximumdirect.net/ejr/feedkit/config"
|
||||||
"gitea.maximumdirect.net/ejr/feedkit/event"
|
"gitea.maximumdirect.net/ejr/feedkit/event"
|
||||||
|
fksources "gitea.maximumdirect.net/ejr/feedkit/sources"
|
||||||
nwscommon "gitea.maximumdirect.net/ejr/weatherfeeder/internal/providers/nws"
|
nwscommon "gitea.maximumdirect.net/ejr/weatherfeeder/internal/providers/nws"
|
||||||
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/sources/common"
|
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/sources/common"
|
||||||
"gitea.maximumdirect.net/ejr/weatherfeeder/standards"
|
"gitea.maximumdirect.net/ejr/weatherfeeder/standards"
|
||||||
)
|
)
|
||||||
|
|
||||||
// ForecastSource polls an NWS forecast endpoint (narrative or hourly) and emits a RAW forecast Event.
|
// HourlyForecastSource polls an NWS hourly forecast endpoint and emits a RAW forecast Event.
|
||||||
//
|
//
|
||||||
// It intentionally emits the *entire* upstream payload as json.RawMessage and only decodes
|
// It intentionally emits the *entire* upstream payload as json.RawMessage and only decodes
|
||||||
// minimal metadata for Event.EffectiveAt and Event.ID.
|
// minimal metadata for Event.EffectiveAt and Event.ID.
|
||||||
//
|
//
|
||||||
// Output schema (current implementation):
|
// Output schema (current implementation):
|
||||||
// - standards.SchemaRawNWSHourlyForecastV1
|
// - standards.SchemaRawNWSHourlyForecastV1
|
||||||
type ForecastSource struct {
|
type HourlyForecastSource struct {
|
||||||
http *common.HTTPSource
|
http *fksources.HTTPSource
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewForecastSource(cfg config.SourceConfig) (*ForecastSource, error) {
|
func NewHourlyForecastSource(cfg config.SourceConfig) (*HourlyForecastSource, error) {
|
||||||
const driver = "nws_forecast"
|
const driver = "nws_forecast_hourly"
|
||||||
|
|
||||||
// NWS forecast endpoints are GeoJSON (and sometimes also advertise json-ld/json).
|
// NWS forecast endpoints are GeoJSON (and sometimes also advertise json-ld/json).
|
||||||
hs, err := common.NewHTTPSource(driver, cfg, "application/geo+json, application/json")
|
hs, err := fksources.NewHTTPSource(driver, cfg, "application/geo+json, application/json")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
return &ForecastSource{http: hs}, nil
|
return &HourlyForecastSource{http: hs}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *ForecastSource) Name() string { return s.http.Name }
|
func (s *HourlyForecastSource) Name() string { return s.http.Name }
|
||||||
|
|
||||||
// Kind is used for routing/policy.
|
// Kind is used for routing/policy.
|
||||||
func (s *ForecastSource) Kind() event.Kind { return event.Kind("forecast") }
|
func (s *HourlyForecastSource) Kind() event.Kind { return event.Kind("forecast") }
|
||||||
|
|
||||||
func (s *ForecastSource) Poll(ctx context.Context) ([]event.Event, error) {
|
func (s *HourlyForecastSource) Poll(ctx context.Context) ([]event.Event, error) {
|
||||||
raw, meta, err := s.fetchRaw(ctx)
|
raw, meta, changed, err := s.fetchRaw(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
if !changed {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
// EffectiveAt is optional; for forecasts it’s most naturally the run “issued” time.
|
// EffectiveAt is optional; for forecasts it’s most naturally the run “issued” time.
|
||||||
// NWS gridpoint forecasts expose generatedAt (preferred) and updateTime/updated.
|
// NWS gridpoint forecasts expose generatedAt (preferred) and updateTime/updated.
|
||||||
@@ -80,7 +84,7 @@ func (s *ForecastSource) Poll(ctx context.Context) ([]event.Event, error) {
|
|||||||
|
|
||||||
// ---- RAW fetch + minimal metadata decode ----
|
// ---- RAW fetch + minimal metadata decode ----
|
||||||
|
|
||||||
type forecastMeta struct {
|
type hourlyForecastMeta struct {
|
||||||
// Present for GeoJSON Feature responses, but often stable (endpoint URL).
|
// Present for GeoJSON Feature responses, but often stable (endpoint URL).
|
||||||
ID string `json:"id"`
|
ID string `json:"id"`
|
||||||
|
|
||||||
@@ -94,16 +98,19 @@ type forecastMeta struct {
|
|||||||
ParsedUpdateTime time.Time `json:"-"`
|
ParsedUpdateTime time.Time `json:"-"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *ForecastSource) fetchRaw(ctx context.Context) (json.RawMessage, forecastMeta, error) {
|
func (s *HourlyForecastSource) fetchRaw(ctx context.Context) (json.RawMessage, hourlyForecastMeta, bool, error) {
|
||||||
raw, err := s.http.FetchJSON(ctx)
|
raw, changed, err := s.http.FetchJSONIfChanged(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, forecastMeta{}, err
|
return nil, hourlyForecastMeta{}, false, err
|
||||||
|
}
|
||||||
|
if !changed {
|
||||||
|
return nil, hourlyForecastMeta{}, false, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
var meta forecastMeta
|
var meta hourlyForecastMeta
|
||||||
if err := json.Unmarshal(raw, &meta); err != nil {
|
if err := json.Unmarshal(raw, &meta); err != nil {
|
||||||
// If metadata decode fails, still return raw; Poll will fall back to Source:EmittedAt.
|
// If metadata decode fails, still return raw; Poll will fall back to Source:EmittedAt.
|
||||||
return raw, forecastMeta{}, nil
|
return raw, hourlyForecastMeta{}, true, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// generatedAt (preferred)
|
// generatedAt (preferred)
|
||||||
@@ -125,5 +132,5 @@ func (s *ForecastSource) fetchRaw(ctx context.Context) (json.RawMessage, forecas
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return raw, meta, nil
|
return raw, meta, true, nil
|
||||||
}
|
}
|
||||||
136
internal/sources/nws/forecast_narrative.go
Normal file
136
internal/sources/nws/forecast_narrative.go
Normal file
@@ -0,0 +1,136 @@
|
|||||||
|
// FILE: internal/sources/nws/forecast_narrative.go
|
||||||
|
package nws
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"gitea.maximumdirect.net/ejr/feedkit/config"
|
||||||
|
"gitea.maximumdirect.net/ejr/feedkit/event"
|
||||||
|
fksources "gitea.maximumdirect.net/ejr/feedkit/sources"
|
||||||
|
nwscommon "gitea.maximumdirect.net/ejr/weatherfeeder/internal/providers/nws"
|
||||||
|
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/sources/common"
|
||||||
|
"gitea.maximumdirect.net/ejr/weatherfeeder/standards"
|
||||||
|
)
|
||||||
|
|
||||||
|
// NarrativeForecastSource polls an NWS narrative forecast endpoint and emits a RAW forecast Event.
|
||||||
|
//
|
||||||
|
// It intentionally emits the *entire* upstream payload as json.RawMessage and only decodes
|
||||||
|
// minimal metadata for Event.EffectiveAt and Event.ID.
|
||||||
|
//
|
||||||
|
// Output schema:
|
||||||
|
// - standards.SchemaRawNWSNarrativeForecastV1
|
||||||
|
type NarrativeForecastSource struct {
|
||||||
|
http *fksources.HTTPSource
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewNarrativeForecastSource(cfg config.SourceConfig) (*NarrativeForecastSource, error) {
|
||||||
|
const driver = "nws_forecast_narrative"
|
||||||
|
|
||||||
|
// NWS forecast endpoints are GeoJSON (and sometimes also advertise json-ld/json).
|
||||||
|
hs, err := fksources.NewHTTPSource(driver, cfg, "application/geo+json, application/json")
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &NarrativeForecastSource{http: hs}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *NarrativeForecastSource) Name() string { return s.http.Name }
|
||||||
|
|
||||||
|
// Kind is used for routing/policy.
|
||||||
|
func (s *NarrativeForecastSource) Kind() event.Kind { return event.Kind("forecast") }
|
||||||
|
|
||||||
|
func (s *NarrativeForecastSource) Poll(ctx context.Context) ([]event.Event, error) {
|
||||||
|
raw, meta, changed, err := s.fetchRaw(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if !changed {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// EffectiveAt is optional; for forecasts it’s most naturally the run “issued” time.
|
||||||
|
// NWS gridpoint forecasts expose generatedAt (preferred) and updateTime/updated.
|
||||||
|
var effectiveAt *time.Time
|
||||||
|
switch {
|
||||||
|
case !meta.ParsedGeneratedAt.IsZero():
|
||||||
|
t := meta.ParsedGeneratedAt.UTC()
|
||||||
|
effectiveAt = &t
|
||||||
|
case !meta.ParsedUpdateTime.IsZero():
|
||||||
|
t := meta.ParsedUpdateTime.UTC()
|
||||||
|
effectiveAt = &t
|
||||||
|
}
|
||||||
|
|
||||||
|
emittedAt := time.Now().UTC()
|
||||||
|
|
||||||
|
// NWS gridpoint forecast GeoJSON commonly has a stable "id" equal to the endpoint URL.
|
||||||
|
// That is *not* unique per issued run, so we intentionally do not use it for Event.ID.
|
||||||
|
// Instead we rely on Source:EffectiveAt (or Source:EmittedAt fallback).
|
||||||
|
eventID := common.ChooseEventID("", s.http.Name, effectiveAt, emittedAt)
|
||||||
|
|
||||||
|
return common.SingleRawEvent(
|
||||||
|
s.Kind(),
|
||||||
|
s.http.Name,
|
||||||
|
standards.SchemaRawNWSNarrativeForecastV1,
|
||||||
|
eventID,
|
||||||
|
emittedAt,
|
||||||
|
effectiveAt,
|
||||||
|
raw,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---- RAW fetch + minimal metadata decode ----
|
||||||
|
|
||||||
|
type narrativeForecastMeta struct {
|
||||||
|
// Present for GeoJSON Feature responses, but often stable (endpoint URL).
|
||||||
|
ID string `json:"id"`
|
||||||
|
|
||||||
|
Properties struct {
|
||||||
|
GeneratedAt string `json:"generatedAt"` // preferred “issued/run generated” time
|
||||||
|
UpdateTime string `json:"updateTime"` // last update time of underlying data
|
||||||
|
Updated string `json:"updated"` // deprecated alias for updateTime
|
||||||
|
} `json:"properties"`
|
||||||
|
|
||||||
|
ParsedGeneratedAt time.Time `json:"-"`
|
||||||
|
ParsedUpdateTime time.Time `json:"-"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *NarrativeForecastSource) fetchRaw(ctx context.Context) (json.RawMessage, narrativeForecastMeta, bool, error) {
|
||||||
|
raw, changed, err := s.http.FetchJSONIfChanged(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return nil, narrativeForecastMeta{}, false, err
|
||||||
|
}
|
||||||
|
if !changed {
|
||||||
|
return nil, narrativeForecastMeta{}, false, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var meta narrativeForecastMeta
|
||||||
|
if err := json.Unmarshal(raw, &meta); err != nil {
|
||||||
|
// If metadata decode fails, still return raw; Poll will fall back to Source:EmittedAt.
|
||||||
|
return raw, narrativeForecastMeta{}, true, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// generatedAt (preferred)
|
||||||
|
genStr := strings.TrimSpace(meta.Properties.GeneratedAt)
|
||||||
|
if genStr != "" {
|
||||||
|
if t, err := nwscommon.ParseTime(genStr); err == nil {
|
||||||
|
meta.ParsedGeneratedAt = t.UTC()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// updateTime, with fallback to deprecated "updated"
|
||||||
|
updStr := strings.TrimSpace(meta.Properties.UpdateTime)
|
||||||
|
if updStr == "" {
|
||||||
|
updStr = strings.TrimSpace(meta.Properties.Updated)
|
||||||
|
}
|
||||||
|
if updStr != "" {
|
||||||
|
if t, err := nwscommon.ParseTime(updStr); err == nil {
|
||||||
|
meta.ParsedUpdateTime = t.UTC()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return raw, meta, true, nil
|
||||||
|
}
|
||||||
@@ -9,6 +9,7 @@ import (
|
|||||||
|
|
||||||
"gitea.maximumdirect.net/ejr/feedkit/config"
|
"gitea.maximumdirect.net/ejr/feedkit/config"
|
||||||
"gitea.maximumdirect.net/ejr/feedkit/event"
|
"gitea.maximumdirect.net/ejr/feedkit/event"
|
||||||
|
fksources "gitea.maximumdirect.net/ejr/feedkit/sources"
|
||||||
nwscommon "gitea.maximumdirect.net/ejr/weatherfeeder/internal/providers/nws"
|
nwscommon "gitea.maximumdirect.net/ejr/weatherfeeder/internal/providers/nws"
|
||||||
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/sources/common"
|
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/sources/common"
|
||||||
"gitea.maximumdirect.net/ejr/weatherfeeder/standards"
|
"gitea.maximumdirect.net/ejr/weatherfeeder/standards"
|
||||||
@@ -16,13 +17,13 @@ import (
|
|||||||
|
|
||||||
// ObservationSource polls an NWS station observation endpoint and emits a RAW observation Event.
|
// ObservationSource polls an NWS station observation endpoint and emits a RAW observation Event.
|
||||||
type ObservationSource struct {
|
type ObservationSource struct {
|
||||||
http *common.HTTPSource
|
http *fksources.HTTPSource
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewObservationSource(cfg config.SourceConfig) (*ObservationSource, error) {
|
func NewObservationSource(cfg config.SourceConfig) (*ObservationSource, error) {
|
||||||
const driver = "nws_observation"
|
const driver = "nws_observation"
|
||||||
|
|
||||||
hs, err := common.NewHTTPSource(driver, cfg, "application/geo+json, application/json")
|
hs, err := fksources.NewHTTPSource(driver, cfg, "application/geo+json, application/json")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -35,10 +36,13 @@ func (s *ObservationSource) Name() string { return s.http.Name }
|
|||||||
func (s *ObservationSource) Kind() event.Kind { return event.Kind("observation") }
|
func (s *ObservationSource) Kind() event.Kind { return event.Kind("observation") }
|
||||||
|
|
||||||
func (s *ObservationSource) Poll(ctx context.Context) ([]event.Event, error) {
|
func (s *ObservationSource) Poll(ctx context.Context) ([]event.Event, error) {
|
||||||
raw, meta, err := s.fetchRaw(ctx)
|
raw, meta, changed, err := s.fetchRaw(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
if !changed {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
// EffectiveAt is optional; for observations it’s naturally the observation timestamp.
|
// EffectiveAt is optional; for observations it’s naturally the observation timestamp.
|
||||||
var effectiveAt *time.Time
|
var effectiveAt *time.Time
|
||||||
@@ -72,16 +76,19 @@ type observationMeta struct {
|
|||||||
ParsedTimestamp time.Time `json:"-"`
|
ParsedTimestamp time.Time `json:"-"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *ObservationSource) fetchRaw(ctx context.Context) (json.RawMessage, observationMeta, error) {
|
func (s *ObservationSource) fetchRaw(ctx context.Context) (json.RawMessage, observationMeta, bool, error) {
|
||||||
raw, err := s.http.FetchJSON(ctx)
|
raw, changed, err := s.http.FetchJSONIfChanged(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, observationMeta{}, err
|
return nil, observationMeta{}, false, err
|
||||||
|
}
|
||||||
|
if !changed {
|
||||||
|
return nil, observationMeta{}, false, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
var meta observationMeta
|
var meta observationMeta
|
||||||
if err := json.Unmarshal(raw, &meta); err != nil {
|
if err := json.Unmarshal(raw, &meta); err != nil {
|
||||||
// If metadata decode fails, still return raw; envelope will fall back to Source:EffectiveAt.
|
// If metadata decode fails, still return raw; envelope will fall back to Source:EffectiveAt.
|
||||||
return raw, observationMeta{}, nil
|
return raw, observationMeta{}, true, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
tsStr := strings.TrimSpace(meta.Properties.Timestamp)
|
tsStr := strings.TrimSpace(meta.Properties.Timestamp)
|
||||||
@@ -91,5 +98,5 @@ func (s *ObservationSource) fetchRaw(ctx context.Context) (json.RawMessage, obse
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return raw, meta, nil
|
return raw, meta, true, nil
|
||||||
}
|
}
|
||||||
|
|||||||
63
internal/sources/nws/observation_test.go
Normal file
63
internal/sources/nws/observation_test.go
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
package nws
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"gitea.maximumdirect.net/ejr/feedkit/config"
|
||||||
|
"gitea.maximumdirect.net/ejr/feedkit/event"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestObservationSourcePollReturnsNoEventsOn304(t *testing.T) {
|
||||||
|
var call int
|
||||||
|
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
call++
|
||||||
|
switch call {
|
||||||
|
case 1:
|
||||||
|
w.Header().Set("ETag", `"obs-v1"`)
|
||||||
|
_, _ = w.Write([]byte(`{"id":"obs-1","properties":{"timestamp":"2026-03-28T12:00:00Z"}}`))
|
||||||
|
case 2:
|
||||||
|
if got := r.Header.Get("If-None-Match"); got != `"obs-v1"` {
|
||||||
|
t.Fatalf("second request If-None-Match = %q", got)
|
||||||
|
}
|
||||||
|
w.WriteHeader(http.StatusNotModified)
|
||||||
|
default:
|
||||||
|
t.Fatalf("unexpected call count %d", call)
|
||||||
|
}
|
||||||
|
}))
|
||||||
|
defer srv.Close()
|
||||||
|
|
||||||
|
src, err := NewObservationSource(config.SourceConfig{
|
||||||
|
Name: "NWSObservationTest",
|
||||||
|
Driver: "nws_observation",
|
||||||
|
Mode: config.SourceModePoll,
|
||||||
|
Params: map[string]any{
|
||||||
|
"url": srv.URL,
|
||||||
|
"user_agent": "test-agent",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("NewObservationSource() error = %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
first, err := src.Poll(context.Background())
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("first Poll() error = %v", err)
|
||||||
|
}
|
||||||
|
if len(first) != 1 {
|
||||||
|
t.Fatalf("first Poll() len = %d, want 1", len(first))
|
||||||
|
}
|
||||||
|
if first[0].Kind != event.Kind("observation") {
|
||||||
|
t.Fatalf("first Poll() kind = %q", first[0].Kind)
|
||||||
|
}
|
||||||
|
|
||||||
|
second, err := src.Poll(context.Background())
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("second Poll() error = %v", err)
|
||||||
|
}
|
||||||
|
if len(second) != 0 {
|
||||||
|
t.Fatalf("second Poll() len = %d, want 0", len(second))
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -8,6 +8,7 @@ import (
|
|||||||
|
|
||||||
"gitea.maximumdirect.net/ejr/feedkit/config"
|
"gitea.maximumdirect.net/ejr/feedkit/config"
|
||||||
"gitea.maximumdirect.net/ejr/feedkit/event"
|
"gitea.maximumdirect.net/ejr/feedkit/event"
|
||||||
|
fksources "gitea.maximumdirect.net/ejr/feedkit/sources"
|
||||||
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/providers/openmeteo"
|
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/providers/openmeteo"
|
||||||
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/sources/common"
|
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/sources/common"
|
||||||
"gitea.maximumdirect.net/ejr/weatherfeeder/standards"
|
"gitea.maximumdirect.net/ejr/weatherfeeder/standards"
|
||||||
@@ -15,13 +16,13 @@ import (
|
|||||||
|
|
||||||
// ForecastSource polls an Open-Meteo hourly forecast endpoint and emits one RAW Forecast Event.
|
// ForecastSource polls an Open-Meteo hourly forecast endpoint and emits one RAW Forecast Event.
|
||||||
type ForecastSource struct {
|
type ForecastSource struct {
|
||||||
http *common.HTTPSource
|
http *fksources.HTTPSource
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewForecastSource(cfg config.SourceConfig) (*ForecastSource, error) {
|
func NewForecastSource(cfg config.SourceConfig) (*ForecastSource, error) {
|
||||||
const driver = "openmeteo_forecast"
|
const driver = "openmeteo_forecast"
|
||||||
|
|
||||||
hs, err := common.NewHTTPSource(driver, cfg, "application/json")
|
hs, err := fksources.NewHTTPSource(driver, cfg, "application/json")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -34,10 +35,13 @@ func (s *ForecastSource) Name() string { return s.http.Name }
|
|||||||
func (s *ForecastSource) Kind() event.Kind { return event.Kind("forecast") }
|
func (s *ForecastSource) Kind() event.Kind { return event.Kind("forecast") }
|
||||||
|
|
||||||
func (s *ForecastSource) Poll(ctx context.Context) ([]event.Event, error) {
|
func (s *ForecastSource) Poll(ctx context.Context) ([]event.Event, error) {
|
||||||
raw, meta, err := s.fetchRaw(ctx)
|
raw, meta, changed, err := s.fetchRaw(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
if !changed {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
// Open-Meteo does not expose a true "issued at" timestamp for forecast runs.
|
// Open-Meteo does not expose a true "issued at" timestamp for forecast runs.
|
||||||
// We use current.time when present; otherwise we fall back to the first hourly time
|
// We use current.time when present; otherwise we fall back to the first hourly time
|
||||||
@@ -79,16 +83,19 @@ type forecastMeta struct {
|
|||||||
ParsedTimestamp time.Time `json:"-"`
|
ParsedTimestamp time.Time `json:"-"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *ForecastSource) fetchRaw(ctx context.Context) (json.RawMessage, forecastMeta, error) {
|
func (s *ForecastSource) fetchRaw(ctx context.Context) (json.RawMessage, forecastMeta, bool, error) {
|
||||||
raw, err := s.http.FetchJSON(ctx)
|
raw, changed, err := s.http.FetchJSONIfChanged(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, forecastMeta{}, err
|
return nil, forecastMeta{}, false, err
|
||||||
|
}
|
||||||
|
if !changed {
|
||||||
|
return nil, forecastMeta{}, false, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
var meta forecastMeta
|
var meta forecastMeta
|
||||||
if err := json.Unmarshal(raw, &meta); err != nil {
|
if err := json.Unmarshal(raw, &meta); err != nil {
|
||||||
// If metadata decode fails, still return raw; Poll will fall back to Source:EmittedAt.
|
// If metadata decode fails, still return raw; Poll will fall back to Source:EmittedAt.
|
||||||
return raw, forecastMeta{}, nil
|
return raw, forecastMeta{}, true, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
ts := strings.TrimSpace(meta.Current.Time)
|
ts := strings.TrimSpace(meta.Current.Time)
|
||||||
@@ -106,5 +113,5 @@ func (s *ForecastSource) fetchRaw(ctx context.Context) (json.RawMessage, forecas
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return raw, meta, nil
|
return raw, meta, true, nil
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -8,6 +8,7 @@ import (
|
|||||||
|
|
||||||
"gitea.maximumdirect.net/ejr/feedkit/config"
|
"gitea.maximumdirect.net/ejr/feedkit/config"
|
||||||
"gitea.maximumdirect.net/ejr/feedkit/event"
|
"gitea.maximumdirect.net/ejr/feedkit/event"
|
||||||
|
fksources "gitea.maximumdirect.net/ejr/feedkit/sources"
|
||||||
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/providers/openmeteo"
|
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/providers/openmeteo"
|
||||||
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/sources/common"
|
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/sources/common"
|
||||||
"gitea.maximumdirect.net/ejr/weatherfeeder/standards"
|
"gitea.maximumdirect.net/ejr/weatherfeeder/standards"
|
||||||
@@ -15,13 +16,13 @@ import (
|
|||||||
|
|
||||||
// ObservationSource polls an Open-Meteo endpoint and emits one RAW Observation Event.
|
// ObservationSource polls an Open-Meteo endpoint and emits one RAW Observation Event.
|
||||||
type ObservationSource struct {
|
type ObservationSource struct {
|
||||||
http *common.HTTPSource
|
http *fksources.HTTPSource
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewObservationSource(cfg config.SourceConfig) (*ObservationSource, error) {
|
func NewObservationSource(cfg config.SourceConfig) (*ObservationSource, error) {
|
||||||
const driver = "openmeteo_observation"
|
const driver = "openmeteo_observation"
|
||||||
|
|
||||||
hs, err := common.NewHTTPSource(driver, cfg, "application/json")
|
hs, err := fksources.NewHTTPSource(driver, cfg, "application/json")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -34,10 +35,13 @@ func (s *ObservationSource) Name() string { return s.http.Name }
|
|||||||
func (s *ObservationSource) Kind() event.Kind { return event.Kind("observation") }
|
func (s *ObservationSource) Kind() event.Kind { return event.Kind("observation") }
|
||||||
|
|
||||||
func (s *ObservationSource) Poll(ctx context.Context) ([]event.Event, error) {
|
func (s *ObservationSource) Poll(ctx context.Context) ([]event.Event, error) {
|
||||||
raw, meta, err := s.fetchRaw(ctx)
|
raw, meta, changed, err := s.fetchRaw(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
if !changed {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
var effectiveAt *time.Time
|
var effectiveAt *time.Time
|
||||||
if !meta.ParsedTimestamp.IsZero() {
|
if !meta.ParsedTimestamp.IsZero() {
|
||||||
@@ -72,21 +76,24 @@ type openMeteoMeta struct {
|
|||||||
ParsedTimestamp time.Time `json:"-"`
|
ParsedTimestamp time.Time `json:"-"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *ObservationSource) fetchRaw(ctx context.Context) (json.RawMessage, openMeteoMeta, error) {
|
func (s *ObservationSource) fetchRaw(ctx context.Context) (json.RawMessage, openMeteoMeta, bool, error) {
|
||||||
raw, err := s.http.FetchJSON(ctx)
|
raw, changed, err := s.http.FetchJSONIfChanged(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, openMeteoMeta{}, err
|
return nil, openMeteoMeta{}, false, err
|
||||||
|
}
|
||||||
|
if !changed {
|
||||||
|
return nil, openMeteoMeta{}, false, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
var meta openMeteoMeta
|
var meta openMeteoMeta
|
||||||
if err := json.Unmarshal(raw, &meta); err != nil {
|
if err := json.Unmarshal(raw, &meta); err != nil {
|
||||||
// If metadata decode fails, still return raw; envelope will omit EffectiveAt.
|
// If metadata decode fails, still return raw; envelope will omit EffectiveAt.
|
||||||
return raw, openMeteoMeta{}, nil
|
return raw, openMeteoMeta{}, true, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
if t, err := openmeteo.ParseTime(meta.Current.Time, meta.Timezone, meta.UTCOffsetSeconds); err == nil {
|
if t, err := openmeteo.ParseTime(meta.Current.Time, meta.Timezone, meta.UTCOffsetSeconds); err == nil {
|
||||||
meta.ParsedTimestamp = t.UTC()
|
meta.ParsedTimestamp = t.UTC()
|
||||||
}
|
}
|
||||||
|
|
||||||
return raw, meta, nil
|
return raw, meta, true, nil
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -9,19 +9,20 @@ import (
|
|||||||
|
|
||||||
"gitea.maximumdirect.net/ejr/feedkit/config"
|
"gitea.maximumdirect.net/ejr/feedkit/config"
|
||||||
"gitea.maximumdirect.net/ejr/feedkit/event"
|
"gitea.maximumdirect.net/ejr/feedkit/event"
|
||||||
|
fksources "gitea.maximumdirect.net/ejr/feedkit/sources"
|
||||||
owcommon "gitea.maximumdirect.net/ejr/weatherfeeder/internal/providers/openweather"
|
owcommon "gitea.maximumdirect.net/ejr/weatherfeeder/internal/providers/openweather"
|
||||||
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/sources/common"
|
"gitea.maximumdirect.net/ejr/weatherfeeder/internal/sources/common"
|
||||||
"gitea.maximumdirect.net/ejr/weatherfeeder/standards"
|
"gitea.maximumdirect.net/ejr/weatherfeeder/standards"
|
||||||
)
|
)
|
||||||
|
|
||||||
type ObservationSource struct {
|
type ObservationSource struct {
|
||||||
http *common.HTTPSource
|
http *fksources.HTTPSource
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewObservationSource(cfg config.SourceConfig) (*ObservationSource, error) {
|
func NewObservationSource(cfg config.SourceConfig) (*ObservationSource, error) {
|
||||||
const driver = "openweather_observation"
|
const driver = "openweather_observation"
|
||||||
|
|
||||||
hs, err := common.NewHTTPSource(driver, cfg, "application/json")
|
hs, err := fksources.NewHTTPSource(driver, cfg, "application/json")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -42,10 +43,13 @@ func (s *ObservationSource) Poll(ctx context.Context) ([]event.Event, error) {
|
|||||||
return nil, fmt.Errorf("%s %q: %w", s.http.Driver, s.http.Name, err)
|
return nil, fmt.Errorf("%s %q: %w", s.http.Driver, s.http.Name, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
raw, meta, err := s.fetchRaw(ctx)
|
raw, meta, changed, err := s.fetchRaw(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
if !changed {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
var effectiveAt *time.Time
|
var effectiveAt *time.Time
|
||||||
if !meta.ParsedTimestamp.IsZero() {
|
if !meta.ParsedTimestamp.IsZero() {
|
||||||
@@ -75,20 +79,23 @@ type openWeatherMeta struct {
|
|||||||
ParsedTimestamp time.Time `json:"-"`
|
ParsedTimestamp time.Time `json:"-"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *ObservationSource) fetchRaw(ctx context.Context) (json.RawMessage, openWeatherMeta, error) {
|
func (s *ObservationSource) fetchRaw(ctx context.Context) (json.RawMessage, openWeatherMeta, bool, error) {
|
||||||
raw, err := s.http.FetchJSON(ctx)
|
raw, changed, err := s.http.FetchJSONIfChanged(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, openWeatherMeta{}, err
|
return nil, openWeatherMeta{}, false, err
|
||||||
|
}
|
||||||
|
if !changed {
|
||||||
|
return nil, openWeatherMeta{}, false, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
var meta openWeatherMeta
|
var meta openWeatherMeta
|
||||||
if err := json.Unmarshal(raw, &meta); err != nil {
|
if err := json.Unmarshal(raw, &meta); err != nil {
|
||||||
return raw, openWeatherMeta{}, nil
|
return raw, openWeatherMeta{}, true, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
if meta.Dt > 0 {
|
if meta.Dt > 0 {
|
||||||
meta.ParsedTimestamp = time.Unix(meta.Dt, 0).UTC()
|
meta.ParsedTimestamp = time.Unix(meta.Dt, 0).UTC()
|
||||||
}
|
}
|
||||||
|
|
||||||
return raw, meta, nil
|
return raw, meta, true, nil
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
// FILE: internal/model/alert.go
|
// FILE: model/alert.go
|
||||||
package model
|
package model
|
||||||
|
|
||||||
import "time"
|
import "time"
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
// FILE: internal/model/doc.go
|
// FILE: model/doc.go
|
||||||
// Package model defines weatherfeeder's canonical domain payload types.
|
// Package model defines weatherfeeder's canonical domain payload types.
|
||||||
//
|
//
|
||||||
// These structs are emitted as the Payload of canonical events (schemas "weather.*.vN").
|
// These structs are emitted as the Payload of canonical events (schemas "weather.*.vN").
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
// FILE: internal/model/forecast.go
|
// FILE: model/forecast.go
|
||||||
package model
|
package model
|
||||||
|
|
||||||
import "time"
|
import "time"
|
||||||
@@ -75,18 +75,8 @@ type WeatherForecastPeriod struct {
|
|||||||
// Like WeatherObservation, this is required; use an “unknown” WMOCode if unmappable.
|
// Like WeatherObservation, this is required; use an “unknown” WMOCode if unmappable.
|
||||||
ConditionCode WMOCode `json:"conditionCode"`
|
ConditionCode WMOCode `json:"conditionCode"`
|
||||||
|
|
||||||
// Provider-independent short text describing the conditions (normalized, if possible).
|
// Human-facing narrative summary for this period.
|
||||||
ConditionText string `json:"conditionText,omitempty"`
|
TextDescription string `json:"textDescription,omitempty"`
|
||||||
|
|
||||||
// Provider-specific “evidence” for troubleshooting mapping and drift.
|
|
||||||
ProviderRawDescription string `json:"providerRawDescription,omitempty"`
|
|
||||||
|
|
||||||
// Human-facing narrative. Not all providers supply rich text (Open-Meteo often won’t).
|
|
||||||
TextDescription string `json:"textDescription,omitempty"` // short phrase / summary
|
|
||||||
DetailedText string `json:"detailedText,omitempty"` // longer narrative, if available
|
|
||||||
|
|
||||||
// Provider-specific (legacy / transitional)
|
|
||||||
IconURL string `json:"iconUrl,omitempty"`
|
|
||||||
|
|
||||||
// Core predicted measurements (nullable; units align with WeatherObservation)
|
// Core predicted measurements (nullable; units align with WeatherObservation)
|
||||||
TemperatureC *float64 `json:"temperatureC,omitempty"`
|
TemperatureC *float64 `json:"temperatureC,omitempty"`
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
// FILE: internal/model/observation.go
|
// FILE: model/observation.go
|
||||||
package model
|
package model
|
||||||
|
|
||||||
import "time"
|
import "time"
|
||||||
@@ -11,18 +11,9 @@ type WeatherObservation struct {
|
|||||||
|
|
||||||
// Canonical internal representation (provider-independent).
|
// Canonical internal representation (provider-independent).
|
||||||
ConditionCode WMOCode `json:"conditionCode"`
|
ConditionCode WMOCode `json:"conditionCode"`
|
||||||
ConditionText string `json:"conditionText,omitempty"`
|
|
||||||
IsDay *bool `json:"isDay,omitempty"`
|
IsDay *bool `json:"isDay,omitempty"`
|
||||||
|
|
||||||
// Provider-specific “evidence” for troubleshooting mapping and drift.
|
|
||||||
ProviderRawDescription string `json:"providerRawDescription,omitempty"`
|
|
||||||
|
|
||||||
// Human-facing (legacy / transitional)
|
|
||||||
TextDescription string `json:"textDescription,omitempty"`
|
TextDescription string `json:"textDescription,omitempty"`
|
||||||
|
|
||||||
// Provider-specific (legacy / transitional)
|
|
||||||
IconURL string `json:"iconUrl,omitempty"`
|
|
||||||
|
|
||||||
// Core measurements (nullable)
|
// Core measurements (nullable)
|
||||||
TemperatureC *float64 `json:"temperatureC,omitempty"`
|
TemperatureC *float64 `json:"temperatureC,omitempty"`
|
||||||
DewpointC *float64 `json:"dewpointC,omitempty"`
|
DewpointC *float64 `json:"dewpointC,omitempty"`
|
||||||
@@ -32,22 +23,12 @@ type WeatherObservation struct {
|
|||||||
WindGustKmh *float64 `json:"windGustKmh,omitempty"`
|
WindGustKmh *float64 `json:"windGustKmh,omitempty"`
|
||||||
|
|
||||||
BarometricPressurePa *float64 `json:"barometricPressurePa,omitempty"`
|
BarometricPressurePa *float64 `json:"barometricPressurePa,omitempty"`
|
||||||
SeaLevelPressurePa *float64 `json:"seaLevelPressurePa,omitempty"`
|
|
||||||
VisibilityMeters *float64 `json:"visibilityMeters,omitempty"`
|
VisibilityMeters *float64 `json:"visibilityMeters,omitempty"`
|
||||||
|
|
||||||
RelativeHumidityPercent *float64 `json:"relativeHumidityPercent,omitempty"`
|
RelativeHumidityPercent *float64 `json:"relativeHumidityPercent,omitempty"`
|
||||||
ApparentTemperatureC *float64 `json:"apparentTemperatureC,omitempty"`
|
ApparentTemperatureC *float64 `json:"apparentTemperatureC,omitempty"`
|
||||||
|
|
||||||
ElevationMeters *float64 `json:"elevationMeters,omitempty"`
|
|
||||||
RawMessage string `json:"rawMessage,omitempty"`
|
|
||||||
|
|
||||||
PresentWeather []PresentWeather `json:"presentWeather,omitempty"`
|
PresentWeather []PresentWeather `json:"presentWeather,omitempty"`
|
||||||
CloudLayers []CloudLayer `json:"cloudLayers,omitempty"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type CloudLayer struct {
|
|
||||||
BaseMeters *float64 `json:"baseMeters,omitempty"`
|
|
||||||
Amount string `json:"amount,omitempty"`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type PresentWeather struct {
|
type PresentWeather struct {
|
||||||
|
|||||||
@@ -16,6 +16,7 @@ const (
|
|||||||
SchemaRawOpenWeatherCurrentV1 = "raw.openweather.current.v1"
|
SchemaRawOpenWeatherCurrentV1 = "raw.openweather.current.v1"
|
||||||
|
|
||||||
SchemaRawNWSHourlyForecastV1 = "raw.nws.hourly.forecast.v1"
|
SchemaRawNWSHourlyForecastV1 = "raw.nws.hourly.forecast.v1"
|
||||||
|
SchemaRawNWSNarrativeForecastV1 = "raw.nws.narrative.forecast.v1"
|
||||||
SchemaRawOpenMeteoHourlyForecastV1 = "raw.openmeteo.hourly.forecast.v1"
|
SchemaRawOpenMeteoHourlyForecastV1 = "raw.openmeteo.hourly.forecast.v1"
|
||||||
SchemaRawOpenWeatherHourlyForecastV1 = "raw.openweather.hourly.forecast.v1"
|
SchemaRawOpenWeatherHourlyForecastV1 = "raw.openweather.hourly.forecast.v1"
|
||||||
|
|
||||||
|
|||||||
@@ -21,7 +21,7 @@ type WMODescription struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// WMODescriptions is the canonical internal mapping of WMO code -> day/night text.
|
// WMODescriptions is the canonical internal mapping of WMO code -> day/night text.
|
||||||
// These are used to populate model.WeatherObservation.ConditionText.
|
// These are used to populate canonical text fields derived from WMO codes.
|
||||||
var WMODescriptions = map[model.WMOCode]WMODescription{
|
var WMODescriptions = map[model.WMOCode]WMODescription{
|
||||||
0: {Day: "Sunny", Night: "Clear"},
|
0: {Day: "Sunny", Night: "Clear"},
|
||||||
1: {Day: "Mainly Sunny", Night: "Mainly Clear"},
|
1: {Day: "Mainly Sunny", Night: "Mainly Clear"},
|
||||||
@@ -56,7 +56,8 @@ var WMODescriptions = map[model.WMOCode]WMODescription{
|
|||||||
// WMOText returns the canonical text description for a WMO code.
|
// WMOText returns the canonical text description for a WMO code.
|
||||||
// If isDay is nil, it prefers the Day description (if present).
|
// If isDay is nil, it prefers the Day description (if present).
|
||||||
//
|
//
|
||||||
// This is intended to be used by drivers after they set ConditionCode.
|
// This is intended to be used by drivers after they set ConditionCode when they
|
||||||
|
// need a human-readable description.
|
||||||
func WMOText(code model.WMOCode, isDay *bool) string {
|
func WMOText(code model.WMOCode, isDay *bool) string {
|
||||||
if code == model.WMOUnknown {
|
if code == model.WMOUnknown {
|
||||||
return "Unknown"
|
return "Unknown"
|
||||||
|
|||||||
Reference in New Issue
Block a user