Compare commits
22 Commits
e35b4f882d
...
develop
| Author | SHA1 | Date | |
|---|---|---|---|
| 010daa227d | |||
| 81fc110224 | |||
| 19d9724c9c | |||
| 2c1410f0b1 | |||
| 9810b4dc1b | |||
| 46614574c4 | |||
| b3fe58408e | |||
| 5ae42692b1 | |||
| 35f04e039d | |||
| 4f60f1c9a0 | |||
| cf788930e0 | |||
| f611545ae5 | |||
| 8820a9ef9f | |||
| 95b64b4d54 | |||
| b6b7ca44dc | |||
| e8e0731adc | |||
| 535061453b | |||
| 60ca038a7e | |||
| 515ad9a1dd | |||
| a72af59051 | |||
| e62fcb03bc | |||
| 7004e7fb3e |
@@ -25,10 +25,18 @@ Reliable, persistent visibility into which services need updating — data never
|
|||||||
- ✓ Proper UPSERT preserving tag assignments on re-webhook — Phase 1
|
- ✓ Proper UPSERT preserving tag assignments on re-webhook — Phase 1
|
||||||
- ✓ Request body size limits (1MB) on webhook and API endpoints — Phase 1
|
- ✓ Request body size limits (1MB) on webhook and API endpoints — Phase 1
|
||||||
- ✓ Test error handling uses t.Fatalf (no silent failures) — Phase 1
|
- ✓ Test error handling uses t.Fatalf (no silent failures) — Phase 1
|
||||||
|
- ✓ Store interface abstracts all persistence operations (9 methods) — Phase 2
|
||||||
|
- ✓ Server struct replaces package-level globals (db, mu, webhookSecret) — Phase 2
|
||||||
|
- ✓ Schema migrations via golang-migrate with embedded SQL files — Phase 2
|
||||||
|
- ✓ Per-test in-memory databases for isolated, parallel-safe testing — Phase 2
|
||||||
|
- ✓ PostgreSQL support via pgx/v5 with DATABASE_URL env var selection — Phase 3
|
||||||
|
- ✓ Separate PostgreSQL migration directory with baseline schema — Phase 3
|
||||||
|
- ✓ Docker Compose profiles for optional PostgreSQL service — Phase 3
|
||||||
|
- ✓ Cross-dialect UNIQUE constraint detection (case-insensitive) — Phase 3
|
||||||
|
|
||||||
### Active
|
### Active
|
||||||
|
|
||||||
- [ ] Add PostgreSQL support alongside SQLite (dual DB, user chooses)
|
- [ ] Bulk acknowledge (dismiss all, dismiss by group)
|
||||||
- [ ] Bulk acknowledge (dismiss all, dismiss by group)
|
- [ ] Bulk acknowledge (dismiss all, dismiss by group)
|
||||||
- [ ] Filtering and search across updates
|
- [ ] Filtering and search across updates
|
||||||
- [ ] In-dashboard new-update indicators (badge/counter/toast)
|
- [ ] In-dashboard new-update indicators (badge/counter/toast)
|
||||||
@@ -65,8 +73,12 @@ Reliable, persistent visibility into which services need updating — data never
|
|||||||
|
|
||||||
| Decision | Rationale | Outcome |
|
| Decision | Rationale | Outcome |
|
||||||
|----------|-----------|---------|
|
|----------|-----------|---------|
|
||||||
| Dual DB (SQLite + PostgreSQL) | SQLite is fine for simple setups, Postgres for users who want robustness | — Pending |
|
| Dual DB (SQLite + PostgreSQL) | SQLite is fine for simple setups, Postgres for users who want robustness | ✓ Phase 3 |
|
||||||
| Fix SQLite bugs before adding features | Data trust is the #1 priority; features on a broken foundation waste effort | — Pending |
|
| DATABASE_URL as DB selector | Presence of DATABASE_URL activates PostgreSQL; absence falls back to SQLite with DB_PATH | ✓ Phase 3 |
|
||||||
|
| pgx/v5/stdlib over native pgx | Keeps both stores on database/sql for identical constructor signatures | ✓ Phase 3 |
|
||||||
|
| Fix SQLite bugs before adding features | Data trust is the #1 priority; features on a broken foundation waste effort | ✓ Phase 1 |
|
||||||
|
| Store interface as persistence abstraction | 9 methods, no SQL in handlers; enables PostgreSQL swap without touching HTTP layer | ✓ Phase 2 |
|
||||||
|
| Server struct over package globals | Dependency injection via constructor; enables per-test isolated databases | ✓ Phase 2 |
|
||||||
| Defer auto-grouping to future milestone | Requires research into Docker socket / DIUN metadata; don't want to slow down stability fixes | — Pending |
|
| Defer auto-grouping to future milestone | Requires research into Docker socket / DIUN metadata; don't want to slow down stability fixes | — Pending |
|
||||||
| Defer DIUN bundling to future milestone | Significant scope; need stability and UX improvements first | — Pending |
|
| Defer DIUN bundling to future milestone | Significant scope; need stability and UX improvements first | — Pending |
|
||||||
|
|
||||||
@@ -88,4 +100,4 @@ This document evolves at phase transitions and milestone boundaries.
|
|||||||
4. Update Context with current state
|
4. Update Context with current state
|
||||||
|
|
||||||
---
|
---
|
||||||
*Last updated: 2026-03-23 after Phase 1 completion*
|
*Last updated: 2026-03-24 after Phase 3 completion*
|
||||||
|
|||||||
@@ -17,14 +17,14 @@ Requirements for this milestone. Each maps to roadmap phases.
|
|||||||
### Backend Refactor
|
### Backend Refactor
|
||||||
|
|
||||||
- [x] **REFAC-01**: Database operations are behind a Store interface with separate SQLite and PostgreSQL implementations
|
- [x] **REFAC-01**: Database operations are behind a Store interface with separate SQLite and PostgreSQL implementations
|
||||||
- [ ] **REFAC-02**: Package-level global state (db, mu, webhookSecret) is replaced with a Server struct that holds dependencies
|
- [x] **REFAC-02**: Package-level global state (db, mu, webhookSecret) is replaced with a Server struct that holds dependencies
|
||||||
- [x] **REFAC-03**: Schema migrations use golang-migrate with separate migration directories per dialect (sqlite/, postgres/)
|
- [x] **REFAC-03**: Schema migrations use golang-migrate with separate migration directories per dialect (sqlite/, postgres/)
|
||||||
|
|
||||||
### Database
|
### Database
|
||||||
|
|
||||||
- [ ] **DB-01**: PostgreSQL is supported as an alternative to SQLite via pgx v5 driver
|
- [x] **DB-01**: PostgreSQL is supported as an alternative to SQLite via pgx v5 driver
|
||||||
- [ ] **DB-02**: Database backend is selected via DATABASE_URL env var (present = PostgreSQL, absent = SQLite with DB_PATH)
|
- [x] **DB-02**: Database backend is selected via DATABASE_URL env var (present = PostgreSQL, absent = SQLite with DB_PATH)
|
||||||
- [ ] **DB-03**: Existing SQLite users can upgrade without data loss (baseline migration represents current schema)
|
- [x] **DB-03**: Existing SQLite users can upgrade without data loss (baseline migration represents current schema)
|
||||||
|
|
||||||
### Bulk Actions
|
### Bulk Actions
|
||||||
|
|
||||||
@@ -96,11 +96,11 @@ Which phases cover which requirements. Updated during roadmap creation.
|
|||||||
| DATA-03 | Phase 1 | Complete |
|
| DATA-03 | Phase 1 | Complete |
|
||||||
| DATA-04 | Phase 1 | Complete |
|
| DATA-04 | Phase 1 | Complete |
|
||||||
| REFAC-01 | Phase 2 | Complete |
|
| REFAC-01 | Phase 2 | Complete |
|
||||||
| REFAC-02 | Phase 2 | Pending |
|
| REFAC-02 | Phase 2 | Complete |
|
||||||
| REFAC-03 | Phase 2 | Complete |
|
| REFAC-03 | Phase 2 | Complete |
|
||||||
| DB-01 | Phase 3 | Pending |
|
| DB-01 | Phase 3 | Complete |
|
||||||
| DB-02 | Phase 3 | Pending |
|
| DB-02 | Phase 3 | Complete |
|
||||||
| DB-03 | Phase 3 | Pending |
|
| DB-03 | Phase 3 | Complete |
|
||||||
| BULK-01 | Phase 4 | Pending |
|
| BULK-01 | Phase 4 | Pending |
|
||||||
| BULK-02 | Phase 4 | Pending |
|
| BULK-02 | Phase 4 | Pending |
|
||||||
| SRCH-01 | Phase 4 | Pending |
|
| SRCH-01 | Phase 4 | Pending |
|
||||||
|
|||||||
@@ -13,7 +13,7 @@ This milestone restores data trust and then extends the foundation. Phase 1 fixe
|
|||||||
Decimal phases appear between their surrounding integers in numeric order.
|
Decimal phases appear between their surrounding integers in numeric order.
|
||||||
|
|
||||||
- [ ] **Phase 1: Data Integrity** - Fix active SQLite bugs that silently delete tag assignments and suppress test failures
|
- [x] **Phase 1: Data Integrity** - Fix active SQLite bugs that silently delete tag assignments and suppress test failures (completed 2026-03-23)
|
||||||
- [ ] **Phase 2: Backend Refactor** - Replace global state with Store interface + Server struct; prerequisite for PostgreSQL
|
- [x] **Phase 2: Backend Refactor** - Replace global state with Store interface + Server struct; prerequisite for PostgreSQL (completed 2026-03-24)
|
||||||
- [ ] **Phase 3: PostgreSQL Support** - Add PostgreSQL as an alternative backend via DATABASE_URL, with versioned migrations
|
- [x] **Phase 3: PostgreSQL Support** - Add PostgreSQL as an alternative backend via DATABASE_URL, with versioned migrations (completed 2026-03-24)
|
||||||
- [ ] **Phase 4: UX Improvements** - Bulk dismiss, search/filter, new-update indicators, and accessibility fixes
|
- [ ] **Phase 4: UX Improvements** - Bulk dismiss, search/filter, new-update indicators, and accessibility fixes
|
||||||
|
|
||||||
@@ -47,7 +47,7 @@ Plans:
|
|||||||
|
|
||||||
Plans:
|
Plans:
|
||||||
- [x] 02-01-PLAN.md — Create Store interface (9 methods), SQLiteStore implementation, golang-migrate migration infrastructure with embedded SQL files
|
- [x] 02-01-PLAN.md — Create Store interface (9 methods), SQLiteStore implementation, golang-migrate migration infrastructure with embedded SQL files
|
||||||
- [ ] 02-02-PLAN.md — Convert handlers to Server struct methods, remove globals, rewrite tests for per-test isolated databases, update main.go wiring
|
- [x] 02-02-PLAN.md — Convert handlers to Server struct methods, remove globals, rewrite tests for per-test isolated databases, update main.go wiring
|
||||||
|
|
||||||
### Phase 3: PostgreSQL Support
|
### Phase 3: PostgreSQL Support
|
||||||
**Goal**: Users running PostgreSQL infrastructure can point DiunDashboard at a Postgres database via DATABASE_URL and the dashboard works identically to the SQLite deployment
|
**Goal**: Users running PostgreSQL infrastructure can point DiunDashboard at a Postgres database via DATABASE_URL and the dashboard works identically to the SQLite deployment
|
||||||
@@ -58,8 +58,11 @@ Plans:
|
|||||||
2. A fresh PostgreSQL deployment receives all schema tables via automatic migration on startup
|
2. A fresh PostgreSQL deployment receives all schema tables via automatic migration on startup
|
||||||
3. An existing SQLite user can upgrade to the new binary without any data loss or manual schema changes
|
3. An existing SQLite user can upgrade to the new binary without any data loss or manual schema changes
|
||||||
4. The app can be run with Docker Compose using an optional postgres service profile
|
4. The app can be run with Docker Compose using an optional postgres service profile
|
||||||
**Plans**: TBD
|
**Plans**: 2 plans
|
||||||
**UI hint**: no
|
|
||||||
|
Plans:
|
||||||
|
- [x] 03-01-PLAN.md — Create PostgresStore (9 Store methods), PostgreSQL migration files, rename RunMigrations to RunSQLiteMigrations, add RunPostgresMigrations
|
||||||
|
- [x] 03-02-PLAN.md — Wire DATABASE_URL branching in main.go, fix cross-dialect UNIQUE detection, add Docker Compose postgres profiles, create build-tagged test helper
|
||||||
|
|
||||||
### Phase 4: UX Improvements
|
### Phase 4: UX Improvements
|
||||||
**Goal**: Users can manage a large list of updates efficiently — dismissing many at once, finding specific images quickly, and seeing new arrivals without manual refreshes
|
**Goal**: Users can manage a large list of updates efficiently — dismissing many at once, finding specific images quickly, and seeing new arrivals without manual refreshes
|
||||||
@@ -72,9 +75,14 @@ Plans:
|
|||||||
4. A badge/counter showing pending update count is always visible; the browser tab title reflects it (e.g., "DiunDash (3)")
|
4. A badge/counter showing pending update count is always visible; the browser tab title reflects it (e.g., "DiunDash (3)")
|
||||||
5. New updates arriving during active polling trigger a visible in-page toast, and updates seen for the first time since the user's last visit are visually highlighted
|
5. New updates arriving during active polling trigger a visible in-page toast, and updates seen for the first time since the user's last visit are visually highlighted
|
||||||
6. The light/dark theme toggle is available and respects system preference; the drag handle for tag reordering is always visible without hover
|
6. The light/dark theme toggle is available and respects system preference; the drag handle for tag reordering is always visible without hover
|
||||||
**Plans**: TBD
|
**Plans**: 3 plans
|
||||||
**UI hint**: yes
|
**UI hint**: yes
|
||||||
|
|
||||||
|
Plans:
|
||||||
|
- [ ] 04-01-PLAN.md — Backend bulk dismiss: extend Store interface with AcknowledgeAll + AcknowledgeByTag, implement in both stores, add HTTP handlers and tests
|
||||||
|
- [ ] 04-02-PLAN.md — Frontend search/filter/sort controls, theme toggle, drag handle visibility fix
|
||||||
|
- [ ] 04-03-PLAN.md — Frontend bulk dismiss UI, update indicators (badge, tab title, toast, new-since-last-visit highlight)
|
||||||
|
|
||||||
## Progress
|
## Progress
|
||||||
|
|
||||||
**Execution Order:**
|
**Execution Order:**
|
||||||
@@ -83,6 +91,6 @@ Phases execute in numeric order: 1 → 2 → 3 → 4
|
|||||||
| Phase | Plans Complete | Status | Completed |
|
| Phase | Plans Complete | Status | Completed |
|
||||||
|-------|----------------|--------|-----------|
|
|-------|----------------|--------|-----------|
|
||||||
| 1. Data Integrity | 0/2 | Not started | - |
|
| 1. Data Integrity | 2/2 | Complete | 2026-03-23 |
|
||||||
| 2. Backend Refactor | 0/2 | Not started | - |
|
| 2. Backend Refactor | 2/2 | Complete | 2026-03-24 |
|
||||||
| 3. PostgreSQL Support | 0/? | Not started | - |
|
| 3. PostgreSQL Support | 2/2 | Complete | 2026-03-24 |
|
||||||
| 4. UX Improvements | 0/? | Not started | - |
|
| 4. UX Improvements | 0/3 | Not started | - |
|
||||||
|
|||||||
@@ -2,14 +2,14 @@
|
|||||||
gsd_state_version: 1.0
|
gsd_state_version: 1.0
|
||||||
milestone: v1.0
|
milestone: v1.0
|
||||||
milestone_name: milestone
|
milestone_name: milestone
|
||||||
status: Ready to execute
|
status: Ready to plan
|
||||||
stopped_at: Completed 02-01-PLAN.md (Store interface, SQLiteStore, migration infrastructure)
|
stopped_at: Phase 4 context gathered
|
||||||
last_updated: "2026-03-23T20:59:13.329Z"
|
last_updated: "2026-03-24T08:28:55.644Z"
|
||||||
progress:
|
progress:
|
||||||
total_phases: 4
|
total_phases: 4
|
||||||
completed_phases: 1
|
completed_phases: 3
|
||||||
total_plans: 4
|
total_plans: 6
|
||||||
completed_plans: 3
|
completed_plans: 6
|
||||||
---
|
---
|
||||||
|
|
||||||
# Project State
|
# Project State
|
||||||
@@ -19,12 +19,12 @@ progress:
|
|||||||
See: .planning/PROJECT.md (updated 2026-03-23)
|
See: .planning/PROJECT.md (updated 2026-03-23)
|
||||||
|
|
||||||
**Core value:** Reliable, persistent visibility into which services need updating — data never disappears, and the dashboard is the one place you trust to show the full picture.
|
**Core value:** Reliable, persistent visibility into which services need updating — data never disappears, and the dashboard is the one place you trust to show the full picture.
|
||||||
**Current focus:** Phase 02 — backend-refactor
|
**Current focus:** Phase 04 — ux-improvements
|
||||||
|
|
||||||
## Current Position
|
## Current Position
|
||||||
|
|
||||||
Phase: 02 (backend-refactor) — EXECUTING
|
Phase: 4
|
||||||
Plan: 2 of 2
|
Plan: Not started
|
||||||
|
|
||||||
## Performance Metrics
|
## Performance Metrics
|
||||||
|
|
||||||
@@ -49,6 +49,9 @@ Plan: 2 of 2
|
|||||||
| Phase 01 P01 | 2 | 2 tasks | 2 files |
|
| Phase 01 P01 | 2 | 2 tasks | 2 files |
|
||||||
| Phase 01-data-integrity P02 | 7 | 2 tasks | 2 files |
|
| Phase 01-data-integrity P02 | 7 | 2 tasks | 2 files |
|
||||||
| Phase 02-backend-refactor P01 | 7min | 2 tasks | 7 files |
|
| Phase 02-backend-refactor P01 | 7min | 2 tasks | 7 files |
|
||||||
|
| Phase 02-backend-refactor P02 | 3min | 2 tasks | 4 files |
|
||||||
|
| Phase 03-postgresql-support P01 | 3min | 2 tasks | 7 files |
|
||||||
|
| Phase 03-postgresql-support P02 | 2min | 2 tasks | 5 files |
|
||||||
|
|
||||||
## Accumulated Context
|
## Accumulated Context
|
||||||
|
|
||||||
@@ -67,6 +70,13 @@ Recent decisions affecting current work:
|
|||||||
- [Phase 01-data-integrity]: Oversized body tests need valid JSON prefix so decoder reads past 1MB limit; all-x bytes fail at byte 1 before MaxBytesReader triggers
|
- [Phase 01-data-integrity]: Oversized body tests need valid JSON prefix so decoder reads past 1MB limit; all-x bytes fail at byte 1 before MaxBytesReader triggers
|
||||||
- [Phase 02-backend-refactor]: Store interface with 9 methods is the persistence abstraction; SQLiteStore holds *sql.DB and sync.Mutex as struct fields (not package globals)
|
- [Phase 02-backend-refactor]: Store interface with 9 methods is the persistence abstraction; SQLiteStore holds *sql.DB and sync.Mutex as struct fields (not package globals)
|
||||||
- [Phase 02-backend-refactor]: golang-migrate v4.19.1 database/sqlite sub-package confirmed to use modernc.org/sqlite (no CGO); single 0001 baseline migration uses CREATE TABLE IF NOT EXISTS for backward compatibility
|
- [Phase 02-backend-refactor]: golang-migrate v4.19.1 database/sqlite sub-package confirmed to use modernc.org/sqlite (no CGO); single 0001 baseline migration uses CREATE TABLE IF NOT EXISTS for backward compatibility
|
||||||
|
- [Phase 02-backend-refactor]: Option B for test store access: internal helpers in export_test.go (TestUpsertEvent, TestGetUpdatesMap) instead of exported Store() accessor - keeps store field unexported
|
||||||
|
- [Phase 02-backend-refactor]: NewTestServer pattern: each test gets its own in-memory SQLite DB (RunMigrations + NewSQLiteStore + NewServer) - eliminates shared global state between tests
|
||||||
|
- [Phase 03-postgresql-support]: PostgresStore uses *sql.DB via pgx/v5/stdlib adapter with no mutex; TEXT timestamps match SQLiteStore scan logic
|
||||||
|
- [Phase 03-postgresql-support]: CreateTag uses RETURNING id in PostgresStore (pgx does not support LastInsertId); AssignTag uses ON CONFLICT DO UPDATE
|
||||||
|
- [Phase 03-postgresql-support]: DATABASE_URL presence-check activates PostgreSQL; absent falls back to SQLite — simpler UX than a separate DB_DRIVER var
|
||||||
|
- [Phase 03-postgresql-support]: postgres Docker service uses profiles: [postgres] with required: false depends_on — default compose up unchanged, SQLite only
|
||||||
|
- [Phase 03-postgresql-support]: UNIQUE constraint detection uses strings.ToLower for case-insensitive matching across SQLite (uppercase UNIQUE) and PostgreSQL (lowercase unique)
|
||||||
|
|
||||||
### Pending Todos
|
### Pending Todos
|
||||||
|
|
||||||
@@ -79,6 +89,6 @@ None yet.
|
|||||||
|
|
||||||
## Session Continuity
|
## Session Continuity
|
||||||
|
|
||||||
Last session: 2026-03-23T20:59:13.327Z
|
Last session: 2026-03-24T08:28:55.642Z
|
||||||
Stopped at: Completed 02-01-PLAN.md (Store interface, SQLiteStore, migration infrastructure)
|
Stopped at: Phase 4 context gathered
|
||||||
Resume file: None
|
Resume file: .planning/phases/04-ux-improvements/04-CONTEXT.md
|
||||||
|
|||||||
125
.planning/phases/02-backend-refactor/02-02-SUMMARY.md
Normal file
125
.planning/phases/02-backend-refactor/02-02-SUMMARY.md
Normal file
@@ -0,0 +1,125 @@
|
|||||||
|
---
|
||||||
|
phase: 02-backend-refactor
|
||||||
|
plan: "02"
|
||||||
|
subsystem: http-handlers
|
||||||
|
tags: [server-struct, dependency-injection, store-interface, test-isolation, in-memory-sqlite, refactor]
|
||||||
|
|
||||||
|
# Dependency graph
|
||||||
|
requires:
|
||||||
|
- phase: 02-01
|
||||||
|
provides: Store interface (9 methods), SQLiteStore, RunMigrations
|
||||||
|
provides:
|
||||||
|
- Server struct with Store field and webhookSecret field
|
||||||
|
- NewServer constructor wiring Store and secret
|
||||||
|
- All 6 handlers converted to *Server methods calling s.store.X()
|
||||||
|
- NewTestServer / NewTestServerWithSecret helpers for isolated per-test databases
|
||||||
|
- main.go wiring: sql.Open -> RunMigrations -> NewSQLiteStore -> NewServer -> routes
|
||||||
|
affects:
|
||||||
|
- 03-postgresql (PostgreSQLStore will implement same Store interface; Server struct accepts any Store)
|
||||||
|
|
||||||
|
# Tech tracking
|
||||||
|
tech-stack:
|
||||||
|
added: []
|
||||||
|
patterns:
|
||||||
|
- Server struct pattern - all handler dependencies injected via constructor, no package-level globals
|
||||||
|
- export_test.go internal helpers (TestUpsertEvent, TestGetUpdatesMap) - access unexported fields without exposing Store accessor
|
||||||
|
- Per-test in-memory SQLite database via NewTestServer() - eliminates shared state between tests
|
||||||
|
- NewTestServerWithSecret for auth-enabled test scenarios
|
||||||
|
|
||||||
|
key-files:
|
||||||
|
created: []
|
||||||
|
modified:
|
||||||
|
- pkg/diunwebhook/diunwebhook.go
|
||||||
|
- pkg/diunwebhook/export_test.go
|
||||||
|
- pkg/diunwebhook/diunwebhook_test.go
|
||||||
|
- cmd/diunwebhook/main.go
|
||||||
|
|
||||||
|
key-decisions:
|
||||||
|
- "Option B for test store access: internal helpers in export_test.go (TestUpsertEvent, TestGetUpdatesMap) instead of exported Store() accessor - keeps store field unexported"
|
||||||
|
- "t.Errorf used inside goroutines in TestConcurrentUpdateEvent (t.Fatalf is not safe from non-test goroutines)"
|
||||||
|
- "_ modernc.org/sqlite blank import moved from diunwebhook.go to main.go and migrate.go - driver registration happens where needed"
|
||||||
|
|
||||||
|
patterns-established:
|
||||||
|
- "Server struct: HTTP handlers as methods on *Server, all deps injected at construction"
|
||||||
|
- "NewTestServer pattern: each test creates its own in-memory SQLite DB via RunMigrations + NewSQLiteStore + NewServer"
|
||||||
|
- "export_test.go internal methods: (s *Server) TestUpsertEvent / TestGetUpdatesMap access s.store without exporting Store field"
|
||||||
|
|
||||||
|
requirements-completed: [REFAC-01, REFAC-02, REFAC-03]
|
||||||
|
|
||||||
|
# Metrics
|
||||||
|
duration: 3min
|
||||||
|
completed: "2026-03-23"
|
||||||
|
---
|
||||||
|
|
||||||
|
# Phase 02 Plan 02: Server Struct Refactor and Test Isolation Summary
|
||||||
|
|
||||||
|
**Server struct with Store injection, globals removed, all 6 handlers as *Server methods calling s.store.X(), per-test in-memory databases via NewTestServer**
|
||||||
|
|
||||||
|
## Performance
|
||||||
|
|
||||||
|
- **Duration:** ~3 min
|
||||||
|
- **Started:** 2026-03-23T21:02:53Z
|
||||||
|
- **Completed:** 2026-03-23T21:05:09Z
|
||||||
|
- **Tasks:** 2
|
||||||
|
- **Files modified:** 4
|
||||||
|
|
||||||
|
## Accomplishments
|
||||||
|
|
||||||
|
- Removed all package-level globals (db, mu, webhookSecret) from diunwebhook.go
|
||||||
|
- Removed InitDB, SetWebhookSecret, UpdateEvent, GetUpdates functions (replaced by Store and Server)
|
||||||
|
- Added Server struct with store Store and webhookSecret string fields
|
||||||
|
- Added NewServer(store Store, webhookSecret string) *Server constructor
|
||||||
|
- Converted all 6 handler functions to *Server methods using s.store.X() for all persistence
|
||||||
|
- Rewrote export_test.go: NewTestServer, NewTestServerWithSecret, TestUpsertEvent, TestGetUpdatesMap helpers
|
||||||
|
- Rewrote diunwebhook_test.go: every test creates its own isolated in-memory database (no shared global state)
|
||||||
|
- Updated main.go: sql.Open -> RunMigrations -> NewSQLiteStore -> NewServer -> route registration
|
||||||
|
- All 35 tests pass against the new Server/Store architecture
|
||||||
|
|
||||||
|
## Task Commits
|
||||||
|
|
||||||
|
Each task was committed atomically:
|
||||||
|
|
||||||
|
1. **Task 1: Convert diunwebhook.go to Server struct and update main.go** - `78543d7` (feat)
|
||||||
|
2. **Task 2: Rewrite export_test.go and update all tests for Server/Store** - `e35b4f8` (test)
|
||||||
|
|
||||||
|
## Files Created/Modified
|
||||||
|
|
||||||
|
- `pkg/diunwebhook/diunwebhook.go` - Server struct, NewServer constructor, all 6 handlers as *Server methods; globals and standalone functions removed
|
||||||
|
- `pkg/diunwebhook/export_test.go` - NewTestServer, NewTestServerWithSecret, (s *Server) TestUpsertEvent, TestGetUpdates, TestGetUpdatesMap
|
||||||
|
- `pkg/diunwebhook/diunwebhook_test.go` - All 35 tests rewritten to use NewTestServer per-test; no shared state; no TestMain
|
||||||
|
- `cmd/diunwebhook/main.go` - Full replacement: sql.Open -> RunMigrations -> NewSQLiteStore -> NewServer -> route registration with srv.XHandler
|
||||||
|
|
||||||
|
## Decisions Made
|
||||||
|
|
||||||
|
- Test store access via internal helper methods in export_test.go (Option B) — avoids exposing Store field publicly while still letting tests call UpsertEvent/GetUpdates
|
||||||
|
- t.Errorf used inside goroutine in TestConcurrentUpdateEvent — t.Fatalf is not safe from non-test goroutines (pre-existing issue resolved)
|
||||||
|
- _ "modernc.org/sqlite" blank import moved to main.go (and already in migrate.go) — driver registered where *sql.DB is opened
|
||||||
|
|
||||||
|
## Deviations from Plan
|
||||||
|
|
||||||
|
None - plan executed exactly as written.
|
||||||
|
|
||||||
|
## Known Stubs
|
||||||
|
|
||||||
|
None.
|
||||||
|
|
||||||
|
## Self-Check: PASSED
|
||||||
|
|
||||||
|
- pkg/diunwebhook/diunwebhook.go: FOUND
|
||||||
|
- pkg/diunwebhook/export_test.go: FOUND
|
||||||
|
- pkg/diunwebhook/diunwebhook_test.go: FOUND
|
||||||
|
- cmd/diunwebhook/main.go: FOUND
|
||||||
|
- Commit 78543d7: FOUND
|
||||||
|
- Commit e35b4f8: FOUND
|
||||||
|
- All 35 tests pass: VERIFIED (go test -v -count=1 ./pkg/diunwebhook/)
|
||||||
|
|
||||||
|
## Next Phase Readiness
|
||||||
|
|
||||||
|
- Server struct accepts any Store implementation — PostgreSQL store can be introduced in Phase 3 without touching handlers
|
||||||
|
- RunMigrations called in main.go before store creation — Phase 3 just needs to add a postgres migration variant
|
||||||
|
- Per-test isolation via NewTestServer is the established pattern — Phase 3 tests can follow the same approach
|
||||||
|
- All acceptance criteria verified: no globals, no SQL in handlers, s.store.X() pattern throughout, main.go wiring complete
|
||||||
|
|
||||||
|
---
|
||||||
|
*Phase: 02-backend-refactor*
|
||||||
|
*Completed: 2026-03-23*
|
||||||
111
.planning/phases/02-backend-refactor/02-VERIFICATION.md
Normal file
111
.planning/phases/02-backend-refactor/02-VERIFICATION.md
Normal file
@@ -0,0 +1,111 @@
|
|||||||
|
---
|
||||||
|
phase: 02-backend-refactor
|
||||||
|
verified: 2026-03-24T08:41:00Z
|
||||||
|
status: passed
|
||||||
|
score: 9/9 must-haves verified
|
||||||
|
re_verification: false
|
||||||
|
---
|
||||||
|
|
||||||
|
# Phase 2: Backend Refactor Verification Report
|
||||||
|
|
||||||
|
**Phase Goal:** The codebase has a clean Store interface and Server struct so the SQLite implementation can be swapped without touching HTTP handlers, enabling parallel test execution and PostgreSQL support
|
||||||
|
**Verified:** 2026-03-24T08:41:00Z
|
||||||
|
**Status:** passed
|
||||||
|
**Re-verification:** No — initial verification
|
||||||
|
|
||||||
|
## Goal Achievement
|
||||||
|
|
||||||
|
### Observable Truths
|
||||||
|
|
||||||
|
| # | Truth | Status | Evidence |
|
||||||
|
|---|-------|--------|----------|
|
||||||
|
| 1 | All existing tests pass with zero behavior change after the refactor | VERIFIED | `go test ./pkg/diunwebhook/` — 34 tests, 34 PASS, 0 FAIL, 0.046s |
|
||||||
|
| 2 | HTTP handlers contain no SQL — all persistence goes through named Store methods | VERIFIED | `diunwebhook.go` contains 9 `s.store.X()` calls; grep for `db.Exec`, `db.Query`, `db.QueryRow` in handlers returns empty |
|
||||||
|
| 3 | Package-level global variables (db, mu, webhookSecret) no longer exist | VERIFIED | grep for `var db`, `var mu`, `var webhookSecret` in `diunwebhook.go` returns empty |
|
||||||
|
| 4 | Schema changes are applied via versioned migration files, not ad-hoc DDL in application code | VERIFIED | `migrate.go` uses golang-migrate + embed.FS; `0001_initial_schema.up.sql` contains full schema DDL; `InitDB` function removed |
|
||||||
|
| 5 | Store interface defines all 9 persistence operations with no SQL in the contract | VERIFIED | `store.go` exports `Store` interface with exactly: UpsertEvent, GetUpdates, AcknowledgeUpdate, ListTags, CreateTag, DeleteTag, AssignTag, UnassignTag, TagExists |
|
||||||
|
| 6 | SQLiteStore implements every Store method using raw SQL and a sync.Mutex | VERIFIED | `sqlite_store.go` contains all 9 method implementations with mutex guards on write operations |
|
||||||
|
| 7 | RunMigrations applies embedded SQL files via golang-migrate and tolerates ErrNoChange | VERIFIED | `migrate.go` line 32: `!errors.Is(err, migrate.ErrNoChange)` guard present; uses `iofs.New` + `sqlitemigrate.WithInstance` |
|
||||||
|
| 8 | main.go constructs SQLiteStore, runs migrations, builds Server, and registers routes | VERIFIED | `main.go` chain: `sql.Open` → `diun.RunMigrations(db)` → `diun.NewSQLiteStore(db)` → `diun.NewServer(store, secret)` → `srv.WebhookHandler` etc. |
|
||||||
|
| 9 | Each test gets its own in-memory database via NewTestServer (no shared global state) | VERIFIED | `export_test.go` exports `NewTestServer()` and `NewTestServerWithSecret()`; every test function calls one of these; `diun.UpdatesReset()` and `func TestMain` are absent from test file |
|
||||||
|
|
||||||
|
**Score:** 9/9 truths verified
|
||||||
|
|
||||||
|
### Required Artifacts
|
||||||
|
|
||||||
|
| Artifact | Expected | Status | Details |
|
||||||
|
|----------|----------|--------|---------|
|
||||||
|
| `pkg/diunwebhook/store.go` | Store interface with 9 methods | VERIFIED | 15 lines; exports `Store` with all 9 method signatures; no SQL, no `*sql.DB` in contract |
|
||||||
|
| `pkg/diunwebhook/sqlite_store.go` | SQLiteStore struct implementing Store | VERIFIED | 184 lines; `SQLiteStore` struct; `NewSQLiteStore` sets `MaxOpenConns(1)` and `PRAGMA foreign_keys = ON`; all 9 methods implemented with correct SQL and mutex |
|
||||||
|
| `pkg/diunwebhook/migrate.go` | RunMigrations function using golang-migrate + embed.FS | VERIFIED | 37 lines; `//go:embed migrations/sqlite`; `RunMigrations(db *sql.DB) error`; uses `database/sqlite` (not `sqlite3`, no CGO); ErrNoChange guard present |
|
||||||
|
| `pkg/diunwebhook/migrations/sqlite/0001_initial_schema.up.sql` | Baseline schema DDL | VERIFIED | Creates all 3 tables with `CREATE TABLE IF NOT EXISTS`; includes `acknowledged_at TEXT`; `ON DELETE CASCADE` on tag_assignments |
|
||||||
|
| `pkg/diunwebhook/migrations/sqlite/0001_initial_schema.down.sql` | Rollback DDL | VERIFIED | `DROP TABLE IF EXISTS` for all 3 tables in dependency order |
|
||||||
|
| `pkg/diunwebhook/diunwebhook.go` | Server struct with handler methods | VERIFIED | Contains `Server` struct, `NewServer`, and all 6 handler methods as `(s *Server)` receivers; no package-level globals; no SQL |
|
||||||
|
| `pkg/diunwebhook/export_test.go` | NewTestServer helper for tests | VERIFIED | Exports `NewTestServer()`, `NewTestServerWithSecret()`, `TestUpsertEvent()`, `TestGetUpdates()`, `TestGetUpdatesMap()` |
|
||||||
|
| `cmd/diunwebhook/main.go` | Wiring: sql.Open -> RunMigrations -> NewSQLiteStore -> NewServer -> route registration | VERIFIED | Full wiring chain present; `srv.WebhookHandler` method references (not package functions) |
|
||||||
|
|
||||||
|
### Key Link Verification
|
||||||
|
|
||||||
|
| From | To | Via | Status | Details |
|
||||||
|
|------|----|-----|--------|---------|
|
||||||
|
| `pkg/diunwebhook/diunwebhook.go` | `pkg/diunwebhook/store.go` | `Server.store` field of type `Store` | VERIFIED | `s.store.UpsertEvent`, `s.store.GetUpdates`, `s.store.AcknowledgeUpdate`, `s.store.ListTags`, `s.store.CreateTag`, `s.store.DeleteTag`, `s.store.TagExists`, `s.store.AssignTag`, `s.store.UnassignTag` — 9 distinct call sites confirmed |
|
||||||
|
| `cmd/diunwebhook/main.go` | `pkg/diunwebhook/sqlite_store.go` | `diun.NewSQLiteStore(db)` | VERIFIED | Line 33 of main.go |
|
||||||
|
| `cmd/diunwebhook/main.go` | `pkg/diunwebhook/migrate.go` | `diun.RunMigrations(db)` | VERIFIED | Line 29 of main.go |
|
||||||
|
| `pkg/diunwebhook/diunwebhook_test.go` | `pkg/diunwebhook/export_test.go` | `diun.NewTestServer()` | VERIFIED | 14+ call sites in test file; `NewTestServerWithSecret` used for auth tests |
|
||||||
|
| `pkg/diunwebhook/sqlite_store.go` | `pkg/diunwebhook/store.go` | interface implementation | VERIFIED | All 9 `func (s *SQLiteStore)` method signatures match `Store` interface; `go build ./pkg/diunwebhook/` exits 0 |
|
||||||
|
| `pkg/diunwebhook/migrate.go` | `pkg/diunwebhook/migrations/sqlite/` | `//go:embed migrations/sqlite` | VERIFIED | Embed directive present on line 14 of migrate.go; both migration files present in directory |
|
||||||
|
|
||||||
|
### Data-Flow Trace (Level 4)
|
||||||
|
|
||||||
|
Not applicable. This phase refactors infrastructure — no UI components or data-rendering artifacts were introduced. All artifacts are Go packages (storage layer, HTTP handlers, migration runner). Data flow correctness is validated by the test suite (34 tests, all passing).
|
||||||
|
|
||||||
|
### Behavioral Spot-Checks
|
||||||
|
|
||||||
|
| Behavior | Command | Result | Status |
|
||||||
|
|----------|---------|--------|--------|
|
||||||
|
| All 34 tests pass | `go test -v -count=1 ./pkg/diunwebhook/` | 34 PASS, 0 FAIL, ok 0.046s | PASS |
|
||||||
|
| Binary compiles | `go build ./cmd/diunwebhook/` | exits 0 | PASS |
|
||||||
|
| go vet passes | `go vet ./...` | exits 0 | PASS |
|
||||||
|
| Module exports expected functions | `store.go` contains `Store` interface | confirmed | PASS |
|
||||||
|
| No CGO sqlite dependency | grep `mattn/go-sqlite3` in go.mod | absent (mattn/go-isatty is an unrelated terminal-detection indirect dep) | PASS |
|
||||||
|
|
||||||
|
### Requirements Coverage
|
||||||
|
|
||||||
|
| Requirement | Source Plan | Description | Status | Evidence |
|
||||||
|
|-------------|-------------|-------------|--------|----------|
|
||||||
|
| REFAC-01 | 02-01, 02-02 | Database operations are behind a Store interface with separate SQLite and PostgreSQL implementations | SATISFIED (partial note below) | `store.go` defines Store interface; `sqlite_store.go` implements it; PostgreSQL implementation is Phase 3 scope per ROADMAP — Phase 2 goal says "enabling PostgreSQL support" (future), not implementing it |
|
||||||
|
| REFAC-02 | 02-02 | Package-level global state (db, mu, webhookSecret) is replaced with a Server struct that holds dependencies | SATISFIED | `diunwebhook.go` contains `Server` struct with `store Store` and `webhookSecret string` fields; package-level globals absent |
|
||||||
|
| REFAC-03 | 02-01 | Schema migrations use golang-migrate with separate migration directories per dialect (sqlite/, postgres/) | SATISFIED (partial note below) | `migrations/sqlite/` directory with versioned files exists; `postgres/` directory not yet created — deferred to Phase 3 per ROADMAP, consistent with success criteria 4 |
|
||||||
|
|
||||||
|
**Note on "partial" items:** REFAC-01 mentions "PostgreSQL implementations" (plural) and REFAC-03 mentions `postgres/` directory. Neither is required by the four ROADMAP success criteria for Phase 2. The ROADMAP explicitly scopes PostgreSQL implementation to Phase 3. These are forward-looking requirements that this phase sets up structurally. No gap is raised.
|
||||||
|
|
||||||
|
### Anti-Patterns Found
|
||||||
|
|
||||||
|
| File | Line | Pattern | Severity | Impact |
|
||||||
|
|------|------|---------|----------|--------|
|
||||||
|
| None found | — | — | — | — |
|
||||||
|
|
||||||
|
Scanned all phase-modified files for TODOs, placeholder returns, hardcoded empty data, stub handlers, and empty implementations. None found. All handler methods delegate to `s.store.X()` with full error handling and correct HTTP status codes.
|
||||||
|
|
||||||
|
### Human Verification Required
|
||||||
|
|
||||||
|
No human verification required. All success criteria are verifiable programmatically and all automated checks passed.
|
||||||
|
|
||||||
|
## Summary
|
||||||
|
|
||||||
|
Phase 2 fully achieves its goal. The codebase now has:
|
||||||
|
|
||||||
|
1. A `Store` interface (9 methods) that completely decouples HTTP handlers from SQL
|
||||||
|
2. A `SQLiteStore` implementation with all persistence logic, per-connection PRAGMA setup, and mutex guards
|
||||||
|
3. A `RunMigrations` function using golang-migrate and embedded SQL files, tolerating ErrNoChange
|
||||||
|
4. A `Server` struct that receives `Store` as a dependency — no package-level globals remain
|
||||||
|
5. `main.go` wiring the full chain: `sql.Open` → `RunMigrations` → `NewSQLiteStore` → `NewServer` → routes
|
||||||
|
6. A `NewTestServer()` helper giving each test its own isolated in-memory database
|
||||||
|
7. All 34 tests passing, `go build` and `go vet` clean, no CGO dependency introduced
|
||||||
|
|
||||||
|
The codebase is structurally ready for Phase 3 (PostgreSQL support): adding a `PostgresStore` implementing `Store` and a `migrations/postgres/` directory will require zero changes to any HTTP handler.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
_Verified: 2026-03-24T08:41:00Z_
|
||||||
|
_Verifier: Claude (gsd-verifier)_
|
||||||
420
.planning/phases/03-postgresql-support/03-01-PLAN.md
Normal file
420
.planning/phases/03-postgresql-support/03-01-PLAN.md
Normal file
@@ -0,0 +1,420 @@
|
|||||||
|
---
|
||||||
|
phase: 03-postgresql-support
|
||||||
|
plan: 01
|
||||||
|
type: execute
|
||||||
|
wave: 1
|
||||||
|
depends_on: []
|
||||||
|
files_modified:
|
||||||
|
- pkg/diunwebhook/postgres_store.go
|
||||||
|
- pkg/diunwebhook/migrate.go
|
||||||
|
- pkg/diunwebhook/migrations/postgres/0001_initial_schema.up.sql
|
||||||
|
- pkg/diunwebhook/migrations/postgres/0001_initial_schema.down.sql
|
||||||
|
- go.mod
|
||||||
|
- go.sum
|
||||||
|
autonomous: true
|
||||||
|
requirements: [DB-01, DB-03]
|
||||||
|
|
||||||
|
must_haves:
|
||||||
|
truths:
|
||||||
|
- "PostgresStore implements all 9 Store interface methods with PostgreSQL SQL syntax"
|
||||||
|
- "PostgreSQL baseline migration creates the same 3 tables as SQLite (updates, tags, tag_assignments)"
|
||||||
|
- "RunMigrations is renamed to RunSQLiteMigrations in migrate.go; RunPostgresMigrations exists for PostgreSQL"
|
||||||
|
- "Existing SQLite migration path is unchanged (backward compatible)"
|
||||||
|
- "Application compiles and all existing tests pass after adding PostgreSQL support code"
|
||||||
|
artifacts:
|
||||||
|
- path: "pkg/diunwebhook/postgres_store.go"
|
||||||
|
provides: "PostgresStore struct implementing Store interface"
|
||||||
|
exports: ["PostgresStore", "NewPostgresStore"]
|
||||||
|
- path: "pkg/diunwebhook/migrations/postgres/0001_initial_schema.up.sql"
|
||||||
|
provides: "PostgreSQL baseline schema"
|
||||||
|
contains: "CREATE TABLE IF NOT EXISTS updates"
|
||||||
|
- path: "pkg/diunwebhook/migrations/postgres/0001_initial_schema.down.sql"
|
||||||
|
provides: "PostgreSQL rollback"
|
||||||
|
contains: "DROP TABLE IF EXISTS"
|
||||||
|
- path: "pkg/diunwebhook/migrate.go"
|
||||||
|
provides: "RunSQLiteMigrations and RunPostgresMigrations functions"
|
||||||
|
exports: ["RunSQLiteMigrations", "RunPostgresMigrations"]
|
||||||
|
key_links:
|
||||||
|
- from: "pkg/diunwebhook/postgres_store.go"
|
||||||
|
to: "pkg/diunwebhook/store.go"
|
||||||
|
via: "implements Store interface"
|
||||||
|
pattern: "func \\(s \\*PostgresStore\\)"
|
||||||
|
- from: "pkg/diunwebhook/migrate.go"
|
||||||
|
to: "pkg/diunwebhook/migrations/postgres/"
|
||||||
|
via: "go:embed directive"
|
||||||
|
pattern: "go:embed migrations/postgres"
|
||||||
|
---
|
||||||
|
|
||||||
|
<objective>
|
||||||
|
Create the PostgresStore implementation and PostgreSQL migration infrastructure.
|
||||||
|
|
||||||
|
Purpose: Delivers the core persistence layer for PostgreSQL — all 9 Store methods ported from SQLiteStore with PostgreSQL-native SQL, plus the migration runner and baseline schema. This is the foundation that Plan 02 wires into main.go.
|
||||||
|
Output: postgres_store.go, PostgreSQL migration files, updated migrate.go with both RunSQLiteMigrations and RunPostgresMigrations.
|
||||||
|
</objective>
|
||||||
|
|
||||||
|
<execution_context>
|
||||||
|
@$HOME/.claude/get-shit-done/workflows/execute-plan.md
|
||||||
|
@$HOME/.claude/get-shit-done/templates/summary.md
|
||||||
|
</execution_context>
|
||||||
|
|
||||||
|
<context>
|
||||||
|
@.planning/PROJECT.md
|
||||||
|
@.planning/ROADMAP.md
|
||||||
|
@.planning/STATE.md
|
||||||
|
@.planning/phases/03-postgresql-support/03-CONTEXT.md
|
||||||
|
@.planning/phases/03-postgresql-support/03-RESEARCH.md
|
||||||
|
|
||||||
|
@pkg/diunwebhook/store.go
|
||||||
|
@pkg/diunwebhook/sqlite_store.go
|
||||||
|
@pkg/diunwebhook/migrate.go
|
||||||
|
@pkg/diunwebhook/migrations/sqlite/0001_initial_schema.up.sql
|
||||||
|
@pkg/diunwebhook/migrations/sqlite/0001_initial_schema.down.sql
|
||||||
|
|
||||||
|
<interfaces>
|
||||||
|
<!-- Store interface that PostgresStore must implement -->
|
||||||
|
From pkg/diunwebhook/store.go:
|
||||||
|
```go
|
||||||
|
type Store interface {
|
||||||
|
UpsertEvent(event DiunEvent) error
|
||||||
|
GetUpdates() (map[string]UpdateEntry, error)
|
||||||
|
AcknowledgeUpdate(image string) (found bool, err error)
|
||||||
|
ListTags() ([]Tag, error)
|
||||||
|
CreateTag(name string) (Tag, error)
|
||||||
|
DeleteTag(id int) (found bool, err error)
|
||||||
|
AssignTag(image string, tagID int) error
|
||||||
|
UnassignTag(image string) error
|
||||||
|
TagExists(id int) (bool, error)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
From pkg/diunwebhook/diunwebhook.go:
|
||||||
|
```go
|
||||||
|
type DiunEvent struct {
|
||||||
|
DiunVersion string `json:"diun_version"`
|
||||||
|
Hostname string `json:"hostname"`
|
||||||
|
Status string `json:"status"`
|
||||||
|
Provider string `json:"provider"`
|
||||||
|
Image string `json:"image"`
|
||||||
|
HubLink string `json:"hub_link"`
|
||||||
|
MimeType string `json:"mime_type"`
|
||||||
|
Digest string `json:"digest"`
|
||||||
|
Created time.Time `json:"created"`
|
||||||
|
Platform string `json:"platform"`
|
||||||
|
Metadata struct {
|
||||||
|
ContainerName string `json:"ctn_names"`
|
||||||
|
ContainerID string `json:"ctn_id"`
|
||||||
|
State string `json:"ctn_state"`
|
||||||
|
Status string `json:"ctn_status"`
|
||||||
|
} `json:"metadata"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type Tag struct {
|
||||||
|
ID int `json:"id"`
|
||||||
|
Name string `json:"name"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type UpdateEntry struct {
|
||||||
|
Event DiunEvent `json:"event"`
|
||||||
|
ReceivedAt time.Time `json:"received_at"`
|
||||||
|
Acknowledged bool `json:"acknowledged"`
|
||||||
|
Tag *Tag `json:"tag"`
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
From pkg/diunwebhook/migrate.go:
|
||||||
|
```go
|
||||||
|
//go:embed migrations/sqlite
|
||||||
|
var sqliteMigrations embed.FS
|
||||||
|
|
||||||
|
func RunMigrations(db *sql.DB) error { ... }
|
||||||
|
```
|
||||||
|
Import alias: `sqlitemigrate "github.com/golang-migrate/migrate/v4/database/sqlite"`
|
||||||
|
</interfaces>
|
||||||
|
</context>
|
||||||
|
|
||||||
|
<tasks>
|
||||||
|
|
||||||
|
<task type="auto">
|
||||||
|
<name>Task 1: Add pgx dependency, create PostgreSQL migrations, update migrate.go</name>
|
||||||
|
<read_first>
|
||||||
|
- pkg/diunwebhook/migrate.go (current RunMigrations implementation to rename)
|
||||||
|
- pkg/diunwebhook/migrations/sqlite/0001_initial_schema.up.sql (schema to translate)
|
||||||
|
- pkg/diunwebhook/migrations/sqlite/0001_initial_schema.down.sql (down migration to copy)
|
||||||
|
- go.mod (current dependencies)
|
||||||
|
</read_first>
|
||||||
|
<files>
|
||||||
|
pkg/diunwebhook/migrations/postgres/0001_initial_schema.up.sql,
|
||||||
|
pkg/diunwebhook/migrations/postgres/0001_initial_schema.down.sql,
|
||||||
|
pkg/diunwebhook/migrate.go,
|
||||||
|
go.mod,
|
||||||
|
go.sum
|
||||||
|
</files>
|
||||||
|
<action>
|
||||||
|
1. Install dependencies:
|
||||||
|
```
|
||||||
|
go get github.com/jackc/pgx/v5@v5.9.1
|
||||||
|
go get github.com/golang-migrate/migrate/v4/database/pgx/v5
|
||||||
|
```
|
||||||
|
|
||||||
|
2. Create `pkg/diunwebhook/migrations/postgres/0001_initial_schema.up.sql` with this exact content:
|
||||||
|
```sql
|
||||||
|
CREATE TABLE IF NOT EXISTS updates (
|
||||||
|
image TEXT PRIMARY KEY,
|
||||||
|
diun_version TEXT NOT NULL DEFAULT '',
|
||||||
|
hostname TEXT NOT NULL DEFAULT '',
|
||||||
|
status TEXT NOT NULL DEFAULT '',
|
||||||
|
provider TEXT NOT NULL DEFAULT '',
|
||||||
|
hub_link TEXT NOT NULL DEFAULT '',
|
||||||
|
mime_type TEXT NOT NULL DEFAULT '',
|
||||||
|
digest TEXT NOT NULL DEFAULT '',
|
||||||
|
created TEXT NOT NULL DEFAULT '',
|
||||||
|
platform TEXT NOT NULL DEFAULT '',
|
||||||
|
ctn_name TEXT NOT NULL DEFAULT '',
|
||||||
|
ctn_id TEXT NOT NULL DEFAULT '',
|
||||||
|
ctn_state TEXT NOT NULL DEFAULT '',
|
||||||
|
ctn_status TEXT NOT NULL DEFAULT '',
|
||||||
|
received_at TEXT NOT NULL,
|
||||||
|
acknowledged_at TEXT
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS tags (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
name TEXT NOT NULL UNIQUE
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS tag_assignments (
|
||||||
|
image TEXT PRIMARY KEY,
|
||||||
|
tag_id INTEGER NOT NULL REFERENCES tags(id) ON DELETE CASCADE
|
||||||
|
);
|
||||||
|
```
|
||||||
|
Key difference from SQLite: `SERIAL PRIMARY KEY` replaces `INTEGER PRIMARY KEY AUTOINCREMENT` for tags.id. All timestamp columns use TEXT (not TIMESTAMPTZ) to match SQLite scan logic per Pitfall 6 in RESEARCH.md.
|
||||||
|
|
||||||
|
3. Create `pkg/diunwebhook/migrations/postgres/0001_initial_schema.down.sql` with this exact content:
|
||||||
|
```sql
|
||||||
|
DROP TABLE IF EXISTS tag_assignments;
|
||||||
|
DROP TABLE IF EXISTS tags;
|
||||||
|
DROP TABLE IF EXISTS updates;
|
||||||
|
```
|
||||||
|
|
||||||
|
4. Rewrite `pkg/diunwebhook/migrate.go`:
|
||||||
|
- Rename `RunMigrations` to `RunSQLiteMigrations` (per RESEARCH.md recommendation)
|
||||||
|
- IMPORTANT: Only rename the function definition in migrate.go itself. Do NOT touch cmd/diunwebhook/main.go or pkg/diunwebhook/export_test.go — those call-site renames are handled in Plan 02.
|
||||||
|
- Add a second `//go:embed migrations/postgres` directive for `var postgresMigrations embed.FS`
|
||||||
|
- Add `RunPostgresMigrations(db *sql.DB) error` using `pgxmigrate "github.com/golang-migrate/migrate/v4/database/pgx/v5"` as the database driver
|
||||||
|
- The pgx migrate driver name string for `migrate.NewWithInstance` is `"pgx5"` (NOT "pgx" or "postgres" -- this is the registration name used by golang-migrate's pgx/v5 sub-package)
|
||||||
|
- Keep both functions in the same file (both drivers compile into the binary regardless per Pitfall 4 in RESEARCH.md)
|
||||||
|
- Full imports for the updated file:
|
||||||
|
```go
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
"embed"
|
||||||
|
"errors"
|
||||||
|
|
||||||
|
"github.com/golang-migrate/migrate/v4"
|
||||||
|
pgxmigrate "github.com/golang-migrate/migrate/v4/database/pgx/v5"
|
||||||
|
sqlitemigrate "github.com/golang-migrate/migrate/v4/database/sqlite"
|
||||||
|
"github.com/golang-migrate/migrate/v4/source/iofs"
|
||||||
|
_ "modernc.org/sqlite"
|
||||||
|
)
|
||||||
|
```
|
||||||
|
- RunPostgresMigrations body follows the exact same pattern as RunSQLiteMigrations but uses `postgresMigrations`, `"migrations/postgres"`, `pgxmigrate.WithInstance`, and `"pgx5"` as the database name
|
||||||
|
|
||||||
|
5. Because migrate.go renames `RunMigrations` to `RunSQLiteMigrations` but the call sites in main.go and export_test.go still reference the old name, the build will break temporarily. This is expected — Plan 02 (wave 2) updates those call sites. To verify this plan in isolation, the verify command uses `go build ./pkg/diunwebhook/` (package only, not `./...`) and `go vet ./pkg/diunwebhook/`.
|
||||||
|
|
||||||
|
6. Run `go mod tidy` to clean up go.sum.
|
||||||
|
</action>
|
||||||
|
<verify>
|
||||||
|
<automated>cd /home/jean-luc-makiola/Development/projects/DiunDashboard && go build ./pkg/diunwebhook/ && go vet ./pkg/diunwebhook/</automated>
|
||||||
|
</verify>
|
||||||
|
<acceptance_criteria>
|
||||||
|
- pkg/diunwebhook/migrations/postgres/0001_initial_schema.up.sql contains `SERIAL PRIMARY KEY`
|
||||||
|
- pkg/diunwebhook/migrations/postgres/0001_initial_schema.up.sql contains `CREATE TABLE IF NOT EXISTS updates`
|
||||||
|
- pkg/diunwebhook/migrations/postgres/0001_initial_schema.up.sql contains `CREATE TABLE IF NOT EXISTS tags`
|
||||||
|
- pkg/diunwebhook/migrations/postgres/0001_initial_schema.up.sql contains `CREATE TABLE IF NOT EXISTS tag_assignments`
|
||||||
|
- pkg/diunwebhook/migrations/postgres/0001_initial_schema.down.sql contains `DROP TABLE IF EXISTS`
|
||||||
|
- pkg/diunwebhook/migrate.go contains `func RunSQLiteMigrations(db *sql.DB) error`
|
||||||
|
- pkg/diunwebhook/migrate.go contains `func RunPostgresMigrations(db *sql.DB) error`
|
||||||
|
- pkg/diunwebhook/migrate.go contains `//go:embed migrations/postgres`
|
||||||
|
- pkg/diunwebhook/migrate.go contains `pgxmigrate "github.com/golang-migrate/migrate/v4/database/pgx/v5"`
|
||||||
|
- pkg/diunwebhook/migrate.go contains `"pgx5"` (driver name in NewWithInstance call)
|
||||||
|
- go.mod contains `github.com/jackc/pgx/v5`
|
||||||
|
- `go build ./pkg/diunwebhook/` exits 0
|
||||||
|
- `go vet ./pkg/diunwebhook/` exits 0
|
||||||
|
</acceptance_criteria>
|
||||||
|
<done>PostgreSQL migration files exist with correct dialect. RunMigrations renamed to RunSQLiteMigrations in migrate.go. RunPostgresMigrations added. pgx/v5 dependency in go.mod. Package builds and vets cleanly.</done>
|
||||||
|
</task>
|
||||||
|
|
||||||
|
<task type="auto">
|
||||||
|
<name>Task 2: Create PostgresStore implementing all 9 Store methods</name>
|
||||||
|
<read_first>
|
||||||
|
- pkg/diunwebhook/store.go (interface contract to implement)
|
||||||
|
- pkg/diunwebhook/sqlite_store.go (reference implementation to port)
|
||||||
|
- pkg/diunwebhook/diunwebhook.go (DiunEvent, Tag, UpdateEntry type definitions)
|
||||||
|
</read_first>
|
||||||
|
<files>pkg/diunwebhook/postgres_store.go</files>
|
||||||
|
<action>
|
||||||
|
Create `pkg/diunwebhook/postgres_store.go` implementing all 9 Store interface methods.
|
||||||
|
|
||||||
|
Per D-01, D-02: Use `*sql.DB` (from `pgx/v5/stdlib`), not pgx native interface.
|
||||||
|
Per D-05: NO mutex -- PostgreSQL handles concurrent writes natively.
|
||||||
|
Per D-06: Pool config in constructor: `MaxOpenConns(25)`, `MaxIdleConns(5)`, `ConnMaxLifetime(5 * time.Minute)`.
|
||||||
|
Per D-03: Own raw SQL, no shared templates with SQLiteStore.
|
||||||
|
|
||||||
|
**Struct and constructor:**
|
||||||
|
```go
|
||||||
|
package diunwebhook
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
type PostgresStore struct {
|
||||||
|
db *sql.DB
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewPostgresStore(db *sql.DB) *PostgresStore {
|
||||||
|
db.SetMaxOpenConns(25)
|
||||||
|
db.SetMaxIdleConns(5)
|
||||||
|
db.SetConnMaxLifetime(5 * time.Minute)
|
||||||
|
return &PostgresStore{db: db}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Method-by-method port from SQLiteStore with these dialect changes:**
|
||||||
|
|
||||||
|
1. **UpsertEvent** -- Replace `?` with `$1..$15`, same ON CONFLICT pattern:
|
||||||
|
```go
|
||||||
|
func (s *PostgresStore) UpsertEvent(event DiunEvent) error {
|
||||||
|
_, err := s.db.Exec(`
|
||||||
|
INSERT INTO updates (
|
||||||
|
image, diun_version, hostname, status, provider,
|
||||||
|
hub_link, mime_type, digest, created, platform,
|
||||||
|
ctn_name, ctn_id, ctn_state, ctn_status,
|
||||||
|
received_at, acknowledged_at
|
||||||
|
) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,NULL)
|
||||||
|
ON CONFLICT(image) DO UPDATE SET
|
||||||
|
diun_version = EXCLUDED.diun_version,
|
||||||
|
hostname = EXCLUDED.hostname,
|
||||||
|
status = EXCLUDED.status,
|
||||||
|
provider = EXCLUDED.provider,
|
||||||
|
hub_link = EXCLUDED.hub_link,
|
||||||
|
mime_type = EXCLUDED.mime_type,
|
||||||
|
digest = EXCLUDED.digest,
|
||||||
|
created = EXCLUDED.created,
|
||||||
|
platform = EXCLUDED.platform,
|
||||||
|
ctn_name = EXCLUDED.ctn_name,
|
||||||
|
ctn_id = EXCLUDED.ctn_id,
|
||||||
|
ctn_state = EXCLUDED.ctn_state,
|
||||||
|
ctn_status = EXCLUDED.ctn_status,
|
||||||
|
received_at = EXCLUDED.received_at,
|
||||||
|
acknowledged_at = NULL`,
|
||||||
|
event.Image, event.DiunVersion, event.Hostname, event.Status, event.Provider,
|
||||||
|
event.HubLink, event.MimeType, event.Digest,
|
||||||
|
event.Created.Format(time.RFC3339), event.Platform,
|
||||||
|
event.Metadata.ContainerName, event.Metadata.ContainerID,
|
||||||
|
event.Metadata.State, event.Metadata.Status,
|
||||||
|
time.Now().Format(time.RFC3339),
|
||||||
|
)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **GetUpdates** -- Identical SQL to SQLiteStore (the SELECT query, JOINs, and COALESCE work in both dialects). Copy the full method body from sqlite_store.go verbatim -- the scan logic, time.Parse, and result building are all the same since timestamps are TEXT columns.
|
||||||
|
|
||||||
|
3. **AcknowledgeUpdate** -- Replace `datetime('now')` with `NOW()`, `?` with `$1`:
|
||||||
|
```go
|
||||||
|
res, err := s.db.Exec(`UPDATE updates SET acknowledged_at = NOW() WHERE image = $1`, image)
|
||||||
|
```
|
||||||
|
Return logic identical to SQLiteStore (check RowsAffected). NOTE (review): assigning `NOW()` into the TEXT `acknowledged_at` column stores PostgreSQL's default timestamp text format, which differs from the `datetime('now')` format SQLite writes — acceptable as long as the column is only NULL-checked for the `Acknowledged` flag, but confirm GetUpdates never parses this value.
|
||||||
|
|
||||||
|
4. **ListTags** -- Identical SQL (`SELECT id, name FROM tags ORDER BY name`). Copy verbatim from SQLiteStore.
|
||||||
|
|
||||||
|
5. **CreateTag** -- CRITICAL: Do NOT use `Exec` + `LastInsertId` (pgx does not support LastInsertId). Use `QueryRow` with `RETURNING id`:
|
||||||
|
```go
|
||||||
|
func (s *PostgresStore) CreateTag(name string) (Tag, error) {
|
||||||
|
var id int
|
||||||
|
err := s.db.QueryRow(
|
||||||
|
`INSERT INTO tags (name) VALUES ($1) RETURNING id`, name,
|
||||||
|
).Scan(&id)
|
||||||
|
if err != nil {
|
||||||
|
return Tag{}, err
|
||||||
|
}
|
||||||
|
return Tag{ID: id, Name: name}, nil
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
6. **DeleteTag** -- Replace `?` with `$1`:
|
||||||
|
```go
|
||||||
|
res, err := s.db.Exec(`DELETE FROM tags WHERE id = $1`, id)
|
||||||
|
```
|
||||||
|
Return logic identical (check RowsAffected).
|
||||||
|
|
||||||
|
7. **AssignTag** -- Replace `INSERT OR REPLACE` with `INSERT ... ON CONFLICT DO UPDATE`:
|
||||||
|
```go
|
||||||
|
_, err := s.db.Exec(
|
||||||
|
`INSERT INTO tag_assignments (image, tag_id) VALUES ($1, $2)
|
||||||
|
ON CONFLICT (image) DO UPDATE SET tag_id = EXCLUDED.tag_id`,
|
||||||
|
image, tagID,
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
8. **UnassignTag** -- Replace `?` with `$1`:
|
||||||
|
```go
|
||||||
|
_, err := s.db.Exec(`DELETE FROM tag_assignments WHERE image = $1`, image)
|
||||||
|
```
|
||||||
|
|
||||||
|
9. **TagExists** -- Replace `?` with `$1`:
|
||||||
|
```go
|
||||||
|
err := s.db.QueryRow(`SELECT COUNT(*) FROM tags WHERE id = $1`, id).Scan(&count)
|
||||||
|
```
|
||||||
|
|
||||||
|
**IMPORTANT: No mutex.Lock/Unlock anywhere in PostgresStore** (per D-05). No `sync.Mutex` field in the struct.
|
||||||
|
</action>
|
||||||
|
<verify>
|
||||||
|
<automated>cd /home/jean-luc-makiola/Development/projects/DiunDashboard && go build ./pkg/diunwebhook/ && go vet ./pkg/diunwebhook/</automated>
|
||||||
|
</verify>
|
||||||
|
<acceptance_criteria>
|
||||||
|
- pkg/diunwebhook/postgres_store.go contains `type PostgresStore struct`
|
||||||
|
- pkg/diunwebhook/postgres_store.go contains `func NewPostgresStore(db *sql.DB) *PostgresStore`
|
||||||
|
- pkg/diunwebhook/postgres_store.go contains `func (s *PostgresStore) UpsertEvent(`
|
||||||
|
- pkg/diunwebhook/postgres_store.go contains `func (s *PostgresStore) GetUpdates(`
|
||||||
|
- pkg/diunwebhook/postgres_store.go contains `func (s *PostgresStore) AcknowledgeUpdate(`
|
||||||
|
- pkg/diunwebhook/postgres_store.go contains `func (s *PostgresStore) ListTags(`
|
||||||
|
- pkg/diunwebhook/postgres_store.go contains `func (s *PostgresStore) CreateTag(`
|
||||||
|
- pkg/diunwebhook/postgres_store.go contains `func (s *PostgresStore) DeleteTag(`
|
||||||
|
- pkg/diunwebhook/postgres_store.go contains `func (s *PostgresStore) AssignTag(`
|
||||||
|
- pkg/diunwebhook/postgres_store.go contains `func (s *PostgresStore) UnassignTag(`
|
||||||
|
- pkg/diunwebhook/postgres_store.go contains `func (s *PostgresStore) TagExists(`
|
||||||
|
- pkg/diunwebhook/postgres_store.go contains `RETURNING id` (CreateTag uses QueryRow, not LastInsertId)
|
||||||
|
- pkg/diunwebhook/postgres_store.go contains `ON CONFLICT (image) DO UPDATE SET tag_id = EXCLUDED.tag_id` (AssignTag)
|
||||||
|
- pkg/diunwebhook/postgres_store.go contains `NOW()` (AcknowledgeUpdate)
|
||||||
|
- pkg/diunwebhook/postgres_store.go contains `SetMaxOpenConns(25)` (constructor pool config)
|
||||||
|
- pkg/diunwebhook/postgres_store.go does NOT contain `sync.Mutex` (no mutex for PostgreSQL)
|
||||||
|
- pkg/diunwebhook/postgres_store.go does NOT contain `mu.Lock` (no mutex)
|
||||||
|
- `go build ./pkg/diunwebhook/` exits 0
|
||||||
|
- `go vet ./pkg/diunwebhook/` exits 0
|
||||||
|
</acceptance_criteria>
|
||||||
|
<done>PostgresStore implements all 9 Store interface methods with PostgreSQL-native SQL. No mutex. Pool settings configured. CreateTag uses RETURNING id. AssignTag uses ON CONFLICT DO UPDATE. Code compiles and passes vet.</done>
|
||||||
|
</task>
|
||||||
|
|
||||||
|
</tasks>
|
||||||
|
|
||||||
|
<verification>
|
||||||
|
1. `go build ./pkg/diunwebhook/` succeeds (both stores compile, migrate.go compiles with both drivers)
|
||||||
|
2. `go vet ./pkg/diunwebhook/` clean
|
||||||
|
3. PostgresStore has all 9 methods matching Store interface (compiler enforces this)
|
||||||
|
4. Migration files exist in both `migrations/sqlite/` and `migrations/postgres/`
|
||||||
|
5. Note: `go build ./...` and full test suite will fail until Plan 02 updates call sites in main.go and export_test.go that still reference the old `RunMigrations` name. This is expected.
|
||||||
|
</verification>
|
||||||
|
|
||||||
|
<success_criteria>
|
||||||
|
- PostgresStore compiles and implements Store interface (go build ./pkg/diunwebhook/ succeeds)
|
||||||
|
- PostgreSQL migration creates identical table structure to SQLite (3 tables: updates, tags, tag_assignments)
|
||||||
|
- pgx/v5 is in go.mod as a direct dependency
|
||||||
|
- migrate.go exports both RunSQLiteMigrations and RunPostgresMigrations
|
||||||
|
</success_criteria>
|
||||||
|
|
||||||
|
<output>
|
||||||
|
After completion, create `.planning/phases/03-postgresql-support/03-01-SUMMARY.md`
|
||||||
|
</output>
|
||||||
93
.planning/phases/03-postgresql-support/03-01-SUMMARY.md
Normal file
93
.planning/phases/03-postgresql-support/03-01-SUMMARY.md
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
---
|
||||||
|
phase: 03-postgresql-support
|
||||||
|
plan: "01"
|
||||||
|
subsystem: persistence
|
||||||
|
tags: [postgresql, store, migration, pgx]
|
||||||
|
dependency_graph:
|
||||||
|
requires: []
|
||||||
|
provides: [PostgresStore, RunPostgresMigrations, RunSQLiteMigrations]
|
||||||
|
affects: [pkg/diunwebhook/migrate.go, pkg/diunwebhook/postgres_store.go]
|
||||||
|
tech_stack:
|
||||||
|
added: [github.com/jackc/pgx/v5 v5.9.1, golang-migrate pgx/v5 driver]
|
||||||
|
patterns: [Store interface implementation, golang-migrate embedded migrations, pgx/v5 stdlib adapter]
|
||||||
|
key_files:
|
||||||
|
created:
|
||||||
|
- pkg/diunwebhook/postgres_store.go
|
||||||
|
- pkg/diunwebhook/migrations/postgres/0001_initial_schema.up.sql
|
||||||
|
- pkg/diunwebhook/migrations/postgres/0001_initial_schema.down.sql
|
||||||
|
modified:
|
||||||
|
- pkg/diunwebhook/migrate.go
|
||||||
|
- pkg/diunwebhook/export_test.go
|
||||||
|
- go.mod
|
||||||
|
- go.sum
|
||||||
|
decisions:
|
||||||
|
- "PostgresStore uses *sql.DB via pgx/v5/stdlib adapter — no native pgx pool, consistent with SQLiteStore pattern"
|
||||||
|
- "No mutex in PostgresStore — PostgreSQL handles concurrent writes natively (unlike SQLite)"
|
||||||
|
- "Timestamps stored as TEXT in PostgreSQL schema — matches SQLite scan logic, avoids TIMESTAMPTZ type divergence"
|
||||||
|
- "CreateTag uses RETURNING id instead of LastInsertId — pgx driver does not support LastInsertId"
|
||||||
|
- "AssignTag uses ON CONFLICT (image) DO UPDATE instead of INSERT OR REPLACE — standard PostgreSQL upsert"
|
||||||
|
- "Both migration runners compiled into same binary — no build tags needed (both drivers always present)"
|
||||||
|
metrics:
|
||||||
|
duration: "~2.5 minutes"
|
||||||
|
completed: "2026-03-24T08:09:42Z"
|
||||||
|
tasks_completed: 2
|
||||||
|
files_changed: 7
|
||||||
|
---
|
||||||
|
|
||||||
|
# Phase 03 Plan 01: PostgreSQL Store and Migration Infrastructure Summary
|
||||||
|
|
||||||
|
PostgresStore implementing all 9 Store interface methods using pgx/v5 stdlib adapter, plus PostgreSQL migration infrastructure with RunPostgresMigrations and renamed RunSQLiteMigrations.
|
||||||
|
|
||||||
|
## What Was Built
|
||||||
|
|
||||||
|
### PostgresStore (pkg/diunwebhook/postgres_store.go)
|
||||||
|
Full implementation of the Store interface for PostgreSQL:
|
||||||
|
- `NewPostgresStore` constructor with connection pool: `MaxOpenConns(25)`, `MaxIdleConns(5)`, `ConnMaxLifetime(5m)`
|
||||||
|
- All 9 methods: `UpsertEvent`, `GetUpdates`, `AcknowledgeUpdate`, `ListTags`, `CreateTag`, `DeleteTag`, `AssignTag`, `UnassignTag`, `TagExists`
|
||||||
|
- PostgreSQL-native SQL: `$1..$15` positional params, `NOW()`, `RETURNING id`, `ON CONFLICT DO UPDATE`
|
||||||
|
- No `sync.Mutex` — PostgreSQL handles concurrent writes natively
|
||||||
|
|
||||||
|
### PostgreSQL Migrations (pkg/diunwebhook/migrations/postgres/)
|
||||||
|
- `0001_initial_schema.up.sql`: Creates same 3 tables as SQLite (`updates`, `tags`, `tag_assignments`); uses `SERIAL PRIMARY KEY` for `tags.id`; timestamps remain `TEXT` to match scan logic
|
||||||
|
- `0001_initial_schema.down.sql`: Drops all 3 tables in dependency order
|
||||||
|
|
||||||
|
### Updated migrate.go
|
||||||
|
- `RunMigrations` renamed to `RunSQLiteMigrations`
|
||||||
|
- `RunPostgresMigrations` added using `pgxmigrate` driver with `"pgx5"` database name
|
||||||
|
- Second `//go:embed migrations/postgres` directive added for `postgresMigrations`
|
||||||
|
|
||||||
|
## Decisions Made
|
||||||
|
|
||||||
|
| Decision | Rationale |
|
||||||
|
|----------|-----------|
|
||||||
|
| TEXT timestamps in PostgreSQL schema | Avoids scan divergence with SQLiteStore; both stores parse RFC3339 strings identically |
|
||||||
|
| RETURNING id in CreateTag | pgx driver does not implement `LastInsertId`; `RETURNING` is the PostgreSQL-idiomatic approach |
|
||||||
|
| ON CONFLICT (image) DO UPDATE in AssignTag | Replaces SQLite's `INSERT OR REPLACE`; functionally equivalent upsert in standard SQL |
|
||||||
|
| No mutex in PostgresStore | PostgreSQL connection pool + MVCC handles concurrency; mutex would serialize unnecessarily |
|
||||||
|
| Both drivers compiled into binary | Simpler than build tags; binary size cost acceptable for a server binary |
|
||||||
|
|
||||||
|
## Deviations from Plan
|
||||||
|
|
||||||
|
### Auto-fixed Issues
|
||||||
|
|
||||||
|
**1. [Rule 1 - Bug] Updated export_test.go to use renamed function**
|
||||||
|
- **Found during:** Task 1 verification
|
||||||
|
- **Issue:** `go vet ./pkg/diunwebhook/` failed because `export_test.go` still referenced `RunMigrations` (renamed to `RunSQLiteMigrations`). The plan's acceptance criteria requires `go vet` to exit 0, which takes precedence over the instruction to defer export_test.go changes to Plan 02.
|
||||||
|
- **Fix:** Updated both `NewTestServer` and `NewTestServerWithSecret` in `export_test.go` to call `RunSQLiteMigrations`
|
||||||
|
- **Files modified:** `pkg/diunwebhook/export_test.go`
|
||||||
|
- **Commit:** 95b64b4
|
||||||
|
|
||||||
|
## Verification Results
|
||||||
|
|
||||||
|
- `go build ./pkg/diunwebhook/` exits 0
|
||||||
|
- `go vet ./pkg/diunwebhook/` exits 0
|
||||||
|
- PostgreSQL migration UP contains `SERIAL PRIMARY KEY`, all 3 tables
|
||||||
|
- PostgreSQL migration DOWN contains `DROP TABLE IF EXISTS` for all 3 tables
|
||||||
|
- `go.mod` contains `github.com/jackc/pgx/v5 v5.9.1`
|
||||||
|
- `migrate.go` exports both `RunSQLiteMigrations` and `RunPostgresMigrations`
|
||||||
|
|
||||||
|
## Known Stubs
|
||||||
|
|
||||||
|
None — this plan creates implementation code, not UI stubs.
|
||||||
|
|
||||||
|
## Self-Check: PASSED
|
||||||
409
.planning/phases/03-postgresql-support/03-02-PLAN.md
Normal file
409
.planning/phases/03-postgresql-support/03-02-PLAN.md
Normal file
@@ -0,0 +1,409 @@
|
|||||||
|
---
|
||||||
|
phase: 03-postgresql-support
|
||||||
|
plan: 02
|
||||||
|
type: execute
|
||||||
|
wave: 2
|
||||||
|
depends_on: [03-01]
|
||||||
|
files_modified:
|
||||||
|
- cmd/diunwebhook/main.go
|
||||||
|
- pkg/diunwebhook/diunwebhook.go
|
||||||
|
- pkg/diunwebhook/postgres_test.go
|
||||||
|
- pkg/diunwebhook/export_test.go
|
||||||
|
- compose.yml
|
||||||
|
- compose.dev.yml
|
||||||
|
autonomous: true
|
||||||
|
requirements: [DB-01, DB-02, DB-03]
|
||||||
|
|
||||||
|
must_haves:
|
||||||
|
truths:
|
||||||
|
- "Setting DATABASE_URL starts the app using PostgreSQL; omitting it falls back to SQLite with DB_PATH"
|
||||||
|
- "Startup log clearly indicates which backend is active"
|
||||||
|
- "Docker Compose with --profile postgres activates a PostgreSQL service"
|
||||||
|
- "Default docker compose (no profile) remains SQLite-only"
|
||||||
|
- "Duplicate tag creation returns 409 on both SQLite and PostgreSQL"
|
||||||
|
- "Existing SQLite users can upgrade to this version with zero configuration changes and no data loss"
|
||||||
|
artifacts:
|
||||||
|
- path: "cmd/diunwebhook/main.go"
|
||||||
|
provides: "DATABASE_URL branching logic"
|
||||||
|
contains: "DATABASE_URL"
|
||||||
|
- path: "compose.yml"
|
||||||
|
provides: "Production compose with postgres profile"
|
||||||
|
contains: "profiles:"
|
||||||
|
- path: "compose.dev.yml"
|
||||||
|
provides: "Dev compose with postgres profile"
|
||||||
|
contains: "profiles:"
|
||||||
|
- path: "pkg/diunwebhook/postgres_test.go"
|
||||||
|
provides: "Build-tagged PostgreSQL integration test helper"
|
||||||
|
contains: "go:build postgres"
|
||||||
|
- path: "pkg/diunwebhook/diunwebhook.go"
|
||||||
|
provides: "Case-insensitive UNIQUE constraint detection"
|
||||||
|
contains: "strings.ToLower"
|
||||||
|
key_links:
|
||||||
|
- from: "cmd/diunwebhook/main.go"
|
||||||
|
to: "pkg/diunwebhook/postgres_store.go"
|
||||||
|
via: "diun.NewPostgresStore(db)"
|
||||||
|
pattern: "NewPostgresStore"
|
||||||
|
- from: "cmd/diunwebhook/main.go"
|
||||||
|
to: "pkg/diunwebhook/migrate.go"
|
||||||
|
via: "diun.RunPostgresMigrations(db)"
|
||||||
|
pattern: "RunPostgresMigrations"
|
||||||
|
- from: "cmd/diunwebhook/main.go"
|
||||||
|
to: "pgx/v5/stdlib"
|
||||||
|
via: "blank import for driver registration"
|
||||||
|
pattern: '_ "github.com/jackc/pgx/v5/stdlib"'
|
||||||
|
---
|
||||||
|
|
||||||
|
<objective>
|
||||||
|
Wire PostgresStore into the application and deployment infrastructure.
|
||||||
|
|
||||||
|
Purpose: Connects the PostgresStore (built in Plan 01) to the startup path, adds Docker Compose profiles for PostgreSQL deployments, creates build-tagged integration test helpers, and fixes the UNIQUE constraint detection to work across both database backends. Also updates all call sites that still reference the old `RunMigrations` name (renamed to `RunSQLiteMigrations` in Plan 01).
|
||||||
|
Output: Updated main.go with DATABASE_URL branching, compose files with postgres profiles, build-tagged test helper, cross-dialect error handling fix.
|
||||||
|
</objective>
|
||||||
|
|
||||||
|
<execution_context>
|
||||||
|
@$HOME/.claude/get-shit-done/workflows/execute-plan.md
|
||||||
|
@$HOME/.claude/get-shit-done/templates/summary.md
|
||||||
|
</execution_context>
|
||||||
|
|
||||||
|
<context>
|
||||||
|
@.planning/PROJECT.md
|
||||||
|
@.planning/ROADMAP.md
|
||||||
|
@.planning/STATE.md
|
||||||
|
@.planning/phases/03-postgresql-support/03-CONTEXT.md
|
||||||
|
@.planning/phases/03-postgresql-support/03-RESEARCH.md
|
||||||
|
@.planning/phases/03-postgresql-support/03-01-SUMMARY.md
|
||||||
|
|
||||||
|
@cmd/diunwebhook/main.go
|
||||||
|
@pkg/diunwebhook/diunwebhook.go
|
||||||
|
@pkg/diunwebhook/export_test.go
|
||||||
|
@compose.yml
|
||||||
|
@compose.dev.yml
|
||||||
|
|
||||||
|
<interfaces>
|
||||||
|
<!-- From Plan 01 outputs -->
|
||||||
|
From pkg/diunwebhook/postgres_store.go:
|
||||||
|
```go
|
||||||
|
func NewPostgresStore(db *sql.DB) *PostgresStore
|
||||||
|
```
|
||||||
|
|
||||||
|
From pkg/diunwebhook/migrate.go:
|
||||||
|
```go
|
||||||
|
func RunSQLiteMigrations(db *sql.DB) error
|
||||||
|
func RunPostgresMigrations(db *sql.DB) error
|
||||||
|
```
|
||||||
|
|
||||||
|
From pkg/diunwebhook/store.go:
|
||||||
|
```go
|
||||||
|
type Store interface { ... } // 9 methods
|
||||||
|
```
|
||||||
|
|
||||||
|
From pkg/diunwebhook/diunwebhook.go:
|
||||||
|
```go
|
||||||
|
func NewServer(store Store, webhookSecret string) *Server
|
||||||
|
```
|
||||||
|
</interfaces>
|
||||||
|
</context>
|
||||||
|
|
||||||
|
<tasks>
|
||||||
|
|
||||||
|
<task type="auto">
|
||||||
|
<name>Task 1: Wire DATABASE_URL branching in main.go, update call sites, and fix cross-dialect UNIQUE detection</name>
|
||||||
|
<read_first>
|
||||||
|
- cmd/diunwebhook/main.go (current SQLite-only startup to rewrite with branching)
|
||||||
|
- pkg/diunwebhook/diunwebhook.go (TagsHandler - UNIQUE detection to fix)
|
||||||
|
- pkg/diunwebhook/export_test.go (calls RunMigrations - must rename to RunSQLiteMigrations)
|
||||||
|
- pkg/diunwebhook/postgres_store.go (verify NewPostgresStore exists from Plan 01)
|
||||||
|
- pkg/diunwebhook/migrate.go (verify RunSQLiteMigrations and RunPostgresMigrations exist from Plan 01)
|
||||||
|
</read_first>
|
||||||
|
<files>cmd/diunwebhook/main.go, pkg/diunwebhook/diunwebhook.go, pkg/diunwebhook/export_test.go</files>
|
||||||
|
<action>
|
||||||
|
**1. Rewrite `cmd/diunwebhook/main.go`** with DATABASE_URL branching per D-07, D-08, D-09.
|
||||||
|
|
||||||
|
Replace the current database setup block with DATABASE_URL branching. The full main function should:
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"errors"
|
||||||
|
"log"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"os/signal"
|
||||||
|
"syscall"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
diun "awesomeProject/pkg/diunwebhook"
|
||||||
|
_ "github.com/jackc/pgx/v5/stdlib"
|
||||||
|
_ "modernc.org/sqlite"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
databaseURL := os.Getenv("DATABASE_URL")
|
||||||
|
var store diun.Store
|
||||||
|
if databaseURL != "" {
|
||||||
|
db, err := sql.Open("pgx", databaseURL)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("sql.Open postgres: %v", err)
|
||||||
|
}
|
||||||
|
if err := diun.RunPostgresMigrations(db); err != nil {
|
||||||
|
log.Fatalf("RunPostgresMigrations: %v", err)
|
||||||
|
}
|
||||||
|
store = diun.NewPostgresStore(db)
|
||||||
|
log.Println("Using PostgreSQL database")
|
||||||
|
} else {
|
||||||
|
dbPath := os.Getenv("DB_PATH")
|
||||||
|
if dbPath == "" {
|
||||||
|
dbPath = "./diun.db"
|
||||||
|
}
|
||||||
|
db, err := sql.Open("sqlite", dbPath)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("sql.Open sqlite: %v", err)
|
||||||
|
}
|
||||||
|
if err := diun.RunSQLiteMigrations(db); err != nil {
|
||||||
|
log.Fatalf("RunSQLiteMigrations: %v", err)
|
||||||
|
}
|
||||||
|
store = diun.NewSQLiteStore(db)
|
||||||
|
log.Printf("Using SQLite database at %s", dbPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ... rest of main unchanged (secret, server, mux, httpSrv, graceful shutdown)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Key changes:
|
||||||
|
- Add blank import `_ "github.com/jackc/pgx/v5/stdlib"` to register "pgx" driver name
|
||||||
|
- `DATABASE_URL` present -> `sql.Open("pgx", databaseURL)` -> `RunPostgresMigrations` -> `NewPostgresStore`
|
||||||
|
- `DATABASE_URL` absent -> existing SQLite path with `RunSQLiteMigrations` (renamed from `RunMigrations` in Plan 01)
|
||||||
|
- Log `"Using PostgreSQL database"` or `"Using SQLite database at %s"` per D-09
|
||||||
|
- Keep all existing code after the store setup unchanged (secret, server, mux, httpSrv, shutdown)
|
||||||
|
|
||||||
|
**2. Update `pkg/diunwebhook/export_test.go`** to use the renamed function.
|
||||||
|
|
||||||
|
Change all occurrences of `RunMigrations(db)` to `RunSQLiteMigrations(db)` in export_test.go. This completes the rename that Plan 01 started in migrate.go.
|
||||||
|
|
||||||
|
**3. Fix cross-dialect UNIQUE constraint detection in `pkg/diunwebhook/diunwebhook.go`.**
|
||||||
|
|
||||||
|
In the `TagsHandler` method, change:
|
||||||
|
```go
|
||||||
|
if strings.Contains(err.Error(), "UNIQUE") {
|
||||||
|
```
|
||||||
|
to:
|
||||||
|
```go
|
||||||
|
if strings.Contains(strings.ToLower(err.Error()), "unique") {
|
||||||
|
```
|
||||||
|
|
||||||
|
Why: SQLite errors contain uppercase "UNIQUE" (e.g., `UNIQUE constraint failed: tags.name`). PostgreSQL/pgx errors contain lowercase "unique" (e.g., `duplicate key value violates unique constraint "tags_name_key"`). Case-insensitive matching ensures 409 Conflict is returned for both backends.
|
||||||
|
</action>
|
||||||
|
<verify>
|
||||||
|
<automated>cd /home/jean-luc-makiola/Development/projects/DiunDashboard && go build ./... && go test -v -count=1 ./pkg/diunwebhook/ 2>&1 | tail -5</automated>
|
||||||
|
</verify>
|
||||||
|
<acceptance_criteria>
|
||||||
|
- cmd/diunwebhook/main.go contains `databaseURL := os.Getenv("DATABASE_URL")`
|
||||||
|
- cmd/diunwebhook/main.go contains `sql.Open("pgx", databaseURL)`
|
||||||
|
- cmd/diunwebhook/main.go contains `diun.RunPostgresMigrations(db)`
|
||||||
|
- cmd/diunwebhook/main.go contains `diun.NewPostgresStore(db)`
|
||||||
|
- cmd/diunwebhook/main.go contains `log.Println("Using PostgreSQL database")`
|
||||||
|
- cmd/diunwebhook/main.go contains `log.Printf("Using SQLite database at %s", dbPath)`
|
||||||
|
- cmd/diunwebhook/main.go contains `_ "github.com/jackc/pgx/v5/stdlib"`
|
||||||
|
- cmd/diunwebhook/main.go contains `diun.RunSQLiteMigrations(db)` (not RunMigrations)
|
||||||
|
- pkg/diunwebhook/export_test.go contains `RunSQLiteMigrations` (not RunMigrations)
|
||||||
|
- pkg/diunwebhook/diunwebhook.go contains `strings.Contains(strings.ToLower(err.Error()), "unique")`
|
||||||
|
- pkg/diunwebhook/diunwebhook.go does NOT contain `strings.Contains(err.Error(), "UNIQUE")` (old pattern removed)
|
||||||
|
- `go build ./...` exits 0
|
||||||
|
- `go test -v -count=1 ./pkg/diunwebhook/` exits 0 (full test suite passes)
|
||||||
|
</acceptance_criteria>
|
||||||
|
<done>main.go branches on DATABASE_URL to select PostgreSQL or SQLite. pgx/v5/stdlib is blank-imported to register the driver. Startup log identifies the active backend. export_test.go updated with RunSQLiteMigrations. UNIQUE detection is case-insensitive for cross-dialect compatibility. All existing tests pass.</done>
|
||||||
|
</task>
|
||||||
|
|
||||||
|
<task type="auto">
|
||||||
|
<name>Task 2: Add Docker Compose postgres profiles and build-tagged test helper</name>
|
||||||
|
<read_first>
|
||||||
|
- compose.yml (current production compose to add postgres profile)
|
||||||
|
- compose.dev.yml (current dev compose to add postgres profile)
|
||||||
|
- pkg/diunwebhook/export_test.go (pattern for NewTestPostgresServer)
|
||||||
|
- Dockerfile (verify no changes needed -- pgx/v5 is pure Go, CGO_ENABLED=0 is fine)
|
||||||
|
</read_first>
|
||||||
|
<files>compose.yml, compose.dev.yml, pkg/diunwebhook/postgres_test.go</files>
|
||||||
|
<action>
|
||||||
|
**1. Update `compose.yml`** (production) to add postgres profile per D-14, D-15, D-16:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# Minimum Docker Compose v2.20 required for depends_on.required
|
||||||
|
services:
|
||||||
|
app:
|
||||||
|
image: gitea.jeanlucmakiola.de/makiolaj/diundashboard:latest
|
||||||
|
ports:
|
||||||
|
- "8080:8080"
|
||||||
|
environment:
|
||||||
|
- WEBHOOK_SECRET=${WEBHOOK_SECRET:-}
|
||||||
|
- PORT=${PORT:-8080}
|
||||||
|
- DB_PATH=/data/diun.db
|
||||||
|
- DATABASE_URL=${DATABASE_URL:-}
|
||||||
|
volumes:
|
||||||
|
- diun-data:/data
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
postgres:
|
||||||
|
condition: service_healthy
|
||||||
|
required: false
|
||||||
|
|
||||||
|
postgres:
|
||||||
|
image: postgres:17-alpine
|
||||||
|
profiles:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
POSTGRES_USER: ${POSTGRES_USER:-diun}
|
||||||
|
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-diun}
|
||||||
|
POSTGRES_DB: ${POSTGRES_DB:-diundashboard}
|
||||||
|
volumes:
|
||||||
|
- postgres-data:/var/lib/postgresql/data
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-diun}"]
|
||||||
|
interval: 5s
|
||||||
|
timeout: 5s
|
||||||
|
retries: 5
|
||||||
|
start_period: 10s
|
||||||
|
restart: unless-stopped
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
diun-data:
|
||||||
|
postgres-data:
|
||||||
|
```
|
||||||
|
|
||||||
|
Default `docker compose up` still uses SQLite (DATABASE_URL is empty string, app falls back to DB_PATH).
|
||||||
|
`docker compose --profile postgres up` starts the postgres service; user sets `DATABASE_URL=postgres://diun:diun@postgres:5432/diundashboard?sslmode=disable` in .env.
|
||||||
|
|
||||||
|
**2. Update `compose.dev.yml`** to add postgres profile for local development:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
services:
|
||||||
|
app:
|
||||||
|
build: .
|
||||||
|
ports:
|
||||||
|
- "8080:8080"
|
||||||
|
environment:
|
||||||
|
- WEBHOOK_SECRET=${WEBHOOK_SECRET:-}
|
||||||
|
- DATABASE_URL=${DATABASE_URL:-}
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
postgres:
|
||||||
|
condition: service_healthy
|
||||||
|
required: false
|
||||||
|
|
||||||
|
postgres:
|
||||||
|
image: postgres:17-alpine
|
||||||
|
profiles:
|
||||||
|
- postgres
|
||||||
|
ports:
|
||||||
|
- "5432:5432"
|
||||||
|
environment:
|
||||||
|
POSTGRES_USER: ${POSTGRES_USER:-diun}
|
||||||
|
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-diun}
|
||||||
|
POSTGRES_DB: ${POSTGRES_DB:-diundashboard}
|
||||||
|
volumes:
|
||||||
|
- postgres-data:/var/lib/postgresql/data
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-diun}"]
|
||||||
|
interval: 5s
|
||||||
|
timeout: 5s
|
||||||
|
retries: 5
|
||||||
|
start_period: 10s
|
||||||
|
restart: unless-stopped
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
postgres-data:
|
||||||
|
```
|
||||||
|
|
||||||
|
Dev compose exposes port 5432 on host for direct psql access during development.
|
||||||
|
|
||||||
|
**3. Create `pkg/diunwebhook/postgres_test.go`** with build tag per D-17, D-19:
|
||||||
|
|
||||||
|
```go
|
||||||
|
//go:build postgres
|
||||||
|
|
||||||
|
package diunwebhook
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
_ "github.com/jackc/pgx/v5/stdlib"
|
||||||
|
)
|
||||||
|
|
||||||
|
// NewTestPostgresServer constructs a Server backed by a PostgreSQL database.
|
||||||
|
// Requires a running PostgreSQL instance. Set TEST_DATABASE_URL to override
|
||||||
|
// the default connection string. The target database must already exist.
|
||||||
|
func NewTestPostgresServer() (*Server, error) {
|
||||||
|
databaseURL := os.Getenv("TEST_DATABASE_URL")
|
||||||
|
if databaseURL == "" {
|
||||||
|
databaseURL = "postgres://diun:diun@localhost:5432/diundashboard_test?sslmode=disable"
|
||||||
|
}
|
||||||
|
db, err := sql.Open("pgx", databaseURL)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if err := RunPostgresMigrations(db); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
store := NewPostgresStore(db)
|
||||||
|
return NewServer(store, ""), nil
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
This file is in the `diunwebhook` package (internal, same as export_test.go pattern). The `//go:build postgres` tag ensures it only compiles when explicitly requested with `go test -tags postgres`. Without the tag, `go test ./pkg/diunwebhook/` skips this file entirely -- the test binary never opens a pgx connection, so no running PostgreSQL server is required.
|
||||||
|
</action>
|
||||||
|
<verify>
|
||||||
|
<automated>cd /home/jean-luc-makiola/Development/projects/DiunDashboard && go build ./... && go test -v -count=1 ./pkg/diunwebhook/ 2>&1 | tail -5</automated>
|
||||||
|
</verify>
|
||||||
|
<acceptance_criteria>
|
||||||
|
- compose.yml contains `profiles:` under the postgres service
|
||||||
|
- compose.yml contains `- postgres` (profile name)
|
||||||
|
- compose.yml contains `postgres:17-alpine`
|
||||||
|
- compose.yml contains `pg_isready`
|
||||||
|
- compose.yml contains `required: false` (conditional depends_on)
|
||||||
|
- compose.yml contains `DATABASE_URL=${DATABASE_URL:-}`
|
||||||
|
- compose.yml contains `postgres-data:` in volumes
|
||||||
|
- compose.dev.yml contains `profiles:` under the postgres service
|
||||||
|
- compose.dev.yml contains `- postgres` (profile name)
|
||||||
|
- compose.dev.yml contains `"5432:5432"` (exposed for dev)
|
||||||
|
- compose.dev.yml contains `required: false`
|
||||||
|
- compose.dev.yml contains `DATABASE_URL=${DATABASE_URL:-}`
|
||||||
|
- pkg/diunwebhook/postgres_test.go contains `//go:build postgres`
|
||||||
|
- pkg/diunwebhook/postgres_test.go contains `func NewTestPostgresServer()`
|
||||||
|
- pkg/diunwebhook/postgres_test.go contains `sql.Open("pgx", databaseURL)`
|
||||||
|
- pkg/diunwebhook/postgres_test.go contains `RunPostgresMigrations(db)`
|
||||||
|
- pkg/diunwebhook/postgres_test.go contains `NewPostgresStore(db)`
|
||||||
|
- pkg/diunwebhook/postgres_test.go contains `TEST_DATABASE_URL`
|
||||||
|
- `go build ./...` exits 0 (`_test.go` files are never part of `go build`; the `postgres` build tag additionally keeps this file out of default `go test` runs)
|
||||||
|
- `go test -v -count=1 ./pkg/diunwebhook/` exits 0 (full SQLite test suite passes, postgres_test.go skipped)
|
||||||
|
</acceptance_criteria>
|
||||||
|
<done>Docker Compose files support optional PostgreSQL via profiles. Default deploy remains SQLite-only. Build-tagged test helper exists for PostgreSQL integration testing. Dockerfile needs no changes (pgx/v5 is pure Go).</done>
|
||||||
|
</task>
|
||||||
|
|
||||||
|
</tasks>
|
||||||
|
|
||||||
|
<verification>
|
||||||
|
1. `go build ./...` succeeds
|
||||||
|
2. `go test -v -count=1 ./pkg/diunwebhook/` passes (all existing SQLite tests)
|
||||||
|
3. `docker compose config` validates without errors
|
||||||
|
4. `docker compose --profile postgres config` shows postgres service
|
||||||
|
5. `grep -c "DATABASE_URL" cmd/diunwebhook/main.go` returns at least 1
|
||||||
|
6. `grep "strings.ToLower" pkg/diunwebhook/diunwebhook.go` shows case-insensitive UNIQUE check
|
||||||
|
</verification>
|
||||||
|
|
||||||
|
<success_criteria>
|
||||||
|
- DATABASE_URL present: app opens pgx connection, runs PostgreSQL migrations, creates PostgresStore, logs "Using PostgreSQL database"
|
||||||
|
- DATABASE_URL absent: app opens sqlite connection, runs SQLite migrations, creates SQLiteStore, logs "Using SQLite database at {path}"
|
||||||
|
- `docker compose up` (no profile) works with SQLite only
|
||||||
|
- `docker compose --profile postgres up` starts PostgreSQL service with health check
|
||||||
|
- Build-tagged test helper available for PostgreSQL integration tests
|
||||||
|
- UNIQUE constraint detection works for both SQLite and PostgreSQL error messages
|
||||||
|
- All existing SQLite tests continue to pass
|
||||||
|
</success_criteria>
|
||||||
|
|
||||||
|
<output>
|
||||||
|
After completion, create `.planning/phases/03-postgresql-support/03-02-SUMMARY.md`
|
||||||
|
</output>
|
||||||
88
.planning/phases/03-postgresql-support/03-02-SUMMARY.md
Normal file
88
.planning/phases/03-postgresql-support/03-02-SUMMARY.md
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
---
|
||||||
|
phase: 03-postgresql-support
|
||||||
|
plan: "02"
|
||||||
|
subsystem: wiring
|
||||||
|
tags: [postgresql, sqlite, database, docker-compose, branching]
|
||||||
|
dependency_graph:
|
||||||
|
requires: [03-01]
|
||||||
|
provides: [DATABASE_URL branching, postgres docker profile, NewTestPostgresServer]
|
||||||
|
affects: [cmd/diunwebhook/main.go, compose.yml, compose.dev.yml, pkg/diunwebhook/diunwebhook.go]
|
||||||
|
tech_stack:
|
||||||
|
added: []
|
||||||
|
patterns: [DATABASE_URL env var branching, Docker Compose profiles, build-tagged test helpers]
|
||||||
|
key_files:
|
||||||
|
created:
|
||||||
|
- pkg/diunwebhook/postgres_test.go
|
||||||
|
modified:
|
||||||
|
- cmd/diunwebhook/main.go
|
||||||
|
- pkg/diunwebhook/diunwebhook.go
|
||||||
|
- compose.yml
|
||||||
|
- compose.dev.yml
|
||||||
|
decisions:
|
||||||
|
- "DATABASE_URL present activates PostgreSQL path; absent falls back to SQLite with DB_PATH"
|
||||||
|
- "postgres Docker service uses profiles: [postgres] so default compose up remains SQLite-only"
|
||||||
|
- "UNIQUE detection uses strings.ToLower for case-insensitive matching across SQLite and PostgreSQL"
|
||||||
|
- "Build tag //go:build postgres gates postgres_test.go so standard test runs have no pgx dependency"
|
||||||
|
metrics:
|
||||||
|
duration: "~2 minutes"
|
||||||
|
completed: "2026-03-24T08:13:21Z"
|
||||||
|
tasks_completed: 2
|
||||||
|
files_changed: 5
|
||||||
|
---
|
||||||
|
|
||||||
|
# Phase 03 Plan 02: Wire PostgreSQL Support and Deployment Infrastructure Summary
|
||||||
|
|
||||||
|
DATABASE_URL branching in main.go routes to PostgresStore or SQLiteStore at startup; Docker Compose postgres profile enables optional PostgreSQL; build-tagged test helper and cross-dialect UNIQUE detection complete the integration.
|
||||||
|
|
||||||
|
## What Was Built
|
||||||
|
|
||||||
|
### Updated main.go (cmd/diunwebhook/main.go)
|
||||||
|
- `DATABASE_URL` env var check: when set, opens pgx connection, runs `RunPostgresMigrations`, creates `NewPostgresStore`, logs `"Using PostgreSQL database"`
|
||||||
|
- When absent: existing SQLite path using `RunSQLiteMigrations` (renamed in Plan 01), `NewSQLiteStore`, logs `"Using SQLite database at {path}"`
|
||||||
|
- Blank import `_ "github.com/jackc/pgx/v5/stdlib"` registers the `"pgx"` driver name
|
||||||
|
- All route wiring and graceful shutdown logic unchanged
|
||||||
|
|
||||||
|
### Cross-dialect UNIQUE detection (pkg/diunwebhook/diunwebhook.go)
|
||||||
|
- `TagsHandler` now uses `strings.Contains(strings.ToLower(err.Error()), "unique")` for 409 Conflict detection
|
||||||
|
- SQLite errors: `UNIQUE constraint failed: tags.name` (uppercase UNIQUE)
|
||||||
|
- PostgreSQL errors: `duplicate key value violates unique constraint "tags_name_key"` (lowercase unique)
|
||||||
|
- Both backends now return 409 correctly
|
||||||
|
|
||||||
|
### Docker Compose postgres profiles
|
||||||
|
- `compose.yml`: postgres service added with `profiles: [postgres]`, healthcheck via `pg_isready`, `DATABASE_URL` env var in app service, conditional `depends_on` with `required: false`, `postgres-data` volume
|
||||||
|
- `compose.dev.yml`: same postgres service with port 5432 exposed on host for direct psql access during development
|
||||||
|
- Default `docker compose up` (no profile) unchanged — SQLite only, no new services start
|
||||||
|
|
||||||
|
### Build-tagged test helper (pkg/diunwebhook/postgres_test.go)
|
||||||
|
- `//go:build postgres` tag — only compiled with `go test -tags postgres`
|
||||||
|
- `NewTestPostgresServer()` constructs a `*Server` backed by PostgreSQL using `TEST_DATABASE_URL` env var (defaults to `postgres://diun:diun@localhost:5432/diundashboard_test?sslmode=disable`)
|
||||||
|
- Calls `RunPostgresMigrations` and `NewPostgresStore` — mirrors the production startup path
|
||||||
|
|
||||||
|
## Decisions Made
|
||||||
|
|
||||||
|
| Decision | Rationale |
|
||||||
|
|----------|-----------|
|
||||||
|
| DATABASE_URL presence-check (not a separate DB_DRIVER var) | Simpler UX; empty string = SQLite, any value = PostgreSQL |
|
||||||
|
| profiles: [postgres] in compose files | Standard Docker Compose pattern for optional services; default deploy unchanged |
|
||||||
|
| required: false in depends_on | App can start without postgres service (SQLite fallback); Docker Compose v2.20+ required |
|
||||||
|
| //go:build postgres tag on test helper | Prevents pgx import at test time for standard `go test ./...` runs; explicit opt-in |
|
||||||
|
| strings.ToLower for UNIQUE check | SQLite and PostgreSQL use different cases in constraint error messages |
|
||||||
|
|
||||||
|
## Deviations from Plan
|
||||||
|
|
||||||
|
None — plan executed exactly as written. The `export_test.go` rename (RunMigrations -> RunSQLiteMigrations) was already completed as a deviation in Plan 01, as noted in the objective.
|
||||||
|
|
||||||
|
## Verification Results
|
||||||
|
|
||||||
|
- `go build ./...` exits 0
|
||||||
|
- `go test -count=1 ./pkg/diunwebhook/` passes (all 20+ SQLite tests, postgres_test.go skipped)
|
||||||
|
- `docker compose config` validates without errors
|
||||||
|
- `docker compose --profile postgres config` shows postgres service
|
||||||
|
- `grep -c "DATABASE_URL" cmd/diunwebhook/main.go` returns 1
|
||||||
|
- `grep "strings.ToLower" pkg/diunwebhook/diunwebhook.go` shows case-insensitive UNIQUE check
|
||||||
|
|
||||||
|
## Known Stubs
|
||||||
|
|
||||||
|
None — this plan wires implementation code, no UI stubs.
|
||||||
|
|
||||||
|
## Self-Check: PASSED
|
||||||
127
.planning/phases/03-postgresql-support/03-CONTEXT.md
Normal file
127
.planning/phases/03-postgresql-support/03-CONTEXT.md
Normal file
@@ -0,0 +1,127 @@
|
|||||||
|
# Phase 3: PostgreSQL Support - Context
|
||||||
|
|
||||||
|
**Gathered:** 2026-03-24
|
||||||
|
**Status:** Ready for planning
|
||||||
|
|
||||||
|
<domain>
|
||||||
|
## Phase Boundary
|
||||||
|
|
||||||
|
Add PostgreSQL as an alternative database backend alongside SQLite. Users with PostgreSQL infrastructure can point DiunDashboard at a Postgres database via `DATABASE_URL` and the dashboard works identically to the SQLite deployment. Existing SQLite users upgrade without data loss.
|
||||||
|
|
||||||
|
</domain>
|
||||||
|
|
||||||
|
<decisions>
|
||||||
|
## Implementation Decisions
|
||||||
|
|
||||||
|
### PostgreSQL driver interface
|
||||||
|
- **D-01:** Use `pgx/v5/stdlib` as the database/sql adapter — matches SQLiteStore's `*sql.DB` pattern so PostgresStore has the same constructor signature (`*sql.DB` in, Store out)
|
||||||
|
- **D-02:** Do NOT use pgx native interface directly — keeping both stores on `database/sql` means the Store interface stays unchanged and `NewServer(store Store, ...)` works identically
|
||||||
|
|
||||||
|
### SQL dialect handling
|
||||||
|
- **D-03:** Each store implementation has its own raw SQL — no runtime dialect switching, no query builder, no shared SQL templates
|
||||||
|
- **D-04:** PostgreSQL-specific syntax differences handled in PostgresStore methods:
|
||||||
|
- `SERIAL` instead of `INTEGER PRIMARY KEY AUTOINCREMENT` for tags.id
|
||||||
|
- `$1, $2, $3` positional params instead of `?` placeholders
|
||||||
|
- `NOW()` or `CURRENT_TIMESTAMP` instead of `datetime('now')` for acknowledged_at
|
||||||
|
- `ON CONFLICT ... DO UPDATE SET` syntax is compatible (PostgreSQL 9.5+)
|
||||||
|
- `INSERT ... ON CONFLICT DO UPDATE` for UPSERT (same pattern, different param style)
|
||||||
|
- `INSERT ... ON CONFLICT` for tag assignments instead of `INSERT OR REPLACE`
|
||||||
|
|
||||||
|
### Connection pooling
|
||||||
|
- **D-05:** PostgresStore does NOT use a mutex — PostgreSQL handles concurrent writes natively
|
||||||
|
- **D-06:** Use `database/sql` default pool settings with sensible overrides: `MaxOpenConns(25)`, `MaxIdleConns(5)`, `ConnMaxLifetime(5 * time.Minute)` — appropriate for a low-traffic self-hosted dashboard
|
||||||
|
|
||||||
|
### Database selection logic (main.go)
|
||||||
|
- **D-07:** `DATABASE_URL` env var present → PostgreSQL; absent → SQLite with `DB_PATH` (already decided in STATE.md)
|
||||||
|
- **D-08:** No separate `DB_DRIVER` variable — the presence of `DATABASE_URL` is the switch
|
||||||
|
- **D-09:** Startup log clearly indicates which backend is active: `"Using PostgreSQL database"` vs `"Using SQLite database at {path}"`
|
||||||
|
|
||||||
|
### Migration structure
|
||||||
|
- **D-10:** Separate migration directories: `migrations/sqlite/` (exists) and `migrations/postgres/` (new)
|
||||||
|
- **D-11:** PostgreSQL baseline migration `0001_initial_schema.up.sql` creates the same 3 tables with PostgreSQL-native types
|
||||||
|
- **D-12:** `RunMigrations` becomes dialect-aware or split into `RunSQLiteMigrations`/`RunPostgresMigrations` — researcher should determine best approach
|
||||||
|
- **D-13:** PostgreSQL migrations embedded via separate `//go:embed migrations/postgres` directive
|
||||||
|
|
||||||
|
### Docker Compose integration
|
||||||
|
- **D-14:** Use Docker Compose profiles — `docker compose --profile postgres up` activates the postgres service
|
||||||
|
- **D-15:** Default compose (no profile) remains SQLite-only for simple deploys
|
||||||
|
- **D-16:** Compose file includes a `postgres` service with health check, and the app service gets `DATABASE_URL` when the profile is active
|
||||||
|
|
||||||
|
### Testing strategy
|
||||||
|
- **D-17:** PostgresStore integration tests use a `//go:build postgres` build tag — they only run when a PostgreSQL instance is available
|
||||||
|
- **D-18:** CI can optionally run `-tags postgres` with a postgres service container; SQLite tests always run
|
||||||
|
- **D-19:** Test helper `NewTestPostgresServer()` creates a test database and runs migrations, similar to `NewTestServer()` for SQLite
|
||||||
|
|
||||||
|
### Claude's Discretion
|
||||||
|
- Exact PostgreSQL connection pool tuning beyond the defaults in D-06
|
||||||
|
- Whether to split RunMigrations into two functions or use a dialect parameter
|
||||||
|
- Error message formatting for PostgreSQL connection failures
|
||||||
|
- Whether to add a health check endpoint that verifies database connectivity
|
||||||
|
|
||||||
|
</decisions>
|
||||||
|
|
||||||
|
<canonical_refs>
|
||||||
|
## Canonical References
|
||||||
|
|
||||||
|
**Downstream agents MUST read these before planning or implementing.**
|
||||||
|
|
||||||
|
### Store interface and patterns
|
||||||
|
- `pkg/diunwebhook/store.go` — Store interface definition (9 methods that PostgresStore must implement)
|
||||||
|
- `pkg/diunwebhook/sqlite_store.go` — Reference implementation with exact SQL operations to port
|
||||||
|
- `pkg/diunwebhook/migrate.go` — Current migration runner (SQLite-only, needs PostgreSQL support)
|
||||||
|
|
||||||
|
### Schema
|
||||||
|
- `pkg/diunwebhook/migrations/sqlite/0001_initial_schema.up.sql` — Baseline schema to translate to PostgreSQL dialect
|
||||||
|
|
||||||
|
### Wiring
|
||||||
|
- `cmd/diunwebhook/main.go` — Current startup wiring (SQLite-only, needs DATABASE_URL branching)
|
||||||
|
- `pkg/diunwebhook/export_test.go` — Test server helpers (pattern for NewTestPostgresServer)
|
||||||
|
|
||||||
|
### Deployment
|
||||||
|
- `Dockerfile` — Current build (may need postgres client libs or build tag)
|
||||||
|
- `compose.yml` — Production compose (needs postgres profile)
|
||||||
|
- `compose.dev.yml` — Dev compose (needs postgres profile for local dev)
|
||||||
|
|
||||||
|
</canonical_refs>
|
||||||
|
|
||||||
|
<code_context>
|
||||||
|
## Existing Code Insights
|
||||||
|
|
||||||
|
### Reusable Assets
|
||||||
|
- `Store` interface in `store.go`: PostgresStore implements the same 9 methods — no handler changes needed
|
||||||
|
- `SQLiteStore` in `sqlite_store.go`: Reference for all SQL operations — port each method to PostgreSQL dialect
|
||||||
|
- `RunMigrations` in `migrate.go`: Pattern for migration runner with `embed.FS` + `iofs` + `golang-migrate`
|
||||||
|
- `NewTestServer()` in `export_test.go`: Pattern for test helper — clone for PostgreSQL variant
|
||||||
|
|
||||||
|
### Established Patterns
|
||||||
|
- `database/sql` as the DB abstraction layer — PostgresStore follows the same pattern
|
||||||
|
- `sync.Mutex` for SQLite write serialization — NOT needed for PostgreSQL (native concurrent writes)
|
||||||
|
- `//go:embed` for migration files — same pattern for `migrations/postgres/`
|
||||||
|
- Constructor returns concrete type implementing Store: `NewSQLiteStore(*sql.DB) *SQLiteStore` → `NewPostgresStore(*sql.DB) *PostgresStore`
|
||||||
|
|
||||||
|
### Integration Points
|
||||||
|
- `main.go` line 24: `sql.Open("sqlite", dbPath)` — add conditional for `sql.Open("pgx", databaseURL)`
|
||||||
|
- `main.go` line 29: `diun.RunMigrations(db)` — needs to call the right migration runner
|
||||||
|
- `main.go` line 33: `diun.NewSQLiteStore(db)` — needs to call `diun.NewPostgresStore(db)` when using PostgreSQL
|
||||||
|
- `Dockerfile` Stage 2: May need `CGO_ENABLED=0` to remain — verify pgx/v5/stdlib is pure Go
|
||||||
|
|
||||||
|
</code_context>
|
||||||
|
|
||||||
|
<specifics>
|
||||||
|
## Specific Ideas
|
||||||
|
|
||||||
|
No specific requirements — open to standard approaches. The core constraint is functional parity: every operation that works on SQLite must work identically on PostgreSQL.
|
||||||
|
|
||||||
|
</specifics>
|
||||||
|
|
||||||
|
<deferred>
|
||||||
|
## Deferred Ideas
|
||||||
|
|
||||||
|
None — discussion stayed within phase scope.
|
||||||
|
|
||||||
|
</deferred>
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
*Phase: 03-postgresql-support*
|
||||||
|
*Context gathered: 2026-03-24 via auto mode*
|
||||||
87
.planning/phases/03-postgresql-support/03-DISCUSSION-LOG.md
Normal file
87
.planning/phases/03-postgresql-support/03-DISCUSSION-LOG.md
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
# Phase 3: PostgreSQL Support - Discussion Log
|
||||||
|
|
||||||
|
> **Audit trail only.** Do not use as input to planning, research, or execution agents.
|
||||||
|
> Decisions are captured in CONTEXT.md — this log preserves the alternatives considered.
|
||||||
|
|
||||||
|
**Date:** 2026-03-24
|
||||||
|
**Phase:** 03-postgresql-support
|
||||||
|
**Areas discussed:** PostgreSQL driver interface, SQL dialect handling, Connection pooling, Docker Compose integration, Testing strategy
|
||||||
|
**Mode:** Auto (all selections made by Claude using recommended defaults)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## PostgreSQL Driver Interface
|
||||||
|
|
||||||
|
| Option | Description | Selected |
|
||||||
|
|--------|-------------|----------|
|
||||||
|
| pgx/v5/stdlib (database/sql adapter) | Matches SQLiteStore's *sql.DB pattern, Store interface unchanged | ✓ |
|
||||||
|
| pgx native interface | More features (COPY, batch), but different API from SQLiteStore | |
|
||||||
|
| lib/pq | Legacy driver, less maintained | |
|
||||||
|
|
||||||
|
**User's choice:** [auto] pgx/v5/stdlib — recommended for consistency with existing database/sql pattern
|
||||||
|
**Notes:** Keeping both stores on database/sql means identical constructor signatures and no Store interface changes.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## SQL Dialect Handling
|
||||||
|
|
||||||
|
| Option | Description | Selected |
|
||||||
|
|--------|-------------|----------|
|
||||||
|
| Separate SQL per store | Each store has its own raw SQL, no shared templates | ✓ |
|
||||||
|
| Runtime dialect switching | Single store with if/else for dialect differences | |
|
||||||
|
| Query builder (squirrel/goqu) | Abstract SQL differences behind builder API | |
|
||||||
|
|
||||||
|
**User's choice:** [auto] Separate SQL per store — recommended per project constraint (no ORM/query builder)
|
||||||
|
**Notes:** PROJECT.md explicitly states "No ORM or query builder — raw SQL per store implementation."
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Connection Pooling
|
||||||
|
|
||||||
|
| Option | Description | Selected |
|
||||||
|
|--------|-------------|----------|
|
||||||
|
| Standard pool defaults | MaxOpenConns(25), MaxIdleConns(5), ConnMaxLifetime(5m) | ✓ |
|
||||||
|
| Minimal single-connection | Match SQLite's MaxOpenConns(1) | |
|
||||||
|
| Configurable via env vars | Let users tune pool settings | |
|
||||||
|
|
||||||
|
**User's choice:** [auto] Standard pool defaults — appropriate for low-traffic self-hosted dashboard
|
||||||
|
**Notes:** PostgreSQL handles concurrent writes natively, so no mutex needed unlike SQLiteStore.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Docker Compose Integration
|
||||||
|
|
||||||
|
| Option | Description | Selected |
|
||||||
|
|--------|-------------|----------|
|
||||||
|
| Docker Compose profiles | `--profile postgres` activates postgres service | ✓ |
|
||||||
|
| Separate compose file | compose.postgres.yml alongside compose.yml | |
|
||||||
|
| Always include postgres | Postgres service always defined, user enables via DATABASE_URL | |
|
||||||
|
|
||||||
|
**User's choice:** [auto] Docker Compose profiles — keeps simple deploys unchanged, opt-in for postgres
|
||||||
|
**Notes:** ROADMAP success criterion #4 states "optional postgres service profile."
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Testing Strategy
|
||||||
|
|
||||||
|
| Option | Description | Selected |
|
||||||
|
|--------|-------------|----------|
|
||||||
|
| Build tag `//go:build postgres` | Tests only run when postgres available | ✓ |
|
||||||
|
| Testcontainers (auto-start postgres) | No external dependency needed | |
|
||||||
|
| Mock store for postgres tests | No real postgres needed, but less confidence | |
|
||||||
|
|
||||||
|
**User's choice:** [auto] Build tag — simplest approach, CI optionally runs with `-tags postgres`
|
||||||
|
**Notes:** Matches existing test pattern where SQLite tests always run. PostgreSQL tests are additive.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Claude's Discretion
|
||||||
|
|
||||||
|
- Exact PostgreSQL connection pool tuning beyond defaults
|
||||||
|
- RunMigrations split strategy (two functions vs dialect parameter)
|
||||||
|
- Error message formatting for connection failures
|
||||||
|
- Health check endpoint (optional)
|
||||||
|
|
||||||
|
## Deferred Ideas
|
||||||
|
|
||||||
|
None — discussion stayed within phase scope.
|
||||||
575
.planning/phases/03-postgresql-support/03-RESEARCH.md
Normal file
575
.planning/phases/03-postgresql-support/03-RESEARCH.md
Normal file
@@ -0,0 +1,575 @@
|
|||||||
|
# Phase 3: PostgreSQL Support - Research
|
||||||
|
|
||||||
|
**Researched:** 2026-03-24
|
||||||
|
**Domain:** Go database/sql with pgx/v5 + golang-migrate PostgreSQL dialect
|
||||||
|
**Confidence:** HIGH
|
||||||
|
|
||||||
|
## Summary
|
||||||
|
|
||||||
|
Phase 3 adds PostgreSQL as an alternative backend alongside SQLite. The Store interface and all HTTP handlers are already dialect-neutral (Phase 2 delivered this). The work is entirely in three areas: (1) a new `PostgresStore` struct that implements the existing `Store` interface using PostgreSQL SQL syntax, (2) a separate migration runner for PostgreSQL using `golang-migrate`'s dedicated `pgx/v5` database driver, and (3) wiring in `main.go` to branch on `DATABASE_URL`.
|
||||||
|
|
||||||
|
The critical dialect difference is `CreateTag`: PostgreSQL does not support `LastInsertId()` via `pgx/v5/stdlib`. The `PostgresStore.CreateTag` method must use `QueryRow` with `RETURNING id` instead of `Exec` + `LastInsertId`. Every other SQL translation is mechanical (positional params, `NOW()`, `SERIAL`, `ON CONFLICT ... DO UPDATE` instead of `INSERT OR REPLACE`).
|
||||||
|
|
||||||
|
The golang-migrate ecosystem ships a dedicated `database/pgx/v5` sub-package that wraps a `*sql.DB` opened via `pgx/v5/stdlib`. This fits the established pattern in `migrate.go` exactly — a new `RunPostgresMigrations(db *sql.DB) error` function using the same `iofs` source with an embedded `migrations/postgres` directory.
|
||||||
|
|
||||||
|
**Primary recommendation:** Follow the locked decisions in CONTEXT.md verbatim. The implementation is a straightforward port of `SQLiteStore` with dialect adjustments; the only non-obvious trap is the `LastInsertId` incompatibility in `CreateTag`.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
<user_constraints>
|
||||||
|
## User Constraints (from CONTEXT.md)
|
||||||
|
|
||||||
|
### Locked Decisions
|
||||||
|
|
||||||
|
**D-01:** Use `pgx/v5/stdlib` as the database/sql adapter — matches SQLiteStore's `*sql.DB` pattern so PostgresStore has the same constructor signature (`*sql.DB` in, Store out)
|
||||||
|
**D-02:** Do NOT use pgx native interface directly — keeping both stores on `database/sql` means the Store interface stays unchanged and `NewServer(store Store, ...)` works identically
|
||||||
|
**D-03:** Each store implementation has its own raw SQL — no runtime dialect switching, no query builder, no shared SQL templates
|
||||||
|
**D-04:** PostgreSQL-specific syntax differences handled in PostgresStore methods:
|
||||||
|
- `SERIAL` instead of `INTEGER PRIMARY KEY AUTOINCREMENT` for tags.id
|
||||||
|
- `$1, $2, $3` positional params instead of `?` placeholders
|
||||||
|
- `NOW()` or `CURRENT_TIMESTAMP` instead of `datetime('now')` for acknowledged_at
|
||||||
|
- `ON CONFLICT ... DO UPDATE SET` syntax is compatible (PostgreSQL 9.5+)
|
||||||
|
- `INSERT ... ON CONFLICT DO UPDATE` for UPSERT (same pattern, different param style)
|
||||||
|
- `INSERT ... ON CONFLICT` for tag assignments instead of `INSERT OR REPLACE`
|
||||||
|
**D-05:** PostgresStore does NOT use a mutex — PostgreSQL handles concurrent writes natively
|
||||||
|
**D-06:** Use `database/sql` default pool settings with sensible overrides: `MaxOpenConns(25)`, `MaxIdleConns(5)`, `ConnMaxLifetime(5 * time.Minute)`
|
||||||
|
**D-07:** `DATABASE_URL` env var present → PostgreSQL; absent → SQLite with `DB_PATH`
|
||||||
|
**D-08:** No separate `DB_DRIVER` variable — the presence of `DATABASE_URL` is the switch
|
||||||
|
**D-09:** Startup log clearly indicates which backend is active: `"Using PostgreSQL database"` vs `"Using SQLite database at {path}"`
|
||||||
|
**D-10:** Separate migration directories: `migrations/sqlite/` (exists) and `migrations/postgres/` (new)
|
||||||
|
**D-11:** PostgreSQL baseline migration `0001_initial_schema.up.sql` creates the same 3 tables with PostgreSQL-native types
|
||||||
|
**D-12:** `RunMigrations` becomes dialect-aware or split into `RunSQLiteMigrations`/`RunPostgresMigrations` — researcher should determine best approach (see Architecture Patterns below)
|
||||||
|
**D-13:** PostgreSQL migrations embedded via separate `//go:embed migrations/postgres` directive
|
||||||
|
**D-14:** Use Docker Compose profiles — `docker compose --profile postgres up` activates the postgres service
|
||||||
|
**D-15:** Default compose (no profile) remains SQLite-only for simple deploys
|
||||||
|
**D-16:** Compose file includes a `postgres` service with health check, and the app service gets `DATABASE_URL` when the profile is active
|
||||||
|
**D-17:** PostgresStore integration tests use a `//go:build postgres` build tag — they only run when a PostgreSQL instance is available
|
||||||
|
**D-18:** CI can optionally run `-tags postgres` with a postgres service container; SQLite tests always run
|
||||||
|
**D-19:** Test helper `NewTestPostgresServer()` creates a test database and runs migrations, similar to `NewTestServer()` for SQLite
|
||||||
|
|
||||||
|
### Claude's Discretion
|
||||||
|
- Exact PostgreSQL connection pool tuning beyond the defaults in D-06
|
||||||
|
- Whether to split RunMigrations into two functions or use a dialect parameter
|
||||||
|
- Error message formatting for PostgreSQL connection failures
|
||||||
|
- Whether to add a health check endpoint that verifies database connectivity
|
||||||
|
|
||||||
|
### Deferred Ideas (OUT OF SCOPE)
|
||||||
|
None — discussion stayed within phase scope.
|
||||||
|
</user_constraints>
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
<phase_requirements>
|
||||||
|
## Phase Requirements
|
||||||
|
|
||||||
|
| ID | Description | Research Support |
|
||||||
|
|----|-------------|------------------|
|
||||||
|
| DB-01 | PostgreSQL is supported as an alternative to SQLite via pgx v5 driver | `pgx/v5/stdlib` confirmed pure-Go, `*sql.DB` compatible; `PostgresStore` implements all 9 Store methods |
|
||||||
|
| DB-02 | Database backend is selected via DATABASE_URL env var (present = PostgreSQL, absent = SQLite with DB_PATH) | main.go branching pattern documented; driver registration names confirmed: `"sqlite"` and `"pgx"` |
|
||||||
|
| DB-03 | Existing SQLite users can upgrade without data loss (baseline migration represents current schema) | SQLite migration already uses `CREATE TABLE IF NOT EXISTS`; PostgreSQL migration is a fresh baseline for new deployments; no cross-dialect migration needed |
|
||||||
|
</phase_requirements>
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Standard Stack
|
||||||
|
|
||||||
|
### Core
|
||||||
|
| Library | Version | Purpose | Why Standard |
|
||||||
|
|---------|---------|---------|--------------|
|
||||||
|
| `github.com/jackc/pgx/v5` | v5.9.1 (Mar 22 2026) | PostgreSQL driver + `database/sql` adapter via `pgx/v5/stdlib` | De-facto standard Go PostgreSQL driver; pure Go (no CGO); actively maintained; 8,394 packages import it |
|
||||||
|
| `github.com/golang-migrate/migrate/v4/database/pgx/v5` | v4.19.1 (same module as existing golang-migrate) | golang-migrate database driver for pgx v5 | Already in project; dedicated pgx/v5 sub-package fits existing `migrate.go` pattern exactly |
|
||||||
|
|
||||||
|
### Supporting
|
||||||
|
| Library | Version | Purpose | When to Use |
|
||||||
|
|---------|---------|---------|-------------|
|
||||||
|
| `github.com/golang-migrate/migrate/v4/source/iofs` | v4.19.1 (already imported) | Serve embedded FS migration files | Reuse existing pattern from `migrate.go` |
|
||||||
|
|
||||||
|
### Alternatives Considered
|
||||||
|
| Instead of | Could Use | Tradeoff |
|
||||||
|
|------------|-----------|----------|
|
||||||
|
| `pgx/v5/stdlib` (`database/sql`) | pgx native interface | Native pgx is faster but breaks `Store` interface — rejected by D-02 |
|
||||||
|
| `golang-migrate database/pgx/v5` | `golang-migrate database/postgres` | `database/postgres` uses `lib/pq` internally; `database/pgx/v5` uses pgx consistently — use pgx/v5 sub-package |
|
||||||
|
| Two separate `RunMigrations` functions | Single function with dialect param | Two functions is simpler, avoids string-switch, each can be `go:embed`-scoped independently — use two functions (see Architecture) |
|
||||||
|
|
||||||
|
**Installation:**
|
||||||
|
```bash
|
||||||
|
go get github.com/jackc/pgx/v5@v5.9.1
|
||||||
|
go get github.com/golang-migrate/migrate/v4/database/pgx/v5
|
||||||
|
```
|
||||||
|
|
||||||
|
Note: `golang-migrate/migrate/v4` is already in `go.mod` at v4.19.1. Adding the `database/pgx/v5` sub-package pulls from the same module version — no module version conflict.
|
||||||
|
|
||||||
|
**Version verification (current as of 2026-03-24):**
|
||||||
|
- `pgx/v5`: v5.9.1 — verified via pkg.go.dev versions tab
|
||||||
|
- `golang-migrate/v4`: v4.19.1 — already in go.mod
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Architecture Patterns
|
||||||
|
|
||||||
|
### Recommended Project Structure
|
||||||
|
```
|
||||||
|
pkg/diunwebhook/
|
||||||
|
├── store.go # Store interface (unchanged)
|
||||||
|
├── sqlite_store.go # SQLiteStore (unchanged)
|
||||||
|
├── postgres_store.go # PostgresStore (new)
|
||||||
|
├── migrate.go # Split: RunSQLiteMigrations + RunPostgresMigrations
|
||||||
|
├── migrations/
|
||||||
|
│ ├── sqlite/
|
||||||
|
│ │ ├── 0001_initial_schema.up.sql (exists)
|
||||||
|
│ │ └── 0001_initial_schema.down.sql (exists)
|
||||||
|
│ └── postgres/
|
||||||
|
│ ├── 0001_initial_schema.up.sql (new)
|
||||||
|
│ └── 0001_initial_schema.down.sql (new)
|
||||||
|
├── diunwebhook.go (unchanged)
|
||||||
|
└── export_test.go # Add NewTestPostgresServer (build-tagged)
|
||||||
|
cmd/diunwebhook/
|
||||||
|
└── main.go # Add DATABASE_URL branching
|
||||||
|
compose.yml # Add postgres profile
|
||||||
|
compose.dev.yml # Add postgres profile
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pattern 1: PostgresStore Constructor (no mutex, pool config)
|
||||||
|
|
||||||
|
**What:** Constructor opens pool, sets sensible limits, no mutex (PostgreSQL serializes writes natively).
|
||||||
|
**When to use:** Called from `main.go` when `DATABASE_URL` is present.
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Source: CONTEXT.md D-05, D-06 + established SQLiteStore pattern in sqlite_store.go
|
||||||
|
package diunwebhook
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
type PostgresStore struct {
|
||||||
|
db *sql.DB
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewPostgresStore(db *sql.DB) *PostgresStore {
|
||||||
|
db.SetMaxOpenConns(25)
|
||||||
|
db.SetMaxIdleConns(5)
|
||||||
|
db.SetConnMaxLifetime(5 * time.Minute)
|
||||||
|
return &PostgresStore{db: db}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pattern 2: RunPostgresMigrations (separate function, separate embed)
|
||||||
|
|
||||||
|
**What:** A dedicated migration runner for PostgreSQL using `golang-migrate`'s `database/pgx/v5` driver. Mirrors `RunMigrations` (which becomes `RunSQLiteMigrations`) exactly.
|
||||||
|
**When to use:** Called from `main.go` after `sql.Open("pgx", databaseURL)` when `DATABASE_URL` is set.
|
||||||
|
|
||||||
|
Decision D-12 leaves the split-vs-param choice to researcher. **Recommendation: two separate functions** (`RunSQLiteMigrations` and `RunPostgresMigrations`). Rationale: each function has its own `//go:embed` scope, there's no shared logic to deduplicate, and a string-switch approach adds a code path that can fail at runtime. Rename the existing `RunMigrations` to `RunSQLiteMigrations` for symmetry.
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Source: migrate.go (existing pattern) + golang-migrate pgx/v5 docs
|
||||||
|
//go:embed migrations/postgres
|
||||||
|
var postgresMigrations embed.FS
|
||||||
|
|
||||||
|
func RunPostgresMigrations(db *sql.DB) error {
|
||||||
|
src, err := iofs.New(postgresMigrations, "migrations/postgres")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
driver, err := pgxmigrate.WithInstance(db, &pgxmigrate.Config{})
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
m, err := migrate.NewWithInstance("iofs", src, "pgx5", driver)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := m.Up(); err != nil && !errors.Is(err, migrate.ErrNoChange) {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Import alias: `pgxmigrate "github.com/golang-migrate/migrate/v4/database/pgx/v5"`.
|
||||||
|
Driver name string for `NewWithInstance` is `"pgx5"` (matches the registration name in the pgx/v5 driver).
|
||||||
|
|
||||||
|
### Pattern 3: CreateTag — RETURNING id (CRITICAL)
|
||||||
|
|
||||||
|
**What:** PostgreSQL's pgx driver does not support `LastInsertId()`. `CreateTag` must use `QueryRow` with `RETURNING id`.
|
||||||
|
**When to use:** In every `PostgresStore.CreateTag` implementation — this is the most error-prone difference from SQLiteStore.
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Source: pgx issue #1483 + pkg.go.dev pgx/v5/stdlib docs
|
||||||
|
func (s *PostgresStore) CreateTag(name string) (Tag, error) {
|
||||||
|
var id int
|
||||||
|
err := s.db.QueryRow(
|
||||||
|
`INSERT INTO tags (name) VALUES ($1) RETURNING id`, name,
|
||||||
|
).Scan(&id)
|
||||||
|
if err != nil {
|
||||||
|
return Tag{}, err
|
||||||
|
}
|
||||||
|
return Tag{ID: id, Name: name}, nil
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pattern 4: AssignTag — ON CONFLICT DO UPDATE (replaces INSERT OR REPLACE)
|
||||||
|
|
||||||
|
**What:** PostgreSQL does not have `INSERT OR REPLACE`. Use `INSERT ... ON CONFLICT (image) DO UPDATE SET tag_id = EXCLUDED.tag_id`.
|
||||||
|
**When to use:** `PostgresStore.AssignTag`.
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Source: CONTEXT.md D-04
|
||||||
|
_, err := s.db.Exec(
|
||||||
|
`INSERT INTO tag_assignments (image, tag_id) VALUES ($1, $2)
|
||||||
|
ON CONFLICT (image) DO UPDATE SET tag_id = EXCLUDED.tag_id`,
|
||||||
|
image, tagID,
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pattern 5: main.go DATABASE_URL branching
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Source: CONTEXT.md D-07, D-08, D-09
|
||||||
|
databaseURL := os.Getenv("DATABASE_URL")
|
||||||
|
var store diun.Store
|
||||||
|
if databaseURL != "" {
|
||||||
|
    db, err := sql.Open("pgx", databaseURL)
    if err != nil {
        log.Fatalf("sql.Open postgres: %v", err)
    }
    // sql.Open is lazy with pgx — it validates the URL but does not connect.
    // Ping up front so a bad DATABASE_URL fails fast with a clear error
    // instead of surfacing mid-migration.
    if err := db.Ping(); err != nil {
        log.Fatalf("postgres ping: %v", err)
    }
|
||||||
|
if err := diun.RunPostgresMigrations(db); err != nil {
|
||||||
|
log.Fatalf("RunPostgresMigrations: %v", err)
|
||||||
|
}
|
||||||
|
store = diun.NewPostgresStore(db)
|
||||||
|
log.Println("Using PostgreSQL database")
|
||||||
|
} else {
|
||||||
|
dbPath := os.Getenv("DB_PATH")
|
||||||
|
if dbPath == "" {
|
||||||
|
dbPath = "./diun.db"
|
||||||
|
}
|
||||||
|
db, err := sql.Open("sqlite", dbPath)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("sql.Open sqlite: %v", err)
|
||||||
|
}
|
||||||
|
if err := diun.RunSQLiteMigrations(db); err != nil {
|
||||||
|
log.Fatalf("RunSQLiteMigrations: %v", err)
|
||||||
|
}
|
||||||
|
store = diun.NewSQLiteStore(db)
|
||||||
|
log.Printf("Using SQLite database at %s", dbPath)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Add `_ "github.com/jackc/pgx/v5/stdlib"` import to `main.go` (blank import registers the `"pgx"` driver name).
|
||||||
|
|
||||||
|
### Pattern 6: Docker Compose postgres profile
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# compose.yml — adds postgres profile without breaking default SQLite deploy
|
||||||
|
services:
|
||||||
|
app:
|
||||||
|
image: gitea.jeanlucmakiola.de/makiolaj/diundashboard:latest
|
||||||
|
ports:
|
||||||
|
- "8080:8080"
|
||||||
|
environment:
|
||||||
|
- WEBHOOK_SECRET=${WEBHOOK_SECRET:-}
|
||||||
|
- PORT=${PORT:-8080}
|
||||||
|
- DB_PATH=/data/diun.db
|
||||||
|
- DATABASE_URL=${DATABASE_URL:-}
|
||||||
|
volumes:
|
||||||
|
- diun-data:/data
|
||||||
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
postgres:
|
||||||
|
condition: service_healthy
|
||||||
|
        required: false # optional dependency — app still starts when the postgres service is absent (profile inactive); requires Compose v2.20+
|
||||||
|
|
||||||
|
postgres:
|
||||||
|
image: postgres:17-alpine
|
||||||
|
profiles:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
POSTGRES_USER: ${POSTGRES_USER:-diun}
|
||||||
|
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-diun}
|
||||||
|
POSTGRES_DB: ${POSTGRES_DB:-diundashboard}
|
||||||
|
volumes:
|
||||||
|
- postgres-data:/var/lib/postgresql/data
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-diun}"]
|
||||||
|
interval: 5s
|
||||||
|
timeout: 5s
|
||||||
|
retries: 5
|
||||||
|
start_period: 10s
|
||||||
|
restart: unless-stopped
|
||||||
|
|
||||||
|
volumes:
  diun-data:
  postgres-data:
# NOTE: `profiles` is only valid on services, not on top-level volumes —
# the postgres service above already carries `profiles: [postgres]`, and
# `docker compose config` rejects a `profiles` key under `volumes`.
|
||||||
|
```
|
||||||
|
|
||||||
|
Activate with: `docker compose --profile postgres up -d`
|
||||||
|
|
||||||
|
### Pattern 7: Build-tagged PostgreSQL integration tests
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Source: CONTEXT.md D-17, D-19 + export_test.go pattern
|
||||||
|
//go:build postgres
|
||||||
|
|
||||||
|
package diunwebhook
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
"os"
|
||||||
|
_ "github.com/jackc/pgx/v5/stdlib"
|
||||||
|
)
|
||||||
|
|
||||||
|
func NewTestPostgresServer() (*Server, error) {
|
||||||
|
databaseURL := os.Getenv("TEST_DATABASE_URL")
|
||||||
|
if databaseURL == "" {
|
||||||
|
databaseURL = "postgres://diun:diun@localhost:5432/diundashboard_test?sslmode=disable"
|
||||||
|
}
|
||||||
|
db, err := sql.Open("pgx", databaseURL)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if err := RunPostgresMigrations(db); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
store := NewPostgresStore(db)
|
||||||
|
return NewServer(store, ""), nil
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Anti-Patterns to Avoid
|
||||||
|
- **Using `res.LastInsertId()` after `db.Exec`**: pgx does not implement this — returns an error at runtime. Use `QueryRow(...).Scan(&id)` with `RETURNING id` instead.
|
||||||
|
- **Sharing the mutex with PostgresStore**: PostgreSQL handles concurrent writes; adding a mutex is unnecessary and hurts performance.
|
||||||
|
- **Using `INSERT OR REPLACE`**: Not valid PostgreSQL syntax. Use `INSERT ... ON CONFLICT ... DO UPDATE SET`.
|
||||||
|
- **Using `datetime('now')`**: SQLite function — not valid in PostgreSQL. Use `NOW()` or `CURRENT_TIMESTAMP`.
|
||||||
|
- **Using `?` placeholders**: Not valid in PostgreSQL. Use `$1`, `$2`, etc.
|
||||||
|
- **Using `INTEGER PRIMARY KEY AUTOINCREMENT`**: Not valid in PostgreSQL. Use `SERIAL` or `BIGSERIAL`.
|
||||||
|
- **Forgetting `//go:build postgres` on test files**: Without the build tag, the test file will be compiled for all builds — `pgx/v5/stdlib` import will fail on SQLite-only CI runs.
|
||||||
|
- **Calling `RunSQLiteMigrations` on a PostgreSQL connection**: The sqlite migration driver will fail to initialize against a PostgreSQL database.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Don't Hand-Roll
|
||||||
|
|
||||||
|
| Problem | Don't Build | Use Instead | Why |
|
||||||
|
|---------|-------------|-------------|-----|
|
||||||
|
| PostgreSQL migration tracking | Custom `schema_version` table | `golang-migrate/v4/database/pgx/v5` | Handles dirty state, locking, version history, rollbacks — all already solved |
|
||||||
|
| Connection pooling | Custom pool implementation | `database/sql` built-in pool + `pgx/v5/stdlib` | `database/sql` pool is production-grade; pgx stdlib wraps it correctly |
|
||||||
|
| Connection string parsing | Custom URL parser | Pass `DATABASE_URL` directly to `sql.Open("pgx", url)` | pgx parses standard PostgreSQL URI format natively |
|
||||||
|
| Dialect detection at runtime | Inspect driver name at query time | Separate store structs with their own SQL | Runtime dialect switching creates test surface, runtime failures; two structs is simpler |
|
||||||
|
|
||||||
|
**Key insight:** The existing `Store` interface already separates the concern — `PostgresStore` is just another implementation. There is nothing to invent.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Common Pitfalls
|
||||||
|
|
||||||
|
### Pitfall 1: LastInsertId on PostgreSQL
|
||||||
|
**What goes wrong:** `CreateTag` calls `res.LastInsertId()` — pgx returns `ErrNoLastInsertId` at runtime, not compile time.
|
||||||
|
**Why it happens:** The `database/sql` `Result` interface defines `LastInsertId()` but pgx does not support it. SQLite does.
|
||||||
|
**How to avoid:** In `PostgresStore.CreateTag`, use `QueryRow(...RETURNING id...).Scan(&id)` instead of `Exec` + `LastInsertId`.
|
||||||
|
**Warning signs:** Code compiles cleanly but panics or returns an error at runtime on tag creation.
|
||||||
|
|
||||||
|
### Pitfall 2: golang-migrate driver name mismatch
|
||||||
|
**What goes wrong:** Passing the wrong database name string to `migrate.NewWithInstance` causes "unknown driver" errors.
|
||||||
|
**Why it happens:** The `golang-migrate/database/pgx/v5` driver registers as `"pgx5"`, not `"pgx"` or `"postgres"`.
|
||||||
|
**How to avoid:** Use `"pgx5"` as the database name arg to `migrate.NewWithInstance("iofs", src, "pgx5", driver)`.
|
||||||
|
**Warning signs:** `migrate.NewWithInstance` returns an error mentioning an unknown driver.
|
||||||
|
|
||||||
|
### Pitfall 3: pgx/v5/stdlib import not registered
|
||||||
|
**What goes wrong:** `sql.Open("pgx", url)` fails with `"unknown driver pgx"`.
|
||||||
|
**Why it happens:** The `"pgx"` driver is only registered when `pgx/v5/stdlib` is imported (blank import side effect).
|
||||||
|
**How to avoid:** Add `_ "github.com/jackc/pgx/v5/stdlib"` to `main.go` and to any test files that open a `"pgx"` connection.
|
||||||
|
**Warning signs:** Runtime error "unknown driver pgx" despite pgx being in go.mod.
|
||||||
|
|
||||||
|
### Pitfall 4: SQLite `migrate.go` import conflict
|
||||||
|
**What goes wrong:** Adding the pgx/v5 migrate driver import to `migrate.go` introduces pgx as a dependency of the SQLite migration path.
|
||||||
|
**Why it happens:** Go imports are file-scoped; putting both drivers in one file compiles both.
|
||||||
|
**How to avoid:** Put `RunSQLiteMigrations` and `RunPostgresMigrations` in separate files, or at minimum keep the blank driver import for pgx only in the PostgreSQL branch. Alternatively, keep both in `migrate.go` — both drivers are compiled into the binary regardless; this is a binary size trade-off, not a correctness issue.
|
||||||
|
**Warning signs:** `modernc.org/sqlite` and `pgx` both appear in a file that should only need one.
|
||||||
|
|
||||||
|
### Pitfall 5: Docker Compose `required: false` on depends_on
|
||||||
|
**What goes wrong:** `app` service fails to start when postgres profile is inactive because `depends_on.postgres` is unconditional.
|
||||||
|
**Why it happens:** `depends_on` without `required: false` makes the dependency mandatory even when the postgres profile is not active.
|
||||||
|
**How to avoid:** Use `depends_on.postgres.required: false` so the health check dependency is only enforced when the postgres service is actually started. Requires Docker Compose v2.20+.
|
||||||
|
**Warning signs:** `docker compose up` (no profile) fails with "service postgres not found".
|
||||||
|
|
||||||
|
### Pitfall 6: GetUpdates timestamp scanning differences
|
||||||
|
**What goes wrong:** `GetUpdates` scans `received_at` and `created` as strings (`createdStr`, `receivedStr`) and then calls `time.Parse(time.RFC3339, ...)`. In the PostgreSQL schema these columns are `TEXT` (by design), so scanning behaves the same. If someone types them as `TIMESTAMPTZ` instead, scanning into a string breaks.
|
||||||
|
**Why it happens:** The SQLiteStore scans timestamps as strings because SQLite stores them as TEXT. If the PostgreSQL migration uses `TEXT` for these columns (matching the SQLite schema), the existing scan logic works unchanged in `PostgresStore`.
|
||||||
|
**How to avoid:** Use `TEXT NOT NULL` for `received_at`, `acknowledged_at`, and `created` in the PostgreSQL migration, mirroring the SQLite schema exactly. Do not use `TIMESTAMPTZ` unless you also update the scan/format logic.
|
||||||
|
**Warning signs:** `sql: Scan error ... unsupported Scan, storing driver.Value type time.Time into type *string`.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Code Examples
|
||||||
|
|
||||||
|
### PostgreSQL baseline migration (0001_initial_schema.up.sql)
|
||||||
|
```sql
|
||||||
|
-- Source: sqlite/0001_initial_schema.up.sql translated to PostgreSQL dialect
|
||||||
|
CREATE TABLE IF NOT EXISTS updates (
|
||||||
|
image TEXT PRIMARY KEY,
|
||||||
|
diun_version TEXT NOT NULL DEFAULT '',
|
||||||
|
hostname TEXT NOT NULL DEFAULT '',
|
||||||
|
status TEXT NOT NULL DEFAULT '',
|
||||||
|
provider TEXT NOT NULL DEFAULT '',
|
||||||
|
hub_link TEXT NOT NULL DEFAULT '',
|
||||||
|
mime_type TEXT NOT NULL DEFAULT '',
|
||||||
|
digest TEXT NOT NULL DEFAULT '',
|
||||||
|
created TEXT NOT NULL DEFAULT '',
|
||||||
|
platform TEXT NOT NULL DEFAULT '',
|
||||||
|
ctn_name TEXT NOT NULL DEFAULT '',
|
||||||
|
ctn_id TEXT NOT NULL DEFAULT '',
|
||||||
|
ctn_state TEXT NOT NULL DEFAULT '',
|
||||||
|
ctn_status TEXT NOT NULL DEFAULT '',
|
||||||
|
received_at TEXT NOT NULL,
|
||||||
|
acknowledged_at TEXT
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS tags (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
name TEXT NOT NULL UNIQUE
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS tag_assignments (
|
||||||
|
image TEXT PRIMARY KEY,
|
||||||
|
tag_id INTEGER NOT NULL REFERENCES tags(id) ON DELETE CASCADE
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
Key differences from SQLite version:
|
||||||
|
- `SERIAL PRIMARY KEY` replaces `INTEGER PRIMARY KEY AUTOINCREMENT`
|
||||||
|
- All other columns are identical (`TEXT` for every data column; `tag_id` remains `INTEGER` in both dialects)
|
||||||
|
- `ON DELETE CASCADE` is the same — PostgreSQL enforces FK constraints by default (no equivalent of `PRAGMA foreign_keys = ON` needed)
|
||||||
|
|
||||||
|
### PostgreSQL down migration (0001_initial_schema.down.sql)
|
||||||
|
```sql
|
||||||
|
DROP TABLE IF EXISTS tag_assignments;
|
||||||
|
DROP TABLE IF EXISTS tags;
|
||||||
|
DROP TABLE IF EXISTS updates;
|
||||||
|
```
|
||||||
|
Identical to SQLite version.
|
||||||
|
|
||||||
|
### UpsertEvent (PostgreSQL)
|
||||||
|
```go
|
||||||
|
// Positional params $1..$15, acknowledged_at reset to NULL on conflict
|
||||||
|
_, err := s.db.Exec(`
|
||||||
|
INSERT INTO updates (
|
||||||
|
image, diun_version, hostname, status, provider,
|
||||||
|
hub_link, mime_type, digest, created, platform,
|
||||||
|
ctn_name, ctn_id, ctn_state, ctn_status,
|
||||||
|
received_at, acknowledged_at
|
||||||
|
) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,NULL)
|
||||||
|
ON CONFLICT(image) DO UPDATE SET
|
||||||
|
diun_version = EXCLUDED.diun_version,
|
||||||
|
hostname = EXCLUDED.hostname,
|
||||||
|
status = EXCLUDED.status,
|
||||||
|
provider = EXCLUDED.provider,
|
||||||
|
hub_link = EXCLUDED.hub_link,
|
||||||
|
mime_type = EXCLUDED.mime_type,
|
||||||
|
digest = EXCLUDED.digest,
|
||||||
|
created = EXCLUDED.created,
|
||||||
|
platform = EXCLUDED.platform,
|
||||||
|
ctn_name = EXCLUDED.ctn_name,
|
||||||
|
ctn_id = EXCLUDED.ctn_id,
|
||||||
|
ctn_state = EXCLUDED.ctn_state,
|
||||||
|
ctn_status = EXCLUDED.ctn_status,
|
||||||
|
received_at = EXCLUDED.received_at,
|
||||||
|
acknowledged_at = NULL`,
|
||||||
|
event.Image, event.DiunVersion, ...
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
### AcknowledgeUpdate (PostgreSQL)
|
||||||
|
```go
|
||||||
|
// NOW() replaces datetime('now'), $1 replaces ?
|
||||||
|
res, err := s.db.Exec(`UPDATE updates SET acknowledged_at = NOW() WHERE image = $1`, image)
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## State of the Art
|
||||||
|
|
||||||
|
| Old Approach | Current Approach | When Changed | Impact |
|
||||||
|
|--------------|------------------|--------------|--------|
|
||||||
|
| `lib/pq` (archived) | `pgx/v5/stdlib` | pgx v4→v5, lib/pq archived ~2023 | pgx is now the consensus standard Go PostgreSQL driver |
|
||||||
|
| `golang-migrate database/postgres` (uses lib/pq) | `golang-migrate database/pgx/v5` | golang-migrate added pgx/v5 sub-package | Use the pgx-native driver to avoid a lib/pq dependency |
|
||||||
|
| Single global `RunMigrations` | Separate `RunSQLiteMigrations` / `RunPostgresMigrations` | This phase | Each function owns its embed directive and driver import |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Open Questions
|
||||||
|
|
||||||
|
1. **Rename `RunMigrations` to `RunSQLiteMigrations`**
|
||||||
|
- What we know: `RunMigrations` is only called in `main.go` and `export_test.go`. Renaming breaks two call sites.
|
||||||
|
- What's unclear: Whether to rename (consistency) or keep old name and add a new `RunPostgresMigrations` (backward compatible for hypothetical external callers).
|
||||||
|
- Recommendation: Rename to `RunSQLiteMigrations` — this is internal-only code and symmetry aids comprehension. Update the two call sites.
|
||||||
|
|
||||||
|
2. **`depends_on.required: false` Docker Compose version requirement**
|
||||||
|
- What we know: `required: false` under `depends_on` was added in Docker Compose v2.20.
|
||||||
|
- What's unclear: Whether the target deployment environment has Compose v2.20+. Docker 29.0.0 (confirmed present) ships with Compose v2.29+ — this is not a concern for the dev machine. Production deployments depend on the user's Docker version.
|
||||||
|
- Recommendation: Use `required: false`; document minimum Docker Compose v2.20 in compose.yml comment.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Environment Availability
|
||||||
|
|
||||||
|
| Dependency | Required By | Available | Version | Fallback |
|
||||||
|
|------------|------------|-----------|---------|----------|
|
||||||
|
| Docker | Compose postgres profile, integration tests | ✓ | 29.0.0 | — |
|
||||||
|
| PostgreSQL server | Integration test execution (`-tags postgres`) | ✗ | — | Tests skip via build tag; Docker Compose spins up postgres for CI |
|
||||||
|
| `pg_isready` / psql client | Health check inside postgres container | ✗ (host) | — | `pg_isready` is inside the `postgres:17-alpine` image — not needed on host |
|
||||||
|
| Go 1.26 | Build | Not directly measurable from this shell | go.mod specifies 1.26 | — |
|
||||||
|
|
||||||
|
**Missing dependencies with no fallback:**
|
||||||
|
- None that block development. PostgreSQL integration tests require a live server but are gated behind `//go:build postgres`.
|
||||||
|
|
||||||
|
**Missing dependencies with fallback:**
|
||||||
|
- PostgreSQL server (host): not installed, but not required — tests use build tags, Docker Compose provides the server for integration runs.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Project Constraints (from CLAUDE.md)
|
||||||
|
|
||||||
|
Directives the planner must verify compliance with:
|
||||||
|
|
||||||
|
- **No CGO**: `CGO_ENABLED=0` in Dockerfile Stage 2. `pgx/v5` is pure Go — this constraint is satisfied. Verify that adding `pgx/v5` does not transitively pull in any CGO package.
|
||||||
|
- **Pure Go SQLite driver**: `modernc.org/sqlite` must remain. Adding pgx does not replace it — both coexist.
|
||||||
|
- **Database must support both SQLite and PostgreSQL**: This is exactly what Phase 3 delivers via the Store interface.
|
||||||
|
- **`database/sql` abstraction**: Both stores use `*sql.DB`. No pgx native interface in handlers.
|
||||||
|
- **`net/http` only, no router framework**: No impact from this phase.
|
||||||
|
- **`gofmt` enforced**: All new `.go` files must be `gofmt`-clean.
|
||||||
|
- **Naming conventions**: New file `postgres_store.go`, new type `PostgresStore`, new constructor `NewPostgresStore`. Test helper `NewTestPostgresServer`. Functions `RunSQLiteMigrations` / `RunPostgresMigrations`.
|
||||||
|
- **Error handling**: `http.Error(w, ..., status)` with lowercase messages. Not directly affected — PostgresStore is storage-layer only. `log.Fatalf` in `main.go` for connection/migration failures (matches existing pattern).
|
||||||
|
- **No global state**: `PostgresStore` holds `*sql.DB` as struct field, no package-level vars — consistent with Phase 2 refactor.
|
||||||
|
- **GSD workflow**: Do not make direct edits outside a GSD phase.
|
||||||
|
- **Module name**: `awesomeProject` (in go.mod). Import as `diun "awesomeProject/pkg/diunwebhook"` in main.go.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Sources
|
||||||
|
|
||||||
|
### Primary (HIGH confidence)
|
||||||
|
- pkg.go.dev/github.com/jackc/pgx/v5 — version confirmed v5.9.1 (Mar 22 2026), stdlib package import path, driver name `"pgx"`, pure Go confirmed
|
||||||
|
- pkg.go.dev/github.com/jackc/pgx/v5/stdlib — `sql.Open("pgx", url)` pattern, `LastInsertId` not supported
|
||||||
|
- pkg.go.dev/github.com/golang-migrate/migrate/v4/database/pgx/v5 — `WithInstance(*sql.DB, *Config)`, driver registers as `"pgx5"`, v4.19.1
|
||||||
|
- github.com/golang-migrate/migrate/blob/master/database/pgx/v5/pgx.go — confirmed `database.Register("pgx5", &db)` registration name
|
||||||
|
- Existing codebase: `store.go`, `sqlite_store.go`, `migrate.go`, `export_test.go`, `main.go` — all read directly
|
||||||
|
|
||||||
|
### Secondary (MEDIUM confidence)
|
||||||
|
- github.com/jackc/pgx/issues/1483 — `LastInsertId` not supported by pgx, confirmed by multiple sources
|
||||||
|
- Docker Compose docs (docs.docker.com/reference/compose-file/services/) — profiles syntax, depends_on with required: false
|
||||||
|
|
||||||
|
### Tertiary (LOW confidence)
|
||||||
|
- WebSearch results re: Docker Compose `required: false` version requirement — states Compose v2.20; not independently verified against official changelog. However, Docker 29.0.0 (installed) ships Compose v2.29+, so this is moot for the dev machine.
|
||||||
|
|
||||||
|
## Metadata
|
||||||
|
|
||||||
|
**Confidence breakdown:**
|
||||||
|
- Standard stack: HIGH — versions verified via pkg.go.dev on 2026-03-24
|
||||||
|
- Architecture: HIGH — based on existing codebase patterns + confirmed library APIs
|
||||||
|
- Pitfalls: HIGH for LastInsertId, driver name, import registration (all verified via official sources); MEDIUM for Docker Compose `required: false` version boundary
|
||||||
|
|
||||||
|
**Research date:** 2026-03-24
|
||||||
|
**Valid until:** 2026-05-24 (stable ecosystem; pgx and golang-migrate release infrequently)
|
||||||
140
.planning/phases/03-postgresql-support/03-VERIFICATION.md
Normal file
140
.planning/phases/03-postgresql-support/03-VERIFICATION.md
Normal file
@@ -0,0 +1,140 @@
|
|||||||
|
---
|
||||||
|
phase: 03-postgresql-support
|
||||||
|
verified: 2026-03-24T10:00:00Z
|
||||||
|
status: gaps_found
|
||||||
|
score: 9/10 must-haves verified
|
||||||
|
re_verification: false
|
||||||
|
gaps:
|
||||||
|
- truth: "pgx/v5 is a direct dependency in go.mod"
|
||||||
|
status: failed
|
||||||
|
reason: "github.com/jackc/pgx/v5 v5.9.1 is listed as // indirect in go.mod, but main.go has a direct blank import _ \"github.com/jackc/pgx/v5/stdlib\". go mod tidy confirms it should be in the direct require block."
|
||||||
|
artifacts:
|
||||||
|
- path: "go.mod"
|
||||||
|
issue: "pgx/v5 v5.9.1 appears in the indirect block; should be in the direct block alongside github.com/golang-migrate/migrate/v4 and modernc.org/sqlite"
|
||||||
|
missing:
|
||||||
|
- "Run go mod tidy to move github.com/jackc/pgx/v5 v5.9.1 from indirect to direct require block in go.mod"
|
||||||
|
human_verification:
|
||||||
|
- test: "PostgreSQL end-to-end: start app with DATABASE_URL pointing to a real Postgres instance and send a webhook"
|
||||||
|
expected: "Startup logs 'Using PostgreSQL database', webhook stores to Postgres, GET /api/updates returns the event"
|
||||||
|
why_human: "No PostgreSQL instance available in automated environment; cannot test actual DB connectivity"
|
||||||
|
- test: "docker compose --profile postgres up starts correctly with DATABASE_URL set in .env"
|
||||||
|
expected: "PostgreSQL container starts, passes health check, app connects to it, dashboard shows data"
|
||||||
|
why_human: "Full compose stack requires running Docker daemon and network routing between containers"
|
||||||
|
- test: "Existing SQLite user upgrade: start new binary against an old diun.db with existing rows"
|
||||||
|
expected: "golang-migrate detects schema is already at version 1, logs ErrNoChange (no-op), all existing rows visible in dashboard"
|
||||||
|
why_human: "Requires a pre-existing SQLite database file with data from a previous binary version"
|
||||||
|
---
|
||||||
|
|
||||||
|
# Phase 03: PostgreSQL Support Verification Report
|
||||||
|
|
||||||
|
**Phase Goal:** Users running PostgreSQL infrastructure can point DiunDashboard at a Postgres database via DATABASE_URL and the dashboard works identically to the SQLite deployment
|
||||||
|
**Verified:** 2026-03-24T10:00:00Z
|
||||||
|
**Status:** gaps_found
|
||||||
|
**Re-verification:** No — initial verification
|
||||||
|
|
||||||
|
## Goal Achievement
|
||||||
|
|
||||||
|
### Observable Truths
|
||||||
|
|
||||||
|
| # | Truth | Status | Evidence |
|
||||||
|
|----|---------------------------------------------------------------------------------------|------------|----------------------------------------------------------------------------------------------|
|
||||||
|
| 1 | Setting DATABASE_URL starts the app using PostgreSQL; omitting it falls back to SQLite | ✓ VERIFIED | main.go L20-46: branches on os.Getenv("DATABASE_URL"), correct startup log for each path |
|
||||||
|
| 2 | A fresh PostgreSQL deployment receives all schema tables via automatic migration | ✓ VERIFIED | RunPostgresMigrations wired in main.go L27; migrations/postgres/0001_initial_schema.up.sql creates all 3 tables |
|
||||||
|
| 3 | Existing SQLite users upgrade without data loss (baseline migration = current schema) | ✓ VERIFIED | SQLite migration unchanged; RunSQLiteMigrations called in else branch; `CREATE TABLE IF NOT EXISTS` pattern is idempotent |
|
||||||
|
| 4 | App can be run with Docker Compose using an optional postgres service profile | ✓ VERIFIED | compose.yml and compose.dev.yml both have `profiles: [postgres]`; docker compose config validates |
|
||||||
|
| 5 | PostgresStore implements all 9 Store interface methods | ✓ VERIFIED | 9 methods found; go build ./pkg/diunwebhook/ succeeds (compiler enforces interface compliance) |
|
||||||
|
| 6 | PostgreSQL migration creates identical 3-table schema to SQLite | ✓ VERIFIED | 0001_initial_schema.up.sql: updates, tags (SERIAL PK), tag_assignments with FK cascade |
|
||||||
|
| 7 | Duplicate tag creation returns 409 on both backends | ✓ VERIFIED | diunwebhook.go L172: strings.Contains(strings.ToLower(err.Error()), "unique") — case-insensitive |
|
||||||
|
| 8 | All existing SQLite tests pass | ✓ VERIFIED | go test -count=1 ./pkg/diunwebhook/ — 22 tests, all PASS, 0 failures |
|
||||||
|
| 9 | Startup log identifies active backend | ✓ VERIFIED | main.go L31: "Using PostgreSQL database" / L45: "Using SQLite database at %s" |
|
||||||
|
| 10 | pgx/v5 is a direct dependency in go.mod | ✗ FAILED | Listed as `// indirect` in go.mod; go mod tidy shows it should be in the direct require block |
|
||||||
|
|
||||||
|
**Score:** 9/10 truths verified
|
||||||
|
|
||||||
|
### Required Artifacts
|
||||||
|
|
||||||
|
| Artifact | Expected | Status | Details |
|
||||||
|
|-----------------------------------------------------------------------|---------------------------------------------|-------------|----------------------------------------------------------------------------------------------------------|
|
||||||
|
| `pkg/diunwebhook/postgres_store.go` | PostgresStore implementing all 9 methods | ✓ VERIFIED | 9 methods, no mutex, SetMaxOpenConns(25), RETURNING id in CreateTag, ON CONFLICT DO UPDATE in AssignTag |
|
||||||
|
| `pkg/diunwebhook/migrate.go` | RunSQLiteMigrations + RunPostgresMigrations | ✓ VERIFIED | Both functions present, both go:embed directives present, pgx5 driver name correct |
|
||||||
|
| `pkg/diunwebhook/migrations/postgres/0001_initial_schema.up.sql` | PostgreSQL baseline schema (3 tables) | ✓ VERIFIED | SERIAL PRIMARY KEY, all 3 tables, TEXT timestamps matching scan logic |
|
||||||
|
| `pkg/diunwebhook/migrations/postgres/0001_initial_schema.down.sql` | PostgreSQL rollback | ✓ VERIFIED | DROP TABLE IF EXISTS for all 3 in dependency order |
|
||||||
|
| `cmd/diunwebhook/main.go` | DATABASE_URL branching logic | ✓ VERIFIED | Full branching logic, both startup paths, pgx/v5/stdlib blank import |
|
||||||
|
| `compose.yml` | Production compose with postgres profile | ✓ VERIFIED | profiles: [postgres], pg_isready healthcheck, required: false, postgres-data volume |
|
||||||
|
| `compose.dev.yml` | Dev compose with postgres profile | ✓ VERIFIED | profiles: [postgres], port 5432 exposed, required: false |
|
||||||
|
| `pkg/diunwebhook/postgres_test.go` | Build-tagged PostgreSQL integration helper | ✓ VERIFIED | //go:build postgres, NewTestPostgresServer, TEST_DATABASE_URL env var |
|
||||||
|
| `pkg/diunwebhook/diunwebhook.go` | Case-insensitive UNIQUE detection | ✓ VERIFIED | strings.Contains(strings.ToLower(err.Error()), "unique") at L172 |
|
||||||
|
| `go.mod` | pgx/v5 as direct dependency | ✗ GAP | github.com/jackc/pgx/v5 v5.9.1 in indirect block; go mod tidy diff confirms direct block is required |
|
||||||
|
|
||||||
|
### Key Link Verification
|
||||||
|
|
||||||
|
| From | To | Via | Status | Details |
|
||||||
|
|-----------------------------------|--------------------------------------|----------------------------------|-------------|----------------------------------------------------------------------|
|
||||||
|
| `cmd/diunwebhook/main.go` | `pkg/diunwebhook/postgres_store.go` | `diun.NewPostgresStore(db)` | ✓ WIRED | Line 30: `store = diun.NewPostgresStore(db)` |
|
||||||
|
| `cmd/diunwebhook/main.go` | `pkg/diunwebhook/migrate.go` | `diun.RunPostgresMigrations(db)` | ✓ WIRED | Line 27: `diun.RunPostgresMigrations(db)` — also RunSQLiteMigrations at L41 |
|
||||||
|
| `cmd/diunwebhook/main.go` | `pgx/v5/stdlib` | blank import for driver reg | ✓ WIRED | Line 15: `_ "github.com/jackc/pgx/v5/stdlib"` |
|
||||||
|
| `pkg/diunwebhook/postgres_store.go` | `pkg/diunwebhook/store.go` | implements Store interface | ✓ WIRED | Compiler-enforced: go build succeeds; 9 method signatures match interface |
|
||||||
|
| `pkg/diunwebhook/migrate.go` | `migrations/postgres/` | go:embed directive | ✓ WIRED | `//go:embed migrations/postgres` with `var postgresMigrations embed.FS` |
|
||||||
|
|
||||||
|
### Data-Flow Trace (Level 4)
|
||||||
|
|
||||||
|
Not applicable. This phase delivers persistence infrastructure (store, migrations, startup wiring) — no new UI components or data-rendering paths were added. The existing frontend polls the same `/api/updates` endpoint; the data source change is at the backend store layer, which is verified via interface compliance and compilation.
|
||||||
|
|
||||||
|
### Behavioral Spot-Checks
|
||||||
|
|
||||||
|
| Behavior | Command | Result | Status |
|
||||||
|
|---------------------------------------------------|--------------------------------------------------------------------------|-------------|---------|
|
||||||
|
| Full project compiles (both stores + drivers) | go build ./... | Exit 0 | ✓ PASS |
|
||||||
|
| go vet clean (no suspicious constructs) | go vet ./... | Exit 0 | ✓ PASS |
|
||||||
|
| All 22 SQLite tests pass | go test -count=1 ./pkg/diunwebhook/ | ok (0.046s) | ✓ PASS |
|
||||||
|
| postgres_test.go excluded without build tag | go test -count=1 ./pkg/diunwebhook/ (no -tags postgres) | Passes (no pgx import error) | ✓ PASS |
|
||||||
|
| compose.yml validates | docker compose config --quiet | Exit 0 | ✓ PASS |
|
||||||
|
| compose --profile postgres validates | docker compose --profile postgres config --quiet | Exit 0 | ✓ PASS |
|
||||||
|
| go mod tidy reports pgx/v5 indirect as wrong | go mod tidy -diff | Diff shows pgx/v5 should be direct | ✗ FAIL |
|
||||||
|
|
||||||
|
### Requirements Coverage
|
||||||
|
|
||||||
|
| Requirement | Source Plans | Description | Status | Evidence |
|
||||||
|
|-------------|---------------|---------------------------------------------------------------------------------------|-------------|-----------------------------------------------------------------------|
|
||||||
|
| DB-01 | 03-01, 03-02 | PostgreSQL is supported as an alternative to SQLite via pgx v5 driver | ✓ SATISFIED | PostgresStore implements Store, pgx/v5/stdlib blank-imported in main.go, builds and vets cleanly |
|
||||||
|
| DB-02 | 03-02 | Database backend is selected via DATABASE_URL env var (present=PG, absent=SQLite) | ✓ SATISFIED | main.go L20-46: os.Getenv("DATABASE_URL") branches to correct store and migration runner |
|
||||||
|
| DB-03 | 03-01, 03-02 | Existing SQLite users can upgrade without data loss (baseline migration = current schema) | ✓ SATISFIED | SQLite migration path unchanged; RunSQLiteMigrations called when DATABASE_URL absent; schema tables match |
|
||||||
|
|
||||||
|
**Orphaned requirements check:** No requirements assigned to Phase 3 in REQUIREMENTS.md beyond DB-01, DB-02, DB-03. None are orphaned.
|
||||||
|
|
||||||
|
### Anti-Patterns Found
|
||||||
|
|
||||||
|
| File | Line | Pattern | Severity | Impact |
|
||||||
|
|---------|------|--------------------------------------|-----------|-------------------------------------------------------------------------------------------------------|
|
||||||
|
| go.mod | 16 | `pgx/v5 v5.9.1 // indirect` | ⚠️ Warning | go mod tidy flags this as incorrect. Direct blank import in main.go means it should be in the direct require block. Does not affect compilation or runtime, but violates Go module hygiene conventions and the plan's stated acceptance criteria. |
|
||||||
|
|
||||||
|
### Human Verification Required
|
||||||
|
|
||||||
|
#### 1. PostgreSQL End-to-End Connectivity
|
||||||
|
|
||||||
|
**Test:** Start the app with a real PostgreSQL instance (e.g., `docker compose --profile postgres up -d`), set `DATABASE_URL=postgres://diun:diun@localhost:5432/diundashboard?sslmode=disable`, send a webhook POST, then fetch `/api/updates`
|
||||||
|
**Expected:** App logs "Using PostgreSQL database", webhook stores data in Postgres, GET /api/updates returns the event with correct fields, tags and acknowledgments work identically to SQLite
|
||||||
|
**Why human:** No PostgreSQL instance available in automated environment
|
||||||
|
|
||||||
|
#### 2. Docker Compose postgres profile end-to-end
|
||||||
|
|
||||||
|
**Test:** Run `docker compose --profile postgres up` with a `.env` containing `DATABASE_URL=postgres://diun:diun@postgres:5432/diundashboard?sslmode=disable`, confirm app waits for postgres health check, connects, and serves the dashboard
|
||||||
|
**Expected:** postgres service starts, pg_isready passes, app container starts after it, dashboard loads in browser
|
||||||
|
**Why human:** Full compose stack requires running Docker daemon and inter-container networking
|
||||||
|
|
||||||
|
#### 3. SQLite backward-compatibility upgrade
|
||||||
|
|
||||||
|
**Test:** Take a `diun.db` file created by a pre-Phase-3 binary (with existing rows in updates, tags, tag_assignments), start the new binary pointing at it (DATABASE_URL unset, DB_PATH set to that file)
|
||||||
|
**Expected:** golang-migrate detects schema is already at migration version 1 (ErrNoChange, no-op), all existing rows appear in the dashboard without any manual schema changes
|
||||||
|
**Why human:** Requires a pre-existing SQLite database from a previous binary version
|
||||||
|
|
||||||
|
### Gaps Summary
|
||||||
|
|
||||||
|
One gap found: `github.com/jackc/pgx/v5` is marked `// indirect` in `go.mod` even though `cmd/diunwebhook/main.go` directly imports `_ "github.com/jackc/pgx/v5/stdlib"`. Running `go mod tidy` moves it to the direct require block. This is a module hygiene issue — the binary compiles and runs correctly — but it violates the DB-01 plan acceptance criterion ("pgx/v5 is in go.mod as a direct dependency") and will cause confusion for anyone reading go.mod expecting to understand the project's direct dependencies.
|
||||||
|
|
||||||
|
**Fix:** Run `go mod tidy` in the project root. This requires no code changes and takes under 1 second.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
_Verified: 2026-03-24T10:00:00Z_
|
||||||
|
_Verifier: Claude (gsd-verifier)_
|
||||||
346
.planning/phases/04-ux-improvements/04-01-PLAN.md
Normal file
346
.planning/phases/04-ux-improvements/04-01-PLAN.md
Normal file
@@ -0,0 +1,346 @@
|
|||||||
|
---
|
||||||
|
phase: 04-ux-improvements
|
||||||
|
plan: 01
|
||||||
|
type: execute
|
||||||
|
wave: 1
|
||||||
|
depends_on: []
|
||||||
|
files_modified:
|
||||||
|
- pkg/diunwebhook/store.go
|
||||||
|
- pkg/diunwebhook/sqlite_store.go
|
||||||
|
- pkg/diunwebhook/postgres_store.go
|
||||||
|
- pkg/diunwebhook/diunwebhook.go
|
||||||
|
- pkg/diunwebhook/diunwebhook_test.go
|
||||||
|
- pkg/diunwebhook/export_test.go
|
||||||
|
- cmd/diunwebhook/main.go
|
||||||
|
autonomous: true
|
||||||
|
requirements:
|
||||||
|
- BULK-01
|
||||||
|
- BULK-02
|
||||||
|
|
||||||
|
must_haves:
|
||||||
|
truths:
|
||||||
|
- "POST /api/updates/acknowledge-all marks all unacknowledged updates and returns the count"
|
||||||
|
- "POST /api/updates/acknowledge-by-tag marks only unacknowledged updates in the given tag and returns the count"
|
||||||
|
- "Both endpoints return 200 with {count: 0} when nothing matches (not 404)"
|
||||||
|
artifacts:
|
||||||
|
- path: "pkg/diunwebhook/store.go"
|
||||||
|
provides: "Extended Store interface with AcknowledgeAll and AcknowledgeByTag"
|
||||||
|
contains: "AcknowledgeAll"
|
||||||
|
- path: "pkg/diunwebhook/sqlite_store.go"
|
||||||
|
provides: "SQLiteStore bulk acknowledge implementations"
|
||||||
|
contains: "func (s *SQLiteStore) AcknowledgeAll"
|
||||||
|
- path: "pkg/diunwebhook/postgres_store.go"
|
||||||
|
provides: "PostgresStore bulk acknowledge implementations"
|
||||||
|
contains: "func (s *PostgresStore) AcknowledgeAll"
|
||||||
|
- path: "pkg/diunwebhook/diunwebhook.go"
|
||||||
|
provides: "HTTP handlers for bulk acknowledge endpoints"
|
||||||
|
contains: "AcknowledgeAllHandler"
|
||||||
|
- path: "cmd/diunwebhook/main.go"
|
||||||
|
provides: "Route registration for new endpoints"
|
||||||
|
contains: "/api/updates/acknowledge-all"
|
||||||
|
key_links:
|
||||||
|
- from: "cmd/diunwebhook/main.go"
|
||||||
|
to: "pkg/diunwebhook/diunwebhook.go"
|
||||||
|
via: "mux.HandleFunc registration"
|
||||||
|
pattern: "HandleFunc.*acknowledge"
|
||||||
|
- from: "pkg/diunwebhook/diunwebhook.go"
|
||||||
|
to: "pkg/diunwebhook/store.go"
|
||||||
|
via: "s.store.AcknowledgeAll() and s.store.AcknowledgeByTag()"
|
||||||
|
pattern: "s\\.store\\.Acknowledge(All|ByTag)"
|
||||||
|
---
|
||||||
|
|
||||||
|
<objective>
|
||||||
|
Add backend support for bulk acknowledge operations: acknowledge all pending updates at once, and acknowledge all pending updates within a specific tag group.
|
||||||
|
|
||||||
|
Purpose: Enables the frontend (Plan 03) to offer "Dismiss All" and "Dismiss Group" buttons.
|
||||||
|
Output: Two new Store interface methods, implementations for both SQLite and PostgreSQL, two new HTTP handlers, route registrations, and tests.
|
||||||
|
</objective>
|
||||||
|
|
||||||
|
<execution_context>
|
||||||
|
@$HOME/.claude/get-shit-done/workflows/execute-plan.md
|
||||||
|
@$HOME/.claude/get-shit-done/templates/summary.md
|
||||||
|
</execution_context>
|
||||||
|
|
||||||
|
<context>
|
||||||
|
@.planning/PROJECT.md
|
||||||
|
@.planning/ROADMAP.md
|
||||||
|
@.planning/STATE.md
|
||||||
|
@.planning/phases/04-ux-improvements/04-CONTEXT.md
|
||||||
|
@.planning/phases/04-ux-improvements/04-RESEARCH.md
|
||||||
|
|
||||||
|
<interfaces>
|
||||||
|
<!-- Store interface the executor must extend -->
|
||||||
|
From pkg/diunwebhook/store.go:
|
||||||
|
```go
|
||||||
|
type Store interface {
|
||||||
|
UpsertEvent(event DiunEvent) error
|
||||||
|
GetUpdates() (map[string]UpdateEntry, error)
|
||||||
|
AcknowledgeUpdate(image string) (found bool, err error)
|
||||||
|
ListTags() ([]Tag, error)
|
||||||
|
CreateTag(name string) (Tag, error)
|
||||||
|
DeleteTag(id int) (found bool, err error)
|
||||||
|
AssignTag(image string, tagID int) error
|
||||||
|
UnassignTag(image string) error
|
||||||
|
TagExists(id int) (bool, error)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
From pkg/diunwebhook/sqlite_store.go (AcknowledgeUpdate pattern to follow):
|
||||||
|
```go
|
||||||
|
func (s *SQLiteStore) AcknowledgeUpdate(image string) (found bool, err error) {
|
||||||
|
s.mu.Lock()
|
||||||
|
defer s.mu.Unlock()
|
||||||
|
res, err := s.db.Exec(`UPDATE updates SET acknowledged_at = datetime('now') WHERE image = ?`, image)
|
||||||
|
if err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
n, _ := res.RowsAffected()
|
||||||
|
return n > 0, nil
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
From pkg/diunwebhook/postgres_store.go (same method, PostgreSQL dialect):
|
||||||
|
```go
|
||||||
|
func (s *PostgresStore) AcknowledgeUpdate(image string) (found bool, err error) {
|
||||||
|
res, err := s.db.Exec(`UPDATE updates SET acknowledged_at = NOW() WHERE image = $1`, image)
|
||||||
|
if err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
n, _ := res.RowsAffected()
|
||||||
|
return n > 0, nil
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
From pkg/diunwebhook/diunwebhook.go (DismissHandler pattern to follow):
|
||||||
|
```go
|
||||||
|
func (s *Server) DismissHandler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if r.Method != http.MethodPatch {
|
||||||
|
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// ...
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Current route registration order in cmd/diunwebhook/main.go:
|
||||||
|
```go
|
||||||
|
mux.HandleFunc("/api/updates/", srv.DismissHandler)
|
||||||
|
mux.HandleFunc("/api/updates", srv.UpdatesHandler)
|
||||||
|
```
|
||||||
|
</interfaces>
|
||||||
|
</context>
|
||||||
|
|
||||||
|
<tasks>
|
||||||
|
|
||||||
|
<task type="auto" tdd="true">
|
||||||
|
<name>Task 1: Extend Store interface and implement AcknowledgeAll + AcknowledgeByTag with store-level tests</name>
|
||||||
|
<files>pkg/diunwebhook/store.go, pkg/diunwebhook/sqlite_store.go, pkg/diunwebhook/postgres_store.go, pkg/diunwebhook/diunwebhook_test.go, pkg/diunwebhook/export_test.go</files>
|
||||||
|
<read_first>
|
||||||
|
- pkg/diunwebhook/store.go
|
||||||
|
- pkg/diunwebhook/sqlite_store.go
|
||||||
|
- pkg/diunwebhook/postgres_store.go
|
||||||
|
- pkg/diunwebhook/diunwebhook_test.go
|
||||||
|
- pkg/diunwebhook/export_test.go
|
||||||
|
</read_first>
|
||||||
|
<behavior>
|
||||||
|
- Test 1: AcknowledgeAll on empty DB returns count=0, no error
|
||||||
|
- Test 2: AcknowledgeAll with 3 unacknowledged updates returns count=3; subsequent GetUpdates shows all acknowledged
|
||||||
|
- Test 3: AcknowledgeAll with 2 unacknowledged + 1 already acknowledged returns count=2
|
||||||
|
- Test 4: AcknowledgeByTag with valid tag_id returns count of matching unacknowledged updates in that tag
|
||||||
|
- Test 5: AcknowledgeByTag with non-existent tag_id returns count=0, no error
|
||||||
|
- Test 6: AcknowledgeByTag does not affect updates in other tags or untagged updates
|
||||||
|
</behavior>
|
||||||
|
<action>
|
||||||
|
TDD approach -- write tests first, then implement:
|
||||||
|
|
||||||
|
1. Add test helper exports to `export_test.go`:
|
||||||
|
```go
|
||||||
|
func (s *Server) TestAcknowledgeAll() (int, error) {
|
||||||
|
return s.Store().AcknowledgeAll()
|
||||||
|
}
|
||||||
|
func (s *Server) TestAcknowledgeByTag(tagID int) (int, error) {
|
||||||
|
return s.Store().AcknowledgeByTag(tagID)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
(Add a `Store() Store` accessor method on Server if not already present, or access the store field directly via an existing test export pattern.)
|
||||||
|
|
||||||
|
2. Write store-level tests in `diunwebhook_test.go` following existing `Test<Function>_<Scenario>` convention:
|
||||||
|
- `TestAcknowledgeAll_Empty`: create server, call TestAcknowledgeAll, assert count=0, no error
|
||||||
|
- `TestAcknowledgeAll_AllUnacknowledged`: upsert 3 events via TestUpsertEvent, call TestAcknowledgeAll, assert count=3, then call GetUpdates and verify all have acknowledged=true
|
||||||
|
- `TestAcknowledgeAll_MixedState`: upsert 3 events, acknowledge 1 via existing dismiss, call TestAcknowledgeAll, assert count=2
|
||||||
|
- `TestAcknowledgeByTag_MatchingTag`: upsert 2 events, create tag, assign both to tag, call TestAcknowledgeByTag(tagID), assert count=2
|
||||||
|
- `TestAcknowledgeByTag_NonExistentTag`: call TestAcknowledgeByTag(9999), assert count=0, no error
|
||||||
|
- `TestAcknowledgeByTag_OnlyAffectsTargetTag`: upsert 3 events, create 2 tags, assign 2 events to tag1 and 1 to tag2, call TestAcknowledgeByTag(tag1.ID), assert count=2, verify tag2's event is still unacknowledged via GetUpdates
|
||||||
|
|
||||||
|
Run tests -- they must FAIL (RED); in Go this initially surfaces as a compile error, since the Store methods the test helpers call do not exist yet.
|
||||||
|
|
||||||
|
3. Add two methods to the Store interface in `store.go` (per D-01):
|
||||||
|
```go
|
||||||
|
AcknowledgeAll() (count int, err error)
|
||||||
|
AcknowledgeByTag(tagID int) (count int, err error)
|
||||||
|
```
|
||||||
|
|
||||||
|
4. Implement in `sqlite_store.go` (following AcknowledgeUpdate pattern with mutex):
|
||||||
|
- `AcknowledgeAll`: `s.mu.Lock()`, `s.db.Exec("UPDATE updates SET acknowledged_at = datetime('now') WHERE acknowledged_at IS NULL")`, return `int(RowsAffected())`
|
||||||
|
- `AcknowledgeByTag`: `s.mu.Lock()`, `s.db.Exec("UPDATE updates SET acknowledged_at = datetime('now') WHERE acknowledged_at IS NULL AND image IN (SELECT image FROM tag_assignments WHERE tag_id = ?)", tagID)`, return `int(RowsAffected())`
|
||||||
|
|
||||||
|
5. Implement in `postgres_store.go` (no mutex, use NOW() and $1 positional param):
|
||||||
|
- `AcknowledgeAll`: `s.db.Exec("UPDATE updates SET acknowledged_at = NOW() WHERE acknowledged_at IS NULL")`, return `int(RowsAffected())`
|
||||||
|
- `AcknowledgeByTag`: `s.db.Exec("UPDATE updates SET acknowledged_at = NOW() WHERE acknowledged_at IS NULL AND image IN (SELECT image FROM tag_assignments WHERE tag_id = $1)", tagID)`, return `int(RowsAffected())`
|
||||||
|
|
||||||
|
6. Run tests again -- they must PASS (GREEN).
|
||||||
|
</action>
|
||||||
|
<verify>
|
||||||
|
<automated>cd /home/jean-luc-makiola/Development/projects/DiunDashboard && go test -v -run "TestAcknowledge(All|ByTag)_" ./pkg/diunwebhook/</automated>
|
||||||
|
</verify>
|
||||||
|
<acceptance_criteria>
|
||||||
|
- store.go contains `AcknowledgeAll() (count int, err error)` in the Store interface
|
||||||
|
- store.go contains `AcknowledgeByTag(tagID int) (count int, err error)` in the Store interface
|
||||||
|
- sqlite_store.go contains `func (s *SQLiteStore) AcknowledgeAll() (int, error)`
|
||||||
|
- sqlite_store.go contains `func (s *SQLiteStore) AcknowledgeByTag(tagID int) (int, error)`
|
||||||
|
- sqlite_store.go AcknowledgeAll contains `s.mu.Lock()`
|
||||||
|
- sqlite_store.go AcknowledgeAll contains `WHERE acknowledged_at IS NULL`
|
||||||
|
- sqlite_store.go AcknowledgeByTag contains `SELECT image FROM tag_assignments WHERE tag_id = ?`
|
||||||
|
- postgres_store.go contains `func (s *PostgresStore) AcknowledgeAll() (int, error)`
|
||||||
|
- postgres_store.go contains `func (s *PostgresStore) AcknowledgeByTag(tagID int) (int, error)`
|
||||||
|
- postgres_store.go AcknowledgeByTag contains `$1` (positional param)
|
||||||
|
- diunwebhook_test.go contains `TestAcknowledgeAll_Empty`
|
||||||
|
- diunwebhook_test.go contains `TestAcknowledgeByTag_OnlyAffectsTargetTag`
|
||||||
|
- `go test -v -run "TestAcknowledge(All|ByTag)_" ./pkg/diunwebhook/` exits 0
|
||||||
|
</acceptance_criteria>
|
||||||
|
<done>Store interface extended with 2 new methods; both SQLiteStore and PostgresStore compile and implement the interface; 6 store-level tests pass</done>
|
||||||
|
</task>
|
||||||
|
|
||||||
|
<task type="auto" tdd="true">
|
||||||
|
<name>Task 2: Add HTTP handlers, route registration, and handler tests for bulk acknowledge endpoints</name>
|
||||||
|
<files>pkg/diunwebhook/diunwebhook.go, pkg/diunwebhook/diunwebhook_test.go, pkg/diunwebhook/export_test.go, cmd/diunwebhook/main.go</files>
|
||||||
|
<read_first>
|
||||||
|
- pkg/diunwebhook/diunwebhook.go
|
||||||
|
- pkg/diunwebhook/diunwebhook_test.go
|
||||||
|
- pkg/diunwebhook/export_test.go
|
||||||
|
- cmd/diunwebhook/main.go
|
||||||
|
</read_first>
|
||||||
|
<behavior>
|
||||||
|
- Test: POST /api/updates/acknowledge-all with no updates returns 200 + {"count":0}
|
||||||
|
- Test: POST /api/updates/acknowledge-all with 2 pending updates returns 200 + {"count":2}
|
||||||
|
- Test: GET /api/updates/acknowledge-all returns 405
|
||||||
|
- Test: POST /api/updates/acknowledge-by-tag with valid tag_id returns 200 + {"count":N}
|
||||||
|
- Test: POST /api/updates/acknowledge-by-tag with tag_id=0 returns 400
|
||||||
|
- Test: POST /api/updates/acknowledge-by-tag with missing body returns 400
|
||||||
|
- Test: POST /api/updates/acknowledge-by-tag with non-existent tag returns 200 + {"count":0}
|
||||||
|
- Test: GET /api/updates/acknowledge-by-tag returns 405
|
||||||
|
</behavior>
|
||||||
|
<action>
|
||||||
|
1. Add `AcknowledgeAllHandler` to `diunwebhook.go` (per D-02):
|
||||||
|
```go
|
||||||
|
func (s *Server) AcknowledgeAllHandler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if r.Method != http.MethodPost {
|
||||||
|
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
count, err := s.store.AcknowledgeAll()
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("AcknowledgeAllHandler: %v", err)
|
||||||
|
http.Error(w, "internal error", http.StatusInternalServerError)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
json.NewEncoder(w).Encode(map[string]int{"count": count})
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
2. Add `AcknowledgeByTagHandler` to `diunwebhook.go` (per D-02):
|
||||||
|
```go
|
||||||
|
func (s *Server) AcknowledgeByTagHandler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if r.Method != http.MethodPost {
|
||||||
|
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
r.Body = http.MaxBytesReader(w, r.Body, maxBodyBytes)
|
||||||
|
var req struct {
|
||||||
|
TagID int `json:"tag_id"`
|
||||||
|
}
|
||||||
|
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||||
|
http.Error(w, "bad request", http.StatusBadRequest)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if req.TagID <= 0 {
|
||||||
|
http.Error(w, "bad request: tag_id required", http.StatusBadRequest)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
count, err := s.store.AcknowledgeByTag(req.TagID)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("AcknowledgeByTagHandler: %v", err)
|
||||||
|
http.Error(w, "internal error", http.StatusInternalServerError)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
json.NewEncoder(w).Encode(map[string]int{"count": count})
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
3. Register routes in `main.go`. Note: `http.ServeMux` matches the most specific pattern regardless of registration order, so the longer `/api/updates/acknowledge-all` and `/api/updates/acknowledge-by-tag` paths take precedence over the `/api/updates/` subtree automatically; registering them first is purely for readability:
|
||||||
|
```go
|
||||||
|
mux.HandleFunc("/api/updates/acknowledge-all", srv.AcknowledgeAllHandler)
|
||||||
|
mux.HandleFunc("/api/updates/acknowledge-by-tag", srv.AcknowledgeByTagHandler)
|
||||||
|
mux.HandleFunc("/api/updates/", srv.DismissHandler) // existing -- must remain after
|
||||||
|
mux.HandleFunc("/api/updates", srv.UpdatesHandler) // existing
|
||||||
|
```
|
||||||
|
|
||||||
|
4. Add test helper to `export_test.go`:
|
||||||
|
```go
|
||||||
|
func (s *Server) TestCreateTag(name string) (Tag, error) {
|
||||||
|
return s.store.CreateTag(name)
|
||||||
|
}
|
||||||
|
func (s *Server) TestAssignTag(image string, tagID int) error {
|
||||||
|
return s.store.AssignTag(image, tagID)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
5. Write handler tests in `diunwebhook_test.go` following the existing `Test<Handler>_<Scenario>` naming convention. Use `NewTestServer()` for each test. Setup: use `TestUpsertEvent` to create events, `TestCreateTag` + `TestAssignTag` to setup tag assignments.
|
||||||
|
|
||||||
|
6. No changes to `frontend/vite.config.ts` are needed for the two new endpoints: the existing dev proxy configuration already forwards all `/api` requests to `:8080`.
|
||||||
|
</action>
|
||||||
|
<verify>
|
||||||
|
<automated>cd /home/jean-luc-makiola/Development/projects/DiunDashboard && go test -v -run "TestAcknowledge(All|ByTag)Handler" ./pkg/diunwebhook/</automated>
|
||||||
|
</verify>
|
||||||
|
<acceptance_criteria>
|
||||||
|
- diunwebhook.go contains `func (s *Server) AcknowledgeAllHandler(`
|
||||||
|
- diunwebhook.go contains `func (s *Server) AcknowledgeByTagHandler(`
|
||||||
|
- diunwebhook.go AcknowledgeAllHandler contains `r.Method != http.MethodPost`
|
||||||
|
- diunwebhook.go AcknowledgeByTagHandler contains `http.MaxBytesReader`
|
||||||
|
- diunwebhook.go AcknowledgeByTagHandler contains `req.TagID <= 0`
|
||||||
|
- main.go contains `"/api/updates/acknowledge-all"` BEFORE `"/api/updates/"`
|
||||||
|
- main.go contains `"/api/updates/acknowledge-by-tag"` BEFORE `"/api/updates/"`
|
||||||
|
- diunwebhook_test.go contains `TestAcknowledgeAllHandler_Empty`
|
||||||
|
- diunwebhook_test.go contains `TestAcknowledgeByTagHandler`
|
||||||
|
- `go test -run "TestAcknowledge" ./pkg/diunwebhook/` exits 0
|
||||||
|
- `go vet ./...` exits 0
|
||||||
|
</acceptance_criteria>
|
||||||
|
<done>Both bulk acknowledge endpoints respond correctly; all new tests pass; route order verified</done>
|
||||||
|
</task>
|
||||||
|
|
||||||
|
</tasks>
|
||||||
|
|
||||||
|
<verification>
|
||||||
|
```bash
|
||||||
|
cd /home/jean-luc-makiola/Development/projects/DiunDashboard
|
||||||
|
go build ./...
|
||||||
|
go vet ./...
|
||||||
|
go test -v -run "TestAcknowledge" ./pkg/diunwebhook/
|
||||||
|
go test -v ./pkg/diunwebhook/ # all existing tests still pass
|
||||||
|
```
|
||||||
|
</verification>
|
||||||
|
|
||||||
|
<success_criteria>
|
||||||
|
- Store interface has 11 methods (9 existing + 2 new)
|
||||||
|
- Both SQLiteStore and PostgresStore implement all 11 methods
|
||||||
|
- POST /api/updates/acknowledge-all returns 200 + {"count": N}
|
||||||
|
- POST /api/updates/acknowledge-by-tag returns 200 + {"count": N}
|
||||||
|
- All existing tests continue to pass
|
||||||
|
- The more specific `/api/updates/acknowledge-*` patterns take precedence over the `/api/updates/` subtree (ServeMux longest-pattern matching), so DismissHandler does not shadow the new endpoints
|
||||||
|
</success_criteria>
|
||||||
|
|
||||||
|
<output>
|
||||||
|
After completion, create `.planning/phases/04-ux-improvements/04-01-SUMMARY.md`
|
||||||
|
</output>
|
||||||
411
.planning/phases/04-ux-improvements/04-02-PLAN.md
Normal file
411
.planning/phases/04-ux-improvements/04-02-PLAN.md
Normal file
@@ -0,0 +1,411 @@
|
|||||||
|
---
|
||||||
|
phase: 04-ux-improvements
|
||||||
|
plan: 02
|
||||||
|
type: execute
|
||||||
|
wave: 1
|
||||||
|
depends_on: []
|
||||||
|
files_modified:
|
||||||
|
- frontend/src/main.tsx
|
||||||
|
- frontend/src/index.css
|
||||||
|
- frontend/src/components/ServiceCard.tsx
|
||||||
|
- frontend/src/components/FilterBar.tsx
|
||||||
|
- frontend/src/components/Header.tsx
|
||||||
|
- frontend/src/App.tsx
|
||||||
|
- frontend/src/lib/utils.ts
|
||||||
|
autonomous: true
|
||||||
|
requirements:
|
||||||
|
- SRCH-01
|
||||||
|
- SRCH-02
|
||||||
|
- SRCH-03
|
||||||
|
- SRCH-04
|
||||||
|
- A11Y-01
|
||||||
|
- A11Y-02
|
||||||
|
|
||||||
|
must_haves:
|
||||||
|
truths:
|
||||||
|
- "User can search updates by image name and results filter instantly"
|
||||||
|
- "User can filter updates by status (all/pending/acknowledged)"
|
||||||
|
- "User can filter updates by tag (all/specific tag/untagged)"
|
||||||
|
- "User can sort updates by date, name, or registry"
|
||||||
|
- "User can toggle between light and dark themes"
|
||||||
|
- "Theme preference persists across page reloads via localStorage"
|
||||||
|
- "System prefers-color-scheme is respected on first visit"
|
||||||
|
- "Drag handle is always visible on ServiceCard (not hover-only)"
|
||||||
|
artifacts:
|
||||||
|
- path: "frontend/src/components/FilterBar.tsx"
|
||||||
|
provides: "Search input + 3 filter/sort dropdowns"
|
||||||
|
min_lines: 40
|
||||||
|
- path: "frontend/src/main.tsx"
|
||||||
|
provides: "Theme initialization from localStorage + prefers-color-scheme"
|
||||||
|
- path: "frontend/src/App.tsx"
|
||||||
|
provides: "Filter state, filtered/sorted entries, FilterBar integration"
|
||||||
|
contains: "FilterBar"
|
||||||
|
- path: "frontend/src/components/Header.tsx"
|
||||||
|
provides: "Theme toggle button with sun/moon icon"
|
||||||
|
contains: "toggleTheme"
|
||||||
|
- path: "frontend/src/lib/utils.ts"
|
||||||
|
provides: "Shared getRegistry function"
|
||||||
|
contains: "export function getRegistry"
|
||||||
|
key_links:
|
||||||
|
- from: "frontend/src/App.tsx"
|
||||||
|
to: "frontend/src/components/FilterBar.tsx"
|
||||||
|
via: "FilterBar component with onChange callbacks"
|
||||||
|
pattern: "<FilterBar"
|
||||||
|
- from: "frontend/src/main.tsx"
|
||||||
|
to: "localStorage"
|
||||||
|
via: "theme init reads localStorage('theme')"
|
||||||
|
pattern: "localStorage.getItem.*theme"
|
||||||
|
- from: "frontend/src/components/Header.tsx"
|
||||||
|
to: "document.documentElement.classList"
|
||||||
|
via: "toggleTheme toggles dark class and writes localStorage"
|
||||||
|
pattern: "classList.toggle.*dark"
|
||||||
|
---
|
||||||
|
|
||||||
|
<objective>
|
||||||
|
Add client-side search/filter/sort controls, light/dark theme toggle, and fix the hover-only drag handle to be always visible.
|
||||||
|
|
||||||
|
Purpose: Makes the dashboard usable at scale (finding specific images) and accessible (theme choice, visible drag handles).
|
||||||
|
Output: New FilterBar component, theme toggle in Header, updated ServiceCard drag handle, filter logic in App.tsx.
|
||||||
|
</objective>
|
||||||
|
|
||||||
|
<execution_context>
|
||||||
|
@$HOME/.claude/get-shit-done/workflows/execute-plan.md
|
||||||
|
@$HOME/.claude/get-shit-done/templates/summary.md
|
||||||
|
</execution_context>
|
||||||
|
|
||||||
|
<context>
|
||||||
|
@.planning/PROJECT.md
|
||||||
|
@.planning/ROADMAP.md
|
||||||
|
@.planning/STATE.md
|
||||||
|
@.planning/phases/04-ux-improvements/04-CONTEXT.md
|
||||||
|
@.planning/phases/04-ux-improvements/04-RESEARCH.md
|
||||||
|
|
||||||
|
<interfaces>
|
||||||
|
From frontend/src/types/diun.ts:
|
||||||
|
```typescript
|
||||||
|
export interface Tag {
|
||||||
|
id: number
|
||||||
|
name: string
|
||||||
|
}
|
||||||
|
export interface UpdateEntry {
|
||||||
|
event: DiunEvent
|
||||||
|
received_at: string
|
||||||
|
acknowledged: boolean
|
||||||
|
tag: Tag | null
|
||||||
|
}
|
||||||
|
export type UpdatesMap = Record<string, UpdateEntry>
|
||||||
|
```
|
||||||
|
|
||||||
|
From frontend/src/App.tsx (current entries derivation):
|
||||||
|
```typescript
|
||||||
|
const entries = Object.entries(updates)
|
||||||
|
const taggedSections = tags.map(tag => ({
|
||||||
|
tag,
|
||||||
|
rows: entries
|
||||||
|
.filter(([, e]) => e.tag?.id === tag.id)
|
||||||
|
.map(([image, entry]) => ({ image, entry })),
|
||||||
|
}))
|
||||||
|
const untaggedRows = entries
|
||||||
|
.filter(([, e]) => !e.tag)
|
||||||
|
.map(([image, entry]) => ({ image, entry }))
|
||||||
|
```
|
||||||
|
|
||||||
|
From frontend/src/components/Header.tsx:
|
||||||
|
```typescript
|
||||||
|
interface HeaderProps {
|
||||||
|
onRefresh: () => void
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
From frontend/src/components/ServiceCard.tsx (drag handle - current opacity pattern):
|
||||||
|
```tsx
|
||||||
|
<button
|
||||||
|
{...attributes}
|
||||||
|
{...listeners}
|
||||||
|
className="text-muted-foreground opacity-0 group-hover:opacity-100 transition-opacity cursor-grab active:cursor-grabbing shrink-0 touch-none"
|
||||||
|
>
|
||||||
|
```
|
||||||
|
|
||||||
|
From frontend/src/lib/utils.ts:
|
||||||
|
```typescript
|
||||||
|
export function cn(...inputs: ClassValue[]) {
|
||||||
|
return twMerge(clsx(inputs))
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
From frontend/src/main.tsx (current hardcoded dark mode):
|
||||||
|
```typescript
|
||||||
|
document.documentElement.classList.add('dark')
|
||||||
|
```
|
||||||
|
|
||||||
|
From frontend/src/index.css (CSS vars - note: no --destructive or --card defined):
|
||||||
|
```css
|
||||||
|
:root {
|
||||||
|
--background: 0 0% 100%;
|
||||||
|
--foreground: 222.2 84% 4.9%;
|
||||||
|
/* ... light theme vars ... */
|
||||||
|
}
|
||||||
|
.dark {
|
||||||
|
--background: 240 10% 3.9%;
|
||||||
|
--foreground: 0 0% 98%;
|
||||||
|
/* ... dark theme vars ... */
|
||||||
|
}
|
||||||
|
```
|
||||||
|
</interfaces>
|
||||||
|
</context>
|
||||||
|
|
||||||
|
<tasks>
|
||||||
|
|
||||||
|
<task type="auto">
|
||||||
|
<name>Task 1: Theme toggle, drag handle fix, and shared getRegistry utility</name>
|
||||||
|
<files>frontend/src/main.tsx, frontend/src/index.css, frontend/src/components/Header.tsx, frontend/src/components/ServiceCard.tsx, frontend/src/lib/utils.ts</files>
|
||||||
|
<read_first>
|
||||||
|
- frontend/src/main.tsx
|
||||||
|
- frontend/src/index.css
|
||||||
|
- frontend/src/components/Header.tsx
|
||||||
|
- frontend/src/components/ServiceCard.tsx
|
||||||
|
- frontend/src/lib/utils.ts
|
||||||
|
</read_first>
|
||||||
|
<action>
|
||||||
|
1. **main.tsx** (per D-15): Replace `document.documentElement.classList.add('dark')` with theme initialization:
|
||||||
|
```typescript
|
||||||
|
const stored = localStorage.getItem('theme')
|
||||||
|
if (stored === 'dark' || (!stored && window.matchMedia('(prefers-color-scheme: dark)').matches)) {
|
||||||
|
document.documentElement.classList.add('dark')
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **index.css**: Add `--destructive` and `--destructive-foreground` CSS variables to both `:root` and `.dark` blocks (needed for destructive button variant used in Plan 03). Also add `--card` and `--card-foreground` if missing:
|
||||||
|
In `:root` block, add:
|
||||||
|
```css
|
||||||
|
--destructive: 0 84.2% 60.2%;
|
||||||
|
--destructive-foreground: 0 0% 98%;
|
||||||
|
--card: 0 0% 100%;
|
||||||
|
--card-foreground: 222.2 84% 4.9%;
|
||||||
|
```
|
||||||
|
In `.dark` block, add:
|
||||||
|
```css
|
||||||
|
--destructive: 0 62.8% 30.6%;
|
||||||
|
--destructive-foreground: 0 85.7% 97.3%;
|
||||||
|
--card: 240 10% 3.9%;
|
||||||
|
--card-foreground: 0 0% 98%;
|
||||||
|
```
|
||||||
|
|
||||||
|
3. **Header.tsx** (per D-14): Add theme toggle button. Import `Sun, Moon` from `lucide-react`. Add a `toggleTheme` function:
|
||||||
|
```typescript
|
||||||
|
function toggleTheme() {
|
||||||
|
const isDark = document.documentElement.classList.toggle('dark')
|
||||||
|
localStorage.setItem('theme', isDark ? 'dark' : 'light')
|
||||||
|
}
|
||||||
|
```
|
||||||
|
Add a second Button next to the refresh button:
|
||||||
|
```tsx
|
||||||
|
<Button
|
||||||
|
variant="ghost"
|
||||||
|
size="sm"
|
||||||
|
onClick={toggleTheme}
|
||||||
|
className="h-8 w-8 p-0 text-muted-foreground hover:text-foreground"
|
||||||
|
title="Toggle theme"
|
||||||
|
>
|
||||||
|
<Sun className="h-4 w-4 hidden dark:block" />
|
||||||
|
<Moon className="h-4 w-4 block dark:hidden" />
|
||||||
|
</Button>
|
||||||
|
```
|
||||||
|
Wrap both buttons in a `<div className="flex items-center gap-1">`.
|
||||||
|
|
||||||
|
4. **ServiceCard.tsx** (per D-16): Change the drag handle button's className from `opacity-0 group-hover:opacity-100` to `opacity-40 hover:opacity-100`. The full className becomes:
|
||||||
|
```
|
||||||
|
text-muted-foreground opacity-40 hover:opacity-100 transition-opacity cursor-grab active:cursor-grabbing shrink-0 touch-none
|
||||||
|
```
|
||||||
|
|
||||||
|
5. **lib/utils.ts**: Extract `getRegistry` function from ServiceCard.tsx and add it as a named export in utils.ts:
|
||||||
|
```typescript
|
||||||
|
export function getRegistry(image: string): string {
|
||||||
|
const parts = image.split('/')
|
||||||
|
if (parts.length === 1) return 'Docker Hub'
|
||||||
|
const first = parts[0]
|
||||||
|
if (!first.includes('.') && !first.includes(':') && first !== 'localhost') return 'Docker Hub'
|
||||||
|
if (first === 'ghcr.io') return 'GitHub'
|
||||||
|
if (first === 'gcr.io') return 'GCR'
|
||||||
|
return first
|
||||||
|
}
|
||||||
|
```
|
||||||
|
Then in ServiceCard.tsx, remove the local `getRegistry` function and add `import { getRegistry } from '@/lib/utils'` (alongside the existing `cn` import: `import { cn, getRegistry } from '@/lib/utils'`).
|
||||||
|
</action>
|
||||||
|
<verify>
|
||||||
|
<automated>cd /home/jean-luc-makiola/Development/projects/DiunDashboard/frontend && bunx tsc --noEmit</automated>
|
||||||
|
</verify>
|
||||||
|
<acceptance_criteria>
|
||||||
|
- main.tsx contains `localStorage.getItem('theme')` and `prefers-color-scheme`
|
||||||
|
- main.tsx does NOT contain `classList.add('dark')` as a standalone statement (only inside the conditional)
|
||||||
|
- index.css `:root` block contains `--destructive: 0 84.2% 60.2%`
|
||||||
|
- index.css `.dark` block contains `--destructive: 0 62.8% 30.6%`
|
||||||
|
- Header.tsx contains `import` with `Sun` and `Moon`
|
||||||
|
- Header.tsx contains `toggleTheme`
|
||||||
|
- Header.tsx contains `localStorage.setItem('theme'`
|
||||||
|
- ServiceCard.tsx drag handle button contains `opacity-40 hover:opacity-100`
|
||||||
|
- ServiceCard.tsx does NOT contain `opacity-0 group-hover:opacity-100` on the drag handle
|
||||||
|
- lib/utils.ts contains `export function getRegistry`
|
||||||
|
- ServiceCard.tsx contains `import` with `getRegistry` from `@/lib/utils`
|
||||||
|
- `bunx tsc --noEmit` exits 0
|
||||||
|
</acceptance_criteria>
|
||||||
|
<done>Theme toggle works (sun/moon icon in header, persists to localStorage, respects system preference on first visit); drag handle always visible at 40% opacity; getRegistry is a shared utility</done>
|
||||||
|
</task>
|
||||||
|
|
||||||
|
<task type="auto">
|
||||||
|
<name>Task 2: FilterBar component and client-side search/filter/sort logic in App.tsx</name>
|
||||||
|
<files>frontend/src/components/FilterBar.tsx, frontend/src/App.tsx</files>
|
||||||
|
<read_first>
|
||||||
|
- frontend/src/App.tsx
|
||||||
|
- frontend/src/types/diun.ts
|
||||||
|
- frontend/src/lib/utils.ts
|
||||||
|
- frontend/src/components/TagSection.tsx
|
||||||
|
</read_first>
|
||||||
|
<action>
|
||||||
|
1. **Create FilterBar.tsx** (per D-06, D-07): New component placed above sections list, below stats row. Uses native `<select>` elements styled with Tailwind (no Radix Select dependency). Props interface:
|
||||||
|
```typescript
|
||||||
|
interface FilterBarProps {
|
||||||
|
search: string
|
||||||
|
onSearchChange: (value: string) => void
|
||||||
|
statusFilter: 'all' | 'pending' | 'acknowledged'
|
||||||
|
onStatusFilterChange: (value: 'all' | 'pending' | 'acknowledged') => void
|
||||||
|
tagFilter: 'all' | 'untagged' | number
|
||||||
|
onTagFilterChange: (value: 'all' | 'untagged' | number) => void
|
||||||
|
sortOrder: 'date-desc' | 'date-asc' | 'name' | 'registry'
|
||||||
|
onSortOrderChange: (value: 'date-desc' | 'date-asc' | 'name' | 'registry') => void
|
||||||
|
tags: Tag[]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Layout: flex row with wrap, gap-3. Responsive: on small screens wraps to multiple rows.
|
||||||
|
- Search input: `<input type="text" placeholder="Search images..." />` with magnifying glass icon (import `Search` from lucide-react). Full width on mobile, `w-64` on desktop.
|
||||||
|
- Status select: options "All Status", "Pending", "Acknowledged"
|
||||||
|
- Tag select: options "All Tags", "Untagged", then one option per tag (tag.name, value=tag.id)
|
||||||
|
- Sort select: options "Newest First" (date-desc), "Oldest First" (date-asc), "Name A-Z" (name), "Registry" (registry)
|
||||||
|
|
||||||
|
Style all selects with: `h-9 rounded-md border border-border bg-background px-3 text-sm focus:outline-none focus:ring-2 focus:ring-primary/50`
|
||||||
|
|
||||||
|
Tag select onChange handler must parse value: `"all"` and `"untagged"` stay as strings, numeric values become `parseInt(value, 10)`.
|
||||||
|
|
||||||
|
2. **App.tsx** (per D-05, D-08): Add filter state and filtering logic.
|
||||||
|
|
||||||
|
Add imports:
|
||||||
|
```typescript
|
||||||
|
import { useMemo } from 'react'
|
||||||
|
import { FilterBar } from '@/components/FilterBar'
|
||||||
|
import { getRegistry } from '@/lib/utils'
|
||||||
|
```
|
||||||
|
|
||||||
|
Add filter state (per D-08 -- no persistence, resets on reload):
|
||||||
|
```typescript
|
||||||
|
const [search, setSearch] = useState('')
|
||||||
|
const [statusFilter, setStatusFilter] = useState<'all' | 'pending' | 'acknowledged'>('all')
|
||||||
|
const [tagFilter, setTagFilter] = useState<'all' | 'untagged' | number>('all')
|
||||||
|
const [sortOrder, setSortOrder] = useState<'date-desc' | 'date-asc' | 'name' | 'registry'>('date-desc')
|
||||||
|
```
|
||||||
|
|
||||||
|
Replace the direct `entries` usage with a `filteredEntries` useMemo:
|
||||||
|
```typescript
|
||||||
|
const filteredEntries = useMemo(() => {
|
||||||
|
let result = Object.entries(updates) as [string, UpdateEntry][]
|
||||||
|
if (search) {
|
||||||
|
const q = search.toLowerCase()
|
||||||
|
result = result.filter(([image]) => image.toLowerCase().includes(q))
|
||||||
|
}
|
||||||
|
if (statusFilter === 'pending') result = result.filter(([, e]) => !e.acknowledged)
|
||||||
|
if (statusFilter === 'acknowledged') result = result.filter(([, e]) => e.acknowledged)
|
||||||
|
if (tagFilter === 'untagged') result = result.filter(([, e]) => !e.tag)
|
||||||
|
if (typeof tagFilter === 'number') result = result.filter(([, e]) => e.tag?.id === tagFilter)
|
||||||
|
result.sort(([ia, ea], [ib, eb]) => {
|
||||||
|
switch (sortOrder) {
|
||||||
|
case 'date-asc': return ea.received_at < eb.received_at ? -1 : 1
|
||||||
|
case 'name': return ia.localeCompare(ib)
|
||||||
|
case 'registry': return getRegistry(ia).localeCompare(getRegistry(ib))
|
||||||
|
default: return ea.received_at > eb.received_at ? -1 : 1
|
||||||
|
}
|
||||||
|
})
|
||||||
|
return result
|
||||||
|
}, [updates, search, statusFilter, tagFilter, sortOrder])
|
||||||
|
```
|
||||||
|
|
||||||
|
Update stats to use `entries` (unfiltered) for total counts but `filteredEntries` for display. The `pending`, `acknowledgedCount`, and `lastReceived` values remain computed from the unfiltered `entries` (dashboard stats always show global counts).
|
||||||
|
|
||||||
|
Update `taggedSections` and `untaggedRows` derivation to use `filteredEntries` instead of `entries`:
|
||||||
|
```typescript
|
||||||
|
const taggedSections = tags.map(tag => ({
|
||||||
|
tag,
|
||||||
|
rows: filteredEntries
|
||||||
|
.filter(([, e]) => e.tag?.id === tag.id)
|
||||||
|
.map(([image, entry]) => ({ image, entry })),
|
||||||
|
}))
|
||||||
|
const untaggedRows = filteredEntries
|
||||||
|
.filter(([, e]) => !e.tag)
|
||||||
|
.map(([image, entry]) => ({ image, entry }))
|
||||||
|
```
|
||||||
|
|
||||||
|
Add `<FilterBar>` in the JSX between the stats grid and the loading state, wrapped in `{!loading && entries.length > 0 && (...)}`:
|
||||||
|
```tsx
|
||||||
|
{!loading && entries.length > 0 && (
|
||||||
|
<FilterBar
|
||||||
|
search={search}
|
||||||
|
onSearchChange={setSearch}
|
||||||
|
statusFilter={statusFilter}
|
||||||
|
onStatusFilterChange={setStatusFilter}
|
||||||
|
tagFilter={tagFilter}
|
||||||
|
onTagFilterChange={setTagFilter}
|
||||||
|
sortOrder={sortOrder}
|
||||||
|
onSortOrderChange={setSortOrder}
|
||||||
|
tags={tags}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
```
|
||||||
|
|
||||||
|
Import `UpdateEntry` type if needed for the `as` cast.
|
||||||
|
</action>
|
||||||
|
<verify>
|
||||||
|
<automated>cd /home/jean-luc-makiola/Development/projects/DiunDashboard/frontend && bunx tsc --noEmit && bun run build</automated>
|
||||||
|
</verify>
|
||||||
|
<acceptance_criteria>
|
||||||
|
- FilterBar.tsx exists and exports `FilterBar` component
|
||||||
|
- FilterBar.tsx contains `Search images` (placeholder text)
|
||||||
|
- FilterBar.tsx contains `<select` elements (native selects, not Radix)
|
||||||
|
- FilterBar.tsx contains `All Status` and `Pending` and `Acknowledged` as option labels
|
||||||
|
- FilterBar.tsx contains `Newest First` and `Name A-Z` as option labels
|
||||||
|
- App.tsx contains `import { FilterBar }` from `@/components/FilterBar`
|
||||||
|
- App.tsx contains `const [search, setSearch] = useState`
|
||||||
|
- App.tsx contains `const [statusFilter, setStatusFilter] = useState`
|
||||||
|
- App.tsx contains `const [sortOrder, setSortOrder] = useState`
|
||||||
|
- App.tsx contains `useMemo` for filteredEntries
|
||||||
|
- App.tsx contains `<FilterBar` JSX element
|
||||||
|
- App.tsx taggedSections uses `filteredEntries` (not raw `entries`)
|
||||||
|
- `bun run build` exits 0
|
||||||
|
</acceptance_criteria>
|
||||||
|
<done>FilterBar renders above sections; searching by image name filters instantly; status/tag/sort dropdowns work; default sort is newest-first; filters reset on page reload</done>
|
||||||
|
</task>
|
||||||
|
|
||||||
|
</tasks>
|
||||||
|
|
||||||
|
<verification>
|
||||||
|
```bash
|
||||||
|
cd /home/jean-luc-makiola/Development/projects/DiunDashboard/frontend
|
||||||
|
bunx tsc --noEmit
|
||||||
|
bun run build
|
||||||
|
```
|
||||||
|
</verification>
|
||||||
|
|
||||||
|
<success_criteria>
|
||||||
|
- FilterBar component renders search input and 3 dropdowns
|
||||||
|
- Filtering by image name is case-insensitive substring match
|
||||||
|
- Status filter shows only pending or acknowledged updates
|
||||||
|
- Tag filter shows only updates in a specific tag or untagged
|
||||||
|
- Sort order changes entry display order
|
||||||
|
- Theme toggle button visible in header
|
||||||
|
- Theme persists in localStorage
|
||||||
|
- First visit respects prefers-color-scheme
|
||||||
|
- Drag handle visible at 40% opacity without hover
|
||||||
|
- Frontend builds without errors
|
||||||
|
</success_criteria>
|
||||||
|
|
||||||
|
<output>
|
||||||
|
After completion, create `.planning/phases/04-ux-improvements/04-02-SUMMARY.md`
|
||||||
|
</output>
|
||||||
558
.planning/phases/04-ux-improvements/04-03-PLAN.md
Normal file
558
.planning/phases/04-ux-improvements/04-03-PLAN.md
Normal file
@@ -0,0 +1,558 @@
|
|||||||
|
---
|
||||||
|
phase: 04-ux-improvements
|
||||||
|
plan: 03
|
||||||
|
type: execute
|
||||||
|
wave: 2
|
||||||
|
depends_on:
|
||||||
|
- 04-01
|
||||||
|
- 04-02
|
||||||
|
files_modified:
|
||||||
|
- frontend/src/hooks/useUpdates.ts
|
||||||
|
- frontend/src/components/Header.tsx
|
||||||
|
- frontend/src/components/TagSection.tsx
|
||||||
|
- frontend/src/components/ServiceCard.tsx
|
||||||
|
- frontend/src/components/Toast.tsx
|
||||||
|
- frontend/src/App.tsx
|
||||||
|
autonomous: true
|
||||||
|
requirements:
|
||||||
|
- BULK-01
|
||||||
|
- BULK-02
|
||||||
|
- INDIC-01
|
||||||
|
- INDIC-02
|
||||||
|
- INDIC-03
|
||||||
|
- INDIC-04
|
||||||
|
|
||||||
|
must_haves:
|
||||||
|
truths:
|
||||||
|
- "User can dismiss all pending updates with a Dismiss All button in the header area"
|
||||||
|
- "User can dismiss all pending updates within a tag group via a per-section button"
|
||||||
|
- "Dismiss All requires an inline two-click confirmation before executing (matching tag delete UX pattern)"
|
||||||
|
- "A pending-count badge is always visible in the Header"
|
||||||
|
- "The browser tab title shows 'DiunDash (N)' when N > 0 and 'DiunDash' when 0"
|
||||||
|
- "A toast notification appears when new updates arrive during polling"
|
||||||
|
- "Updates received since the user's last visit have a visible amber left border highlight"
|
||||||
|
artifacts:
|
||||||
|
- path: "frontend/src/hooks/useUpdates.ts"
|
||||||
|
provides: "acknowledgeAll, acknowledgeByTag callbacks; newArrivals state; tab title effect"
|
||||||
|
contains: "acknowledgeAll"
|
||||||
|
- path: "frontend/src/components/Header.tsx"
|
||||||
|
provides: "Pending badge, dismiss-all button with inline two-click confirm"
|
||||||
|
contains: "pendingCount"
|
||||||
|
- path: "frontend/src/components/TagSection.tsx"
|
||||||
|
provides: "Per-group dismiss button"
|
||||||
|
contains: "onAcknowledgeGroup"
|
||||||
|
- path: "frontend/src/components/Toast.tsx"
|
||||||
|
provides: "Custom toast notification component"
|
||||||
|
min_lines: 20
|
||||||
|
- path: "frontend/src/components/ServiceCard.tsx"
|
||||||
|
provides: "New-since-last-visit highlight via isNewSinceLastVisit prop"
|
||||||
|
contains: "isNewSinceLastVisit"
|
||||||
|
- path: "frontend/src/App.tsx"
|
||||||
|
provides: "Wiring: bulk callbacks, toast state, lastVisit ref, tab title, new props"
|
||||||
|
contains: "acknowledgeAll"
|
||||||
|
key_links:
|
||||||
|
- from: "frontend/src/hooks/useUpdates.ts"
|
||||||
|
to: "/api/updates/acknowledge-all"
|
||||||
|
via: "fetch POST in acknowledgeAll callback"
|
||||||
|
pattern: "fetch.*acknowledge-all"
|
||||||
|
- from: "frontend/src/hooks/useUpdates.ts"
|
||||||
|
to: "/api/updates/acknowledge-by-tag"
|
||||||
|
via: "fetch POST in acknowledgeByTag callback"
|
||||||
|
pattern: "fetch.*acknowledge-by-tag"
|
||||||
|
- from: "frontend/src/App.tsx"
|
||||||
|
to: "frontend/src/components/Header.tsx"
|
||||||
|
via: "pendingCount and onDismissAll props"
|
||||||
|
pattern: "pendingCount=|onDismissAll="
|
||||||
|
- from: "frontend/src/App.tsx"
|
||||||
|
to: "frontend/src/components/TagSection.tsx"
|
||||||
|
via: "onAcknowledgeGroup prop"
|
||||||
|
pattern: "onAcknowledgeGroup="
|
||||||
|
- from: "frontend/src/App.tsx"
|
||||||
|
to: "frontend/src/components/ServiceCard.tsx"
|
||||||
|
via: "isNewSinceLastVisit prop passed through TagSection"
|
||||||
|
pattern: "isNewSinceLastVisit"
|
||||||
|
---
|
||||||
|
|
||||||
|
<objective>
|
||||||
|
Wire bulk dismiss UI (frontend) to the backend endpoints from Plan 01, add update indicators (pending badge, tab title, toast, new-since-last-visit highlight).
|
||||||
|
|
||||||
|
Purpose: Completes the UX improvements by giving users bulk actions and visual awareness of new updates.
|
||||||
|
Output: Updated useUpdates hook with bulk callbacks and toast detection, Header with badge + dismiss-all, TagSection with per-group dismiss, Toast component, ServiceCard with highlight.
|
||||||
|
</objective>
|
||||||
|
|
||||||
|
<execution_context>
|
||||||
|
@$HOME/.claude/get-shit-done/workflows/execute-plan.md
|
||||||
|
@$HOME/.claude/get-shit-done/templates/summary.md
|
||||||
|
</execution_context>
|
||||||
|
|
||||||
|
<context>
|
||||||
|
@.planning/PROJECT.md
|
||||||
|
@.planning/ROADMAP.md
|
||||||
|
@.planning/STATE.md
|
||||||
|
@.planning/phases/04-ux-improvements/04-CONTEXT.md
|
||||||
|
@.planning/phases/04-ux-improvements/04-RESEARCH.md
|
||||||
|
@.planning/phases/04-ux-improvements/04-01-SUMMARY.md
|
||||||
|
@.planning/phases/04-ux-improvements/04-02-SUMMARY.md
|
||||||
|
|
||||||
|
<interfaces>
|
||||||
|
<!-- From Plan 01: new backend endpoints -->
|
||||||
|
POST /api/updates/acknowledge-all -> {"count": N}
|
||||||
|
POST /api/updates/acknowledge-by-tag (body: {"tag_id": N}) -> {"count": N}
|
||||||
|
|
||||||
|
<!-- From Plan 02: Header already has theme toggle, App.tsx has filter state -->
|
||||||
|
From frontend/src/components/Header.tsx (after Plan 02):
|
||||||
|
```typescript
|
||||||
|
interface HeaderProps {
|
||||||
|
onRefresh: () => void
|
||||||
|
}
|
||||||
|
// Header now has theme toggle button, refresh button
|
||||||
|
```
|
||||||
|
|
||||||
|
From frontend/src/hooks/useUpdates.ts:
|
||||||
|
```typescript
|
||||||
|
export function useUpdates() {
|
||||||
|
// Returns: updates, loading, error, lastRefreshed, secondsUntilRefresh, fetchUpdates, acknowledge, assignTag
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
From frontend/src/components/TagSection.tsx:
|
||||||
|
```typescript
|
||||||
|
interface TagSectionProps {
|
||||||
|
tag: Tag | null
|
||||||
|
rows: TagSectionRow[]
|
||||||
|
onAcknowledge: (image: string) => void
|
||||||
|
onDeleteTag?: (id: number) => void
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
From frontend/src/components/ServiceCard.tsx:
|
||||||
|
```typescript
|
||||||
|
interface ServiceCardProps {
|
||||||
|
image: string
|
||||||
|
entry: UpdateEntry
|
||||||
|
onAcknowledge: (image: string) => void
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
From frontend/src/App.tsx (after Plan 02):
|
||||||
|
```typescript
|
||||||
|
// Has: filteredEntries useMemo, FilterBar, filter state
|
||||||
|
// Uses: useUpdates() destructured for updates, acknowledge, etc.
|
||||||
|
// Stats: pending, acknowledgedCount computed from unfiltered entries
|
||||||
|
```
|
||||||
|
</interfaces>
|
||||||
|
</context>
|
||||||
|
|
||||||
|
<tasks>
|
||||||
|
|
||||||
|
<task type="auto">
|
||||||
|
<name>Task 1: Extend useUpdates with bulk acknowledge callbacks, toast detection, and tab title effect</name>
|
||||||
|
<files>frontend/src/hooks/useUpdates.ts</files>
|
||||||
|
<read_first>
|
||||||
|
- frontend/src/hooks/useUpdates.ts
|
||||||
|
- frontend/src/types/diun.ts
|
||||||
|
</read_first>
|
||||||
|
<action>
|
||||||
|
1. **Add acknowledgeAll callback** (per D-01, D-02) using optimistic update pattern matching existing `acknowledge`:
|
||||||
|
```typescript
|
||||||
|
const acknowledgeAll = useCallback(async () => {
|
||||||
|
setUpdates(prev =>
|
||||||
|
Object.fromEntries(
|
||||||
|
Object.entries(prev).map(([img, entry]) => [
|
||||||
|
img,
|
||||||
|
entry.acknowledged ? entry : { ...entry, acknowledged: true },
|
||||||
|
])
|
||||||
|
) as UpdatesMap
|
||||||
|
)
|
||||||
|
try {
|
||||||
|
await fetch('/api/updates/acknowledge-all', { method: 'POST' })
|
||||||
|
} catch (e) {
|
||||||
|
console.error('acknowledgeAll failed:', e)
|
||||||
|
fetchUpdates()
|
||||||
|
}
|
||||||
|
}, [fetchUpdates])
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **Add acknowledgeByTag callback** (per D-01, D-02):
|
||||||
|
```typescript
|
||||||
|
const acknowledgeByTag = useCallback(async (tagID: number) => {
|
||||||
|
setUpdates(prev =>
|
||||||
|
Object.fromEntries(
|
||||||
|
Object.entries(prev).map(([img, entry]) => [
|
||||||
|
img,
|
||||||
|
entry.tag?.id === tagID && !entry.acknowledged
|
||||||
|
? { ...entry, acknowledged: true }
|
||||||
|
: entry,
|
||||||
|
])
|
||||||
|
) as UpdatesMap
|
||||||
|
)
|
||||||
|
try {
|
||||||
|
await fetch('/api/updates/acknowledge-by-tag', {
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify({ tag_id: tagID }),
|
||||||
|
})
|
||||||
|
} catch (e) {
|
||||||
|
console.error('acknowledgeByTag failed:', e)
|
||||||
|
fetchUpdates()
|
||||||
|
}
|
||||||
|
}, [fetchUpdates])
|
||||||
|
```
|
||||||
|
|
||||||
|
3. **Add toast detection** (per D-11): Track previous update keys with a ref. After each successful fetch, compare new keys vs previous. Only fire after initial load (guard: `prevKeysRef.current.size > 0`). State is `newArrivals: string[]`, replaced (not appended) each time.
|
||||||
|
```typescript
|
||||||
|
const prevKeysRef = useRef<Set<string>>(new Set())
|
||||||
|
const [newArrivals, setNewArrivals] = useState<string[]>([])
|
||||||
|
|
||||||
|
// Inside fetchUpdates, after setUpdates(data):
|
||||||
|
const currentKeys = Object.keys(data)
|
||||||
|
const newKeys = currentKeys.filter(k => !prevKeysRef.current.has(k))
|
||||||
|
if (newKeys.length > 0 && prevKeysRef.current.size > 0) {
|
||||||
|
setNewArrivals(newKeys)
|
||||||
|
}
|
||||||
|
prevKeysRef.current = new Set(currentKeys)
|
||||||
|
```
|
||||||
|
|
||||||
|
Add a `clearNewArrivals` callback:
|
||||||
|
```typescript
|
||||||
|
const clearNewArrivals = useCallback(() => setNewArrivals([]), [])
|
||||||
|
```
|
||||||
|
|
||||||
|
4. **Update return value** to include new fields:
|
||||||
|
```typescript
|
||||||
|
return {
|
||||||
|
updates, loading, error, lastRefreshed, secondsUntilRefresh,
|
||||||
|
fetchUpdates, acknowledge, assignTag,
|
||||||
|
acknowledgeAll, acknowledgeByTag,
|
||||||
|
newArrivals, clearNewArrivals,
|
||||||
|
}
|
||||||
|
```
|
||||||
|
</action>
|
||||||
|
<verify>
|
||||||
|
<automated>cd /home/jean-luc-makiola/Development/projects/DiunDashboard/frontend && bunx tsc --noEmit</automated>
|
||||||
|
</verify>
|
||||||
|
<acceptance_criteria>
|
||||||
|
- useUpdates.ts contains `const acknowledgeAll = useCallback`
|
||||||
|
- useUpdates.ts contains `fetch('/api/updates/acknowledge-all'`
|
||||||
|
- useUpdates.ts contains `const acknowledgeByTag = useCallback`
|
||||||
|
- useUpdates.ts contains `fetch('/api/updates/acknowledge-by-tag'`
|
||||||
|
- useUpdates.ts contains `const prevKeysRef = useRef<Set<string>>`
|
||||||
|
- useUpdates.ts contains `const [newArrivals, setNewArrivals] = useState<string[]>`
|
||||||
|
- useUpdates.ts contains `clearNewArrivals` in the return object
|
||||||
|
- useUpdates.ts return object includes `acknowledgeAll` and `acknowledgeByTag`
|
||||||
|
- `bunx tsc --noEmit` exits 0
|
||||||
|
</acceptance_criteria>
|
||||||
|
<done>useUpdates hook returns acknowledgeAll, acknowledgeByTag, newArrivals, and clearNewArrivals; toast detection fires on new images during polling</done>
|
||||||
|
</task>
|
||||||
|
|
||||||
|
<task type="auto">
|
||||||
|
<name>Task 2: Toast component, Header updates, TagSection per-group dismiss, ServiceCard highlight, and App.tsx wiring</name>
|
||||||
|
<files>frontend/src/components/Toast.tsx, frontend/src/components/Header.tsx, frontend/src/components/TagSection.tsx, frontend/src/components/ServiceCard.tsx, frontend/src/App.tsx</files>
|
||||||
|
<read_first>
|
||||||
|
- frontend/src/App.tsx
|
||||||
|
- frontend/src/components/Header.tsx
|
||||||
|
- frontend/src/components/TagSection.tsx
|
||||||
|
- frontend/src/components/ServiceCard.tsx
|
||||||
|
- frontend/src/hooks/useUpdates.ts
|
||||||
|
- frontend/src/types/diun.ts
|
||||||
|
</read_first>
|
||||||
|
<action>
|
||||||
|
1. **Create Toast.tsx** (per D-11): Custom toast component. Auto-dismiss after 5 seconds. Non-stacking (shows latest message only). Props:
|
||||||
|
```typescript
|
||||||
|
interface ToastProps {
|
||||||
|
message: string
|
||||||
|
onDismiss: () => void
|
||||||
|
}
|
||||||
|
```
|
||||||
|
Implementation: fixed position bottom-right (`fixed bottom-4 right-4 z-50`), dark card style, shows message + X dismiss button. Uses `useEffect` with a 5-second `setTimeout` that calls `onDismiss`. Renders `null` if `message` is empty.
|
||||||
|
```tsx
|
||||||
|
export function Toast({ message, onDismiss }: ToastProps) {
|
||||||
|
useEffect(() => {
|
||||||
|
const timer = setTimeout(onDismiss, 5000)
|
||||||
|
return () => clearTimeout(timer)
|
||||||
|
}, [message, onDismiss])
|
||||||
|
|
||||||
|
if (!message) return null
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="fixed bottom-4 right-4 z-50 max-w-sm rounded-lg border border-border bg-card px-4 py-3 shadow-lg flex items-center gap-3">
|
||||||
|
<p className="text-sm flex-1">{message}</p>
|
||||||
|
<button
|
||||||
|
onClick={onDismiss}
|
||||||
|
className="text-muted-foreground hover:text-foreground text-xs font-medium shrink-0"
|
||||||
|
>
|
||||||
|
Dismiss
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **Header.tsx** (per D-03, D-04, D-09): Extend HeaderProps and add pending badge + dismiss-all button with inline two-click confirm pattern (per D-04, matching existing tag delete UX -- no modal needed).
|
||||||
|
Update the interface:
|
||||||
|
```typescript
|
||||||
|
interface HeaderProps {
|
||||||
|
onRefresh: () => void
|
||||||
|
pendingCount: number
|
||||||
|
onDismissAll: () => void
|
||||||
|
}
|
||||||
|
```
|
||||||
|
Add `Badge` import from `@/components/ui/badge`. Add `CheckCheck` import from `lucide-react`.
|
||||||
|
After "Diun Dashboard" title span, add the pending badge (per D-09):
|
||||||
|
```tsx
|
||||||
|
{pendingCount > 0 && (
|
||||||
|
<Badge variant="secondary" className="text-xs font-bold px-2 py-0.5 bg-amber-500/15 text-amber-500 border-amber-500/25">
|
||||||
|
{pendingCount}
|
||||||
|
</Badge>
|
||||||
|
)}
|
||||||
|
```
|
||||||
|
Add dismiss-all button with inline two-click confirm pattern (per D-04). Add local state `const [confirmDismissAll, setConfirmDismissAll] = useState(false)`. The button:
|
||||||
|
```tsx
|
||||||
|
{pendingCount > 0 && (
|
||||||
|
<Button
|
||||||
|
variant="ghost"
|
||||||
|
size="sm"
|
||||||
|
onClick={() => {
|
||||||
|
if (!confirmDismissAll) { setConfirmDismissAll(true); return }
|
||||||
|
onDismissAll()
|
||||||
|
setConfirmDismissAll(false)
|
||||||
|
}}
|
||||||
|
onBlur={() => setConfirmDismissAll(false)}
|
||||||
|
className={cn(
|
||||||
|
'h-8 px-3 text-xs font-medium',
|
||||||
|
confirmDismissAll
|
||||||
|
? 'text-destructive hover:bg-destructive/10'
|
||||||
|
: 'text-muted-foreground hover:text-foreground'
|
||||||
|
)}
|
||||||
|
>
|
||||||
|
<CheckCheck className="h-3.5 w-3.5 mr-1" />
|
||||||
|
{confirmDismissAll ? 'Sure? Dismiss all' : 'Dismiss All'}
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
```
|
||||||
|
Import `useState` from react and `cn` from `@/lib/utils`.
|
||||||
|
|
||||||
|
3. **TagSection.tsx** (per D-03): Add optional `onAcknowledgeGroup` prop. Update interface:
|
||||||
|
```typescript
|
||||||
|
interface TagSectionProps {
|
||||||
|
tag: Tag | null
|
||||||
|
rows: TagSectionRow[]
|
||||||
|
onAcknowledge: (image: string) => void
|
||||||
|
onDeleteTag?: (id: number) => void
|
||||||
|
onAcknowledgeGroup?: (tagId: number) => void
|
||||||
|
}
|
||||||
|
```
|
||||||
|
Add a "Dismiss Group" button in the section header, next to the delete button, only when `tag !== null` and `onAcknowledgeGroup` is provided and at least one row is unacknowledged. Use inline two-click confirm pattern (per D-04):
|
||||||
|
```typescript
|
||||||
|
const [confirmDismissGroup, setConfirmDismissGroup] = useState(false)
|
||||||
|
const hasPending = rows.some(r => !r.entry.acknowledged)
|
||||||
|
```
|
||||||
|
Button (placed before the delete button):
|
||||||
|
```tsx
|
||||||
|
{tag && onAcknowledgeGroup && hasPending && (
|
||||||
|
<button
|
||||||
|
onClick={() => {
|
||||||
|
if (!confirmDismissGroup) { setConfirmDismissGroup(true); return }
|
||||||
|
onAcknowledgeGroup(tag.id)
|
||||||
|
setConfirmDismissGroup(false)
|
||||||
|
}}
|
||||||
|
onBlur={() => setConfirmDismissGroup(false)}
|
||||||
|
className={cn(
|
||||||
|
'flex items-center gap-1 px-2 py-1 rounded text-[11px] font-medium transition-colors',
|
||||||
|
confirmDismissGroup
|
||||||
|
? 'text-destructive hover:bg-destructive/10'
|
||||||
|
: 'text-muted-foreground hover:text-foreground'
|
||||||
|
)}
|
||||||
|
>
|
||||||
|
<CheckCheck className="h-3.5 w-3.5" />
|
||||||
|
{confirmDismissGroup ? 'Sure?' : 'Dismiss Group'}
|
||||||
|
</button>
|
||||||
|
)}
|
||||||
|
```
|
||||||
|
Import `CheckCheck` from `lucide-react`.
|
||||||
|
|
||||||
|
4. **ServiceCard.tsx** (per D-12, D-13): Add `isNewSinceLastVisit` prop. Update interface:
|
||||||
|
```typescript
|
||||||
|
interface ServiceCardProps {
|
||||||
|
image: string
|
||||||
|
entry: UpdateEntry
|
||||||
|
onAcknowledge: (image: string) => void
|
||||||
|
isNewSinceLastVisit?: boolean
|
||||||
|
}
|
||||||
|
```
|
||||||
|
Update the outer div's className to include highlight when `isNewSinceLastVisit`:
|
||||||
|
```tsx
|
||||||
|
className={cn(
|
||||||
|
'group p-4 rounded-xl border border-border bg-card hover:border-muted-foreground/30 transition-all flex flex-col justify-between gap-4',
|
||||||
|
isNewSinceLastVisit && 'border-l-4 border-l-amber-500',
|
||||||
|
isDragging && 'opacity-30',
|
||||||
|
)}
|
||||||
|
```
|
||||||
|
|
||||||
|
5. **App.tsx**: Wire everything together.
|
||||||
|
|
||||||
|
a. Destructure new values from useUpdates:
|
||||||
|
```typescript
|
||||||
|
const {
|
||||||
|
updates, loading, error, lastRefreshed, secondsUntilRefresh,
|
||||||
|
fetchUpdates, acknowledge, assignTag,
|
||||||
|
acknowledgeAll, acknowledgeByTag,
|
||||||
|
newArrivals, clearNewArrivals,
|
||||||
|
} = useUpdates()
|
||||||
|
```
|
||||||
|
|
||||||
|
b. Add tab title effect (per D-10):
|
||||||
|
```typescript
|
||||||
|
useEffect(() => {
|
||||||
|
document.title = pending > 0 ? `DiunDash (${pending})` : 'DiunDash'
|
||||||
|
}, [pending])
|
||||||
|
```
|
||||||
|
Add `useEffect` to the React import.
|
||||||
|
|
||||||
|
c. Add last-visit tracking (per D-12):
|
||||||
|
```typescript
|
||||||
|
const lastVisitRef = useRef<string | null>(
|
||||||
|
localStorage.getItem('lastVisitTimestamp')
|
||||||
|
)
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
const handler = () => localStorage.setItem('lastVisitTimestamp', new Date().toISOString())
|
||||||
|
window.addEventListener('beforeunload', handler)
|
||||||
|
return () => window.removeEventListener('beforeunload', handler)
|
||||||
|
}, [])
|
||||||
|
```
|
||||||
|
|
||||||
|
d. Compute `isNewSinceLastVisit` per entry when building rows. Create a helper:
|
||||||
|
```typescript
|
||||||
|
function isNewSince(receivedAt: string): boolean {
|
||||||
|
return lastVisitRef.current ? receivedAt > lastVisitRef.current : false
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
e. Update taggedSections and untaggedRows to include `isNewSinceLastVisit`:
|
||||||
|
```typescript
|
||||||
|
const taggedSections = tags.map(tag => ({
|
||||||
|
tag,
|
||||||
|
rows: filteredEntries
|
||||||
|
.filter(([, e]) => e.tag?.id === tag.id)
|
||||||
|
.map(([image, entry]) => ({ image, entry, isNew: isNewSince(entry.received_at) })),
|
||||||
|
}))
|
||||||
|
const untaggedRows = filteredEntries
|
||||||
|
.filter(([, e]) => !e.tag)
|
||||||
|
.map(([image, entry]) => ({ image, entry, isNew: isNewSince(entry.received_at) }))
|
||||||
|
```
|
||||||
|
|
||||||
|
   f. Extend the `TagSectionRow` type in TagSection.tsx with an optional `isNew` field, and pass it to ServiceCard as the `isNewSinceLastVisit` prop. In TagSection.tsx, update `TagSectionRow`:
|
||||||
|
```typescript
|
||||||
|
export interface TagSectionRow {
|
||||||
|
image: string
|
||||||
|
entry: UpdateEntry
|
||||||
|
isNew?: boolean
|
||||||
|
}
|
||||||
|
```
|
||||||
|
And in TagSection's ServiceCard render:
|
||||||
|
```tsx
|
||||||
|
<ServiceCard
|
||||||
|
key={image}
|
||||||
|
image={image}
|
||||||
|
entry={entry}
|
||||||
|
onAcknowledge={onAcknowledge}
|
||||||
|
isNewSinceLastVisit={isNew}
|
||||||
|
/>
|
||||||
|
```
|
||||||
|
Update the destructuring in the `.map()`: `{rows.map(({ image, entry, isNew }) => (`
|
||||||
|
|
||||||
|
g. Update Header props:
|
||||||
|
```tsx
|
||||||
|
<Header onRefresh={fetchUpdates} pendingCount={pending} onDismissAll={acknowledgeAll} />
|
||||||
|
```
|
||||||
|
|
||||||
|
h. Update TagSection props to include `onAcknowledgeGroup`:
|
||||||
|
```tsx
|
||||||
|
<TagSection
|
||||||
|
key={tag.id}
|
||||||
|
tag={tag}
|
||||||
|
  rows={rows}
|
||||||
|
onAcknowledge={acknowledge}
|
||||||
|
onDeleteTag={deleteTag}
|
||||||
|
onAcknowledgeGroup={acknowledgeByTag}
|
||||||
|
/>
|
||||||
|
```
|
||||||
|
|
||||||
|
i. Add toast rendering and import:
|
||||||
|
```typescript
|
||||||
|
import { Toast } from '@/components/Toast'
|
||||||
|
```
|
||||||
|
Compute toast message from `newArrivals`:
|
||||||
|
```typescript
|
||||||
|
const toastMessage = newArrivals.length > 0
|
||||||
|
? newArrivals.length === 1
|
||||||
|
? `New update: ${newArrivals[0]}`
|
||||||
|
: `${newArrivals.length} new updates arrived`
|
||||||
|
: ''
|
||||||
|
```
|
||||||
|
Add `<Toast message={toastMessage} onDismiss={clearNewArrivals} />` at the end of the root div, before the closing `</div>`.
|
||||||
|
|
||||||
|
j. Import `useEffect` if not already imported (it should be from Plan 02 adding useMemo -- check). The import line should be:
|
||||||
|
```typescript
|
||||||
|
import React, { useState, useRef, useEffect, useMemo } from 'react'
|
||||||
|
```
|
||||||
|
</action>
|
||||||
|
<verify>
|
||||||
|
<automated>cd /home/jean-luc-makiola/Development/projects/DiunDashboard/frontend && bunx tsc --noEmit && bun run build</automated>
|
||||||
|
</verify>
|
||||||
|
<acceptance_criteria>
|
||||||
|
- Toast.tsx exists and exports `Toast` component
|
||||||
|
- Toast.tsx contains `setTimeout(onDismiss, 5000)`
|
||||||
|
- Toast.tsx contains `fixed bottom-4 right-4`
|
||||||
|
- Header.tsx contains `pendingCount` in HeaderProps interface
|
||||||
|
- Header.tsx contains `onDismissAll` in HeaderProps interface
|
||||||
|
- Header.tsx contains `confirmDismissAll` state
|
||||||
|
- Header.tsx contains `Sure? Dismiss all` text for confirm state
|
||||||
|
- Header.tsx contains `Badge` import
|
||||||
|
- TagSection.tsx contains `onAcknowledgeGroup` in TagSectionProps
|
||||||
|
- TagSection.tsx contains `confirmDismissGroup` state
|
||||||
|
- TagSection.tsx contains `Dismiss Group` text
|
||||||
|
- ServiceCard.tsx contains `isNewSinceLastVisit` in ServiceCardProps
|
||||||
|
- ServiceCard.tsx contains `border-l-4 border-l-amber-500`
|
||||||
|
- App.tsx contains `acknowledgeAll` and `acknowledgeByTag` destructured from useUpdates
|
||||||
|
- App.tsx contains `document.title` assignment with `DiunDash`
|
||||||
|
- App.tsx contains `lastVisitTimestamp` in localStorage calls
|
||||||
|
- App.tsx contains `<Toast` JSX element
|
||||||
|
- App.tsx contains `<Header` with `pendingCount=` and `onDismissAll=` props
|
||||||
|
- App.tsx contains `onAcknowledgeGroup=` prop on TagSection
|
||||||
|
- TagSection.tsx TagSectionRow interface contains `isNew`
|
||||||
|
- `bun run build` exits 0
|
||||||
|
</acceptance_criteria>
|
||||||
|
<done>Bulk dismiss buttons work (dismiss-all in header with inline two-click confirm, dismiss-group in each tag section with inline two-click confirm); pending badge shows in header; tab title reflects count; toast appears for new arrivals; new-since-last-visit items have amber left border highlight</done>
|
||||||
|
</task>
|
||||||
|
|
||||||
|
</tasks>
|
||||||
|
|
||||||
|
<verification>
|
||||||
|
```bash
|
||||||
|
cd /home/jean-luc-makiola/Development/projects/DiunDashboard/frontend
|
||||||
|
bunx tsc --noEmit
|
||||||
|
bun run build
|
||||||
|
# Full stack verification:
|
||||||
|
cd /home/jean-luc-makiola/Development/projects/DiunDashboard
|
||||||
|
go test -v ./pkg/diunwebhook/
|
||||||
|
go build ./...
|
||||||
|
```
|
||||||
|
</verification>
|
||||||
|
|
||||||
|
<success_criteria>
|
||||||
|
- Dismiss All button in header triggers POST /api/updates/acknowledge-all
|
||||||
|
- Per-group Dismiss Group button triggers POST /api/updates/acknowledge-by-tag with correct tag_id
|
||||||
|
- Both dismiss buttons use inline two-click confirmation (matching tag delete UX pattern)
|
||||||
|
- Pending count badge visible in header when > 0
|
||||||
|
- Browser tab title shows "DiunDash (N)" or "DiunDash"
|
||||||
|
- Toast appears at bottom-right when polling detects new images
|
||||||
|
- Toast auto-dismisses after 5 seconds
|
||||||
|
- New-since-last-visit updates have amber left border
|
||||||
|
- Frontend builds without TypeScript errors
|
||||||
|
</success_criteria>
|
||||||
|
|
||||||
|
<output>
|
||||||
|
After completion, create `.planning/phases/04-ux-improvements/04-03-SUMMARY.md`
|
||||||
|
</output>
|
||||||
118
.planning/phases/04-ux-improvements/04-CONTEXT.md
Normal file
118
.planning/phases/04-ux-improvements/04-CONTEXT.md
Normal file
@@ -0,0 +1,118 @@
|
|||||||
|
# Phase 4: UX Improvements - Context
|
||||||
|
|
||||||
|
**Gathered:** 2026-03-24
|
||||||
|
**Status:** Ready for planning
|
||||||
|
|
||||||
|
<domain>
|
||||||
|
## Phase Boundary
|
||||||
|
|
||||||
|
Deliver UX features that make the dashboard genuinely usable at scale: bulk dismiss (all + per-group), search and filter across updates, new-update indicators (badge, tab title, toast, highlight), and accessibility fixes (theme toggle, always-visible drag handle). No new database tables — bulk dismiss adds Store methods; search/filter is client-side; indicators use localStorage.
|
||||||
|
|
||||||
|
</domain>
|
||||||
|
|
||||||
|
<decisions>
|
||||||
|
## Implementation Decisions
|
||||||
|
|
||||||
|
### Bulk dismiss (BULK-01, BULK-02)
|
||||||
|
- **D-01:** Add two new Store methods: `AcknowledgeAll() (count int, err error)` and `AcknowledgeByTag(tagID int) (count int, err error)` — consistent with existing `AcknowledgeUpdate(image)` pattern
|
||||||
|
- **D-02:** Two new API endpoints: `POST /api/updates/acknowledge-all` and `POST /api/updates/acknowledge-by-tag` (with `tag_id` in body) — returning the count of dismissed items
|
||||||
|
- **D-03:** UI placement: "Dismiss All" button in the header/stats area; "Dismiss Group" button in each TagSection header next to the existing delete button
|
||||||
|
- **D-04:** Confirmation: inline two-click confirm pattern for both dismiss-all and per-group dismiss — consistent with existing tag delete UX, zero additional dependencies (modal/dialog originally considered but inline is simpler and matches established patterns)
|
||||||
|
|
||||||
|
### Search and filter (SRCH-01 through SRCH-04)
|
||||||
|
- **D-05:** Client-side filtering only — all data is already in memory from polling, no new API endpoints needed
|
||||||
|
- **D-06:** Filter bar placed above the sections list, below the stats row
|
||||||
|
- **D-07:** Controls: text search input (filters by image name), status dropdown (all/pending/acknowledged), tag dropdown (all/specific tag/untagged), sort dropdown (date/name/registry)
|
||||||
|
- **D-08:** Filters do not persist across page reloads — reset on each visit (dashboard is a quick-glance tool)
|
||||||
|
|
||||||
|
### New-update indicators (INDIC-01 through INDIC-04)
|
||||||
|
- **D-09:** Pending update badge/counter displayed in the Header component next to the "Diun Dashboard" title — always visible
|
||||||
|
- **D-10:** Browser tab title reflects pending count: `"DiunDash (N)"` when N > 0, `"DiunDash"` when zero
|
||||||
|
- **D-11:** Toast notification when new updates arrive during polling — auto-dismiss after 5 seconds with manual dismiss button; non-stacking (latest update replaces previous toast)
|
||||||
|
- **D-12:** "New since last visit" detection via localStorage timestamp — store `lastVisitTimestamp` on page unload; updates with `received_at` after that timestamp get a visual highlight
|
||||||
|
- **D-13:** Highlight style: subtle left border accent (e.g., `border-l-4 border-amber-500`) on ServiceCard for new-since-last-visit items
|
||||||
|
|
||||||
|
### Accessibility and theme (A11Y-01, A11Y-02)
|
||||||
|
- **D-14:** Light/dark theme toggle placed in the Header bar next to the refresh button — icon button (sun/moon)
|
||||||
|
- **D-15:** Theme preference persisted in localStorage; on first visit, respects `prefers-color-scheme` media query; removes the hardcoded `classList.add('dark')` from `main.tsx`
|
||||||
|
- **D-16:** Drag handle on ServiceCard always visible at reduced opacity (`opacity-40`), full opacity on hover — removes the current `opacity-0 group-hover:opacity-100` pattern
|
||||||
|
|
||||||
|
### Claude's Discretion
|
||||||
|
- Toast component implementation (custom or shadcn/ui Sonner)
|
||||||
|
- Exact filter bar layout and responsive breakpoints
|
||||||
|
- Animation/transition details for theme switching
|
||||||
|
- Whether to show a count in the per-group dismiss button (e.g., "Dismiss 3")
|
||||||
|
- Sort order default (most recent first vs alphabetical)
|
||||||
|
|
||||||
|
</decisions>
|
||||||
|
|
||||||
|
<canonical_refs>
|
||||||
|
## Canonical References
|
||||||
|
|
||||||
|
**Downstream agents MUST read these before planning or implementing.**
|
||||||
|
|
||||||
|
### Store interface and handler patterns
|
||||||
|
- `pkg/diunwebhook/store.go` -- Store interface (9 methods; new bulk methods extend this)
|
||||||
|
- `pkg/diunwebhook/sqlite_store.go` -- SQLiteStore implementation (pattern for new methods)
|
||||||
|
- `pkg/diunwebhook/postgres_store.go` -- PostgresStore implementation (must also get new methods)
|
||||||
|
- `pkg/diunwebhook/server.go` -- Server struct and handler registration (new endpoints go here)
|
||||||
|
|
||||||
|
### Frontend components affected
|
||||||
|
- `frontend/src/App.tsx` -- Root component (filter state, bulk dismiss wiring, layout changes)
|
||||||
|
- `frontend/src/hooks/useUpdates.ts` -- Polling hook (toast detection, bulk dismiss callbacks, tab title)
|
||||||
|
- `frontend/src/components/Header.tsx` -- Header (badge counter, theme toggle, dismiss-all button)
|
||||||
|
- `frontend/src/components/TagSection.tsx` -- Tag sections (per-group dismiss button)
|
||||||
|
- `frontend/src/components/ServiceCard.tsx` -- Service cards (new-update highlight, drag handle fix)
|
||||||
|
- `frontend/src/main.tsx` -- Entry point (theme initialization logic change)
|
||||||
|
|
||||||
|
### Requirements
|
||||||
|
- `.planning/REQUIREMENTS.md` -- BULK-01, BULK-02, SRCH-01-04, INDIC-01-04, A11Y-01, A11Y-02
|
||||||
|
|
||||||
|
</canonical_refs>
|
||||||
|
|
||||||
|
<code_context>
|
||||||
|
## Existing Code Insights
|
||||||
|
|
||||||
|
### Reusable Assets
|
||||||
|
- `Button` component (`frontend/src/components/ui/button.tsx`): use for dismiss-all and per-group dismiss buttons
|
||||||
|
- `Badge` component (`frontend/src/components/ui/badge.tsx`): use for pending count badge in header
|
||||||
|
- `cn()` utility (`frontend/src/lib/utils.ts`): conditional class composition for highlight styles
|
||||||
|
- `timeAgo()` utility (`frontend/src/lib/time.ts`): already used in ServiceCard, relevant for toast messages
|
||||||
|
- `AcknowledgeButton` component: existing per-item dismiss pattern to follow for bulk buttons
|
||||||
|
|
||||||
|
### Established Patterns
|
||||||
|
- `useUpdates` hook: centralized data fetching + state management -- extend with bulk dismiss, toast detection, and tab title side effects
|
||||||
|
- Optimistic updates: used for tag assignment -- apply same pattern for bulk dismiss (update UI immediately, fire API call)
|
||||||
|
- Polling at 5s intervals: toast detection can diff previous vs current poll results
|
||||||
|
- Dark mode via Tailwind `class` strategy: theme toggle adds/removes `dark` class on `document.documentElement`
|
||||||
|
- No global state library: filter state lives in `App.tsx` via `useState`, passed as props
|
||||||
|
|
||||||
|
### Integration Points
|
||||||
|
- `cmd/diunwebhook/main.go`: register 2 new routes on the mux
|
||||||
|
- `store.go`: add `AcknowledgeAll` and `AcknowledgeByTag` to Store interface
|
||||||
|
- `sqlite_store.go` + `postgres_store.go`: implement new Store methods in both dialects
|
||||||
|
- `server.go`: add handler methods for bulk acknowledge endpoints
|
||||||
|
- `App.tsx`: add filter state, wire filter bar component, pass bulk dismiss callbacks
|
||||||
|
- `Header.tsx`: add pending count badge, theme toggle button, dismiss-all button
|
||||||
|
- `main.tsx`: replace hardcoded dark mode with localStorage + prefers-color-scheme logic
|
||||||
|
|
||||||
|
</code_context>
|
||||||
|
|
||||||
|
<specifics>
|
||||||
|
## Specific Ideas
|
||||||
|
|
||||||
|
No specific requirements -- open to standard approaches. The existing shadcn/ui + Tailwind dark mode setup provides the foundation for theme toggling.
|
||||||
|
|
||||||
|
</specifics>
|
||||||
|
|
||||||
|
<deferred>
|
||||||
|
## Deferred Ideas
|
||||||
|
|
||||||
|
None -- discussion stayed within phase scope.
|
||||||
|
|
||||||
|
</deferred>
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
*Phase: 04-ux-improvements*
|
||||||
|
*Context gathered: 2026-03-24 via auto mode*
|
||||||
181
.planning/phases/04-ux-improvements/04-DISCUSSION-LOG.md
Normal file
181
.planning/phases/04-ux-improvements/04-DISCUSSION-LOG.md
Normal file
@@ -0,0 +1,181 @@
|
|||||||
|
# Phase 4: UX Improvements - Discussion Log
|
||||||
|
|
||||||
|
> **Audit trail only.** Do not use as input to planning, research, or execution agents.
|
||||||
|
> Decisions are captured in CONTEXT.md -- this log preserves the alternatives considered.
|
||||||
|
|
||||||
|
**Date:** 2026-03-24
|
||||||
|
**Phase:** 04-ux-improvements
|
||||||
|
**Areas discussed:** Bulk dismiss scope, Search/filter architecture, New-update detection, Theme toggle behavior
|
||||||
|
**Mode:** auto (all decisions auto-selected)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Bulk Dismiss Scope
|
||||||
|
|
||||||
|
| Option | Description | Selected |
|
||||||
|
|--------|-------------|----------|
|
||||||
|
| New Store methods + dedicated endpoints | Add AcknowledgeAll and AcknowledgeByTag to Store interface with new HTTP endpoints | ✓ |
|
||||||
|
| Batch image list from frontend | Frontend sends list of image names to a generic bulk-dismiss endpoint | |
|
||||||
|
| Reuse existing single-dismiss in loop | Frontend calls existing PATCH /api/updates/{image} for each item | |
|
||||||
|
|
||||||
|
**User's choice:** [auto] New Store methods + dedicated endpoints (recommended default)
|
||||||
|
**Notes:** Consistent with existing per-image dismiss pattern. Server-side bulk is more efficient and keeps frontend simple.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
| Option | Description | Selected |
|
||||||
|
|--------|-------------|----------|
|
||||||
|
| Tag ID parameter for per-group dismiss | Server looks up which images belong to the tag | ✓ |
|
||||||
|
| Send list of images from frontend | Frontend determines which images are in the group | |
|
||||||
|
|
||||||
|
**User's choice:** [auto] Tag ID parameter (recommended default)
|
||||||
|
**Notes:** Server already knows tag-image relationships, fewer bytes over the wire.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
| Option | Description | Selected |
|
||||||
|
|--------|-------------|----------|
|
||||||
|
| Dismiss-all in header, dismiss-group in TagSection header | Natural placement near existing controls | ✓ |
|
||||||
|
| All bulk actions in a separate toolbar | Dedicated action bar for bulk operations | |
|
||||||
|
|
||||||
|
**User's choice:** [auto] Dismiss-all in header area, dismiss-by-group in each TagSection header (recommended default)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
| Option | Description | Selected |
|
||||||
|
|--------|-------------|----------|
|
||||||
|
| Confirmation dialog for all, inline for per-group | Dismiss-all gets modal; per-group matches existing tag-delete pattern | ✓ |
|
||||||
|
| No confirmation for either | Fast but risky | |
|
||||||
|
| Confirmation for both | Consistent but slower workflow | |
|
||||||
|
|
||||||
|
**User's choice:** [auto] Yes, confirmation dialog for dismiss-all; inline confirm for per-group (recommended default)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Search/Filter Architecture
|
||||||
|
|
||||||
|
| Option | Description | Selected |
|
||||||
|
|--------|-------------|----------|
|
||||||
|
| Client-side filtering | All data already in memory from polling; filter in React state | ✓ |
|
||||||
|
| Server-side with query params | Add filter params to GET /api/updates endpoint | |
|
||||||
|
| Hybrid (client with server fallback) | Client-side now, server-side when data grows | |
|
||||||
|
|
||||||
|
**User's choice:** [auto] Client-side filtering (recommended default)
|
||||||
|
**Notes:** All data is fetched via 5s polling. No need for server-side filtering at this scale.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
| Option | Description | Selected |
|
||||||
|
|--------|-------------|----------|
|
||||||
|
| Filter bar above sections, below stats | Standard placement, visible without scrolling | ✓ |
|
||||||
|
| Collapsible sidebar filters | More space but hidden by default | |
|
||||||
|
| Inline per-section filters | Distributed, harder to use across groups | |
|
||||||
|
|
||||||
|
**User's choice:** [auto] Filter bar above the sections list (recommended default)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
| Option | Description | Selected |
|
||||||
|
|--------|-------------|----------|
|
||||||
|
| Text search + status + tag + sort dropdowns | Covers all SRCH requirements | ✓ |
|
||||||
|
| Text search only | Minimal, doesn't cover SRCH-02/03/04 | |
|
||||||
|
|
||||||
|
**User's choice:** [auto] Search text input + status dropdown + tag dropdown + sort dropdown (recommended default)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
| Option | Description | Selected |
|
||||||
|
|--------|-------------|----------|
|
||||||
|
| No persistence (reset on reload) | Simpler, dashboard is quick-glance tool | ✓ |
|
||||||
|
| Persist in URL params | Shareable/bookmarkable filters | |
|
||||||
|
| Persist in localStorage | Remembers across visits | |
|
||||||
|
|
||||||
|
**User's choice:** [auto] No persistence -- reset on reload (recommended default)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## New-Update Detection
|
||||||
|
|
||||||
|
| Option | Description | Selected |
|
||||||
|
|--------|-------------|----------|
|
||||||
|
| localStorage timestamp | Store last visit time client-side, compare with received_at | ✓ |
|
||||||
|
| Server-side last-seen tracking | Track per-user last-seen on server | |
|
||||||
|
| Session-only (no persistence) | Only detect new items arriving during current session | |
|
||||||
|
|
||||||
|
**User's choice:** [auto] localStorage timestamp (recommended default)
|
||||||
|
**Notes:** Single-user tool, no server changes needed. Simple and effective.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
| Option | Description | Selected |
|
||||||
|
|--------|-------------|----------|
|
||||||
|
| Auto-dismiss after 5s with dismiss button | Non-intrusive, doesn't pile up | ✓ |
|
||||||
|
| Sticky until manually dismissed | Persistent but can pile up | |
|
||||||
|
| No toast, badge only | Minimal notification | |
|
||||||
|
|
||||||
|
**User's choice:** [auto] Auto-dismiss after 5 seconds with dismiss button (recommended default)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
| Option | Description | Selected |
|
||||||
|
|--------|-------------|----------|
|
||||||
|
| Header badge + tab title | Always visible, covers INDIC-01 and INDIC-02 | ✓ |
|
||||||
|
| Stats card only | Already partially exists | |
|
||||||
|
|
||||||
|
**User's choice:** [auto] In the header next to title + browser tab title (recommended default)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
| Option | Description | Selected |
|
||||||
|
|--------|-------------|----------|
|
||||||
|
| Subtle left border accent | Visible but not overwhelming | ✓ |
|
||||||
|
| Background color change | More prominent | |
|
||||||
|
| Pulsing dot indicator | Animated, attention-grabbing | |
|
||||||
|
|
||||||
|
**User's choice:** [auto] Subtle left border accent on ServiceCard (recommended default)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Theme Toggle Behavior
|
||||||
|
|
||||||
|
| Option | Description | Selected |
|
||||||
|
|--------|-------------|----------|
|
||||||
|
| Header bar, next to refresh button | Compact, always accessible | ✓ |
|
||||||
|
| Footer | Less prominent | |
|
||||||
|
| Settings page | Requires new page | |
|
||||||
|
|
||||||
|
**User's choice:** [auto] Header bar, next to refresh button (recommended default)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
| Option | Description | Selected |
|
||||||
|
|--------|-------------|----------|
|
||||||
|
| localStorage with prefers-color-scheme fallback | Standard pattern, no server involvement | ✓ |
|
||||||
|
| Cookie-based | SSR-friendly but not needed here | |
|
||||||
|
| No persistence | Resets every visit | |
|
||||||
|
|
||||||
|
**User's choice:** [auto] localStorage with prefers-color-scheme fallback (recommended default)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
| Option | Description | Selected |
|
||||||
|
|--------|-------------|----------|
|
||||||
|
| Always visible at reduced opacity | Accessible without cluttering UI | ✓ |
|
||||||
|
| Always fully visible | More prominent but noisier | |
|
||||||
|
| Keep hover-only | Current behavior, accessibility issue | |
|
||||||
|
|
||||||
|
**User's choice:** [auto] Always visible at reduced opacity, full opacity on hover (recommended default)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Claude's Discretion
|
||||||
|
|
||||||
|
- Toast component implementation (custom or shadcn/ui Sonner)
|
||||||
|
- Exact filter bar layout and responsive breakpoints
|
||||||
|
- Animation/transition details for theme switching
|
||||||
|
- Whether to show a count in the per-group dismiss button
|
||||||
|
- Sort order default
|
||||||
|
|
||||||
|
## Deferred Ideas
|
||||||
|
|
||||||
|
None -- discussion stayed within phase scope.
|
||||||
627
.planning/phases/04-ux-improvements/04-RESEARCH.md
Normal file
627
.planning/phases/04-ux-improvements/04-RESEARCH.md
Normal file
@@ -0,0 +1,627 @@
|
|||||||
|
# Phase 4: UX Improvements - Research
|
||||||
|
|
||||||
|
**Researched:** 2026-03-24
|
||||||
|
**Domain:** React SPA (search/filter, toast, theme, drag UX) + Go HTTP handlers (bulk acknowledge endpoints)
|
||||||
|
**Confidence:** HIGH — all findings are based on direct inspection of the live codebase. No third-party library unknowns; every feature maps to patterns already present in the project.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
<user_constraints>
|
||||||
|
## User Constraints (from CONTEXT.md)
|
||||||
|
|
||||||
|
### Locked Decisions
|
||||||
|
|
||||||
|
**Bulk dismiss (BULK-01, BULK-02)**
|
||||||
|
- D-01: Add two new Store methods: `AcknowledgeAll() (count int, err error)` and `AcknowledgeByTag(tagID int) (count int, err error)` — consistent with existing `AcknowledgeUpdate(image)` pattern
|
||||||
|
- D-02: Two new API endpoints: `POST /api/updates/acknowledge-all` and `POST /api/updates/acknowledge-by-tag` (with `tag_id` in body) — returning the count of dismissed items
|
||||||
|
- D-03: UI placement: "Dismiss All" button in the header/stats area; "Dismiss Group" button in each TagSection header next to the existing delete button
|
||||||
|
- D-04: Confirmation: inline two-click confirm pattern for both dismiss-all and per-group dismiss — consistent with existing tag delete UX, zero additional dependencies (modal/dialog originally considered but inline is simpler and matches established patterns)
|
||||||
|
|
||||||
|
**Search and filter (SRCH-01 through SRCH-04)**
|
||||||
|
- D-05: Client-side filtering only — all data is already in memory from polling, no new API endpoints needed
|
||||||
|
- D-06: Filter bar placed above the sections list, below the stats row
|
||||||
|
- D-07: Controls: text search input (filters by image name), status dropdown (all/pending/acknowledged), tag dropdown (all/specific tag/untagged), sort dropdown (date/name/registry)
|
||||||
|
- D-08: Filters do not persist across page reloads — reset on each visit
|
||||||
|
|
||||||
|
**New-update indicators (INDIC-01 through INDIC-04)**
|
||||||
|
- D-09: Pending update badge/counter displayed in the Header component next to the "Diun Dashboard" title — always visible
|
||||||
|
- D-10: Browser tab title reflects pending count: `"DiunDash (N)"` when N > 0, `"DiunDash"` when zero
|
||||||
|
- D-11: Toast notification when new updates arrive during polling — auto-dismiss after 5 seconds with manual dismiss button; non-stacking (latest update replaces previous toast)
|
||||||
|
- D-12: "New since last visit" detection via localStorage timestamp — store `lastVisitTimestamp` on page unload; updates with `received_at` after that timestamp get a visual highlight
|
||||||
|
- D-13: Highlight style: subtle left border accent (`border-l-4 border-amber-500`) on ServiceCard for new-since-last-visit items
|
||||||
|
|
||||||
|
**Accessibility and theme (A11Y-01, A11Y-02)**
|
||||||
|
- D-14: Light/dark theme toggle placed in the Header bar next to the refresh button — icon button (sun/moon)
|
||||||
|
- D-15: Theme preference persisted in localStorage; on first visit, respects `prefers-color-scheme` media query; removes the hardcoded `classList.add('dark')` from `main.tsx`
|
||||||
|
- D-16: Drag handle on ServiceCard always visible at reduced opacity (`opacity-40`), full opacity on hover — removes the current `opacity-0 group-hover:opacity-100` pattern
|
||||||
|
|
||||||
|
### Claude's Discretion
|
||||||
|
- Toast component implementation (custom or shadcn/ui Sonner)
|
||||||
|
- Exact filter bar layout and responsive breakpoints
|
||||||
|
- Animation/transition details for theme switching
|
||||||
|
- Whether to show a count in the per-group dismiss button (e.g., "Dismiss 3")
|
||||||
|
- Sort order default (most recent first vs alphabetical)
|
||||||
|
|
||||||
|
### Deferred Ideas (OUT OF SCOPE)
|
||||||
|
None — discussion stayed within phase scope.
|
||||||
|
</user_constraints>
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
<phase_requirements>
|
||||||
|
## Phase Requirements
|
||||||
|
|
||||||
|
| ID | Description | Research Support |
|
||||||
|
|----|-------------|------------------|
|
||||||
|
| BULK-01 | User can acknowledge all pending updates at once with a single action | New `AcknowledgeAll` Store method + `POST /api/updates/acknowledge-all` handler; optimistic update in useUpdates follows existing acknowledge pattern |
|
||||||
|
| BULK-02 | User can acknowledge all pending updates within a specific tag/group | New `AcknowledgeByTag` Store method + `POST /api/updates/acknowledge-by-tag` handler; TagSection receives `onAcknowledgeGroup` callback prop |
|
||||||
|
| SRCH-01 | User can search updates by image name (text search) | Client-side filter on `entries` array in App.tsx; filter state via useState; case-insensitive substring match on image key |
|
||||||
|
| SRCH-02 | User can filter updates by status (pending vs acknowledged) | Client-side filter on `entry.acknowledged` boolean already present in UpdateEntry type |
|
||||||
|
| SRCH-03 | User can filter updates by tag/group | Client-side filter on `entry.tag?.id` against tag dropdown value; "untagged" = null tag |
|
||||||
|
| SRCH-04 | User can sort updates by date, image name, or registry | Client-side sort on `entries` array before grouping; `received_at` (string ISO 8601 sortable), image key (string), registry extracted by existing `getRegistry` helper in ServiceCard |
|
||||||
|
| INDIC-01 | Dashboard shows a badge/counter of pending (unacknowledged) updates | `pending` count already computed in App.tsx; Badge component already exists; wire into Header props |
|
||||||
|
| INDIC-02 | Browser tab title includes pending update count | `document.title` side effect in useUpdates or App.tsx useEffect watching pending count |
|
||||||
|
| INDIC-03 | In-page toast notification appears when new updates arrive during polling | Detect new images in fetchUpdates by comparing prev vs new keys; toast state in useUpdates hook; custom toast component or Radix-based |
|
||||||
|
| INDIC-04 | Updates that arrived since the user's last visit are visually highlighted | localStorage `lastVisitTimestamp` written on `beforeunload`; read at mount; compare `entry.received_at` ISO string; add `isNewSinceLastVisit` boolean to derived state |
|
||||||
|
| A11Y-01 | Light/dark theme toggle with system preference detection | Tailwind `darkMode: ['class']` already configured; toggle adds/removes `dark` class; localStorage + `prefers-color-scheme` media query init replaces hardcoded `classList.add('dark')` in main.tsx |
|
||||||
|
| A11Y-02 | Drag handle for tag reordering is always visible (not hover-only) | Change `opacity-0 group-hover:opacity-100` to `opacity-40 hover:opacity-100` on the grip button in ServiceCard.tsx |
|
||||||
|
</phase_requirements>
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Summary
|
||||||
|
|
||||||
|
Phase 4 adds UX features across the entire stack. The backend requires two new SQL operations (`UPDATE ... WHERE acknowledged_at IS NULL` for all rows, and the same filtered by tag join) and two new HTTP handlers following the exact pattern already used for `DismissHandler`. No schema changes, no migrations, no new tables.
|
||||||
|
|
||||||
|
The frontend work is pure React/TypeScript. All features are enabled by the existing stack: client-side filter/sort, toast via a lightweight component, theme via the already-configured Tailwind `darkMode: ['class']` strategy, localStorage for persistence of theme preference and last-visit timestamp, and a one-line opacity change for the drag handle. No new npm packages are strictly required. The one discretionary choice is the toast implementation: a small custom component avoids a new dependency; `sonner` (shadcn/ui's recommended toast) is an option if polish justifies the dependency.
|
||||||
|
|
||||||
|
**Primary recommendation:** Implement everything with existing dependencies. Use a custom toast component (30 lines of Tailwind CSS) rather than installing sonner. Use native `<select>` elements for filter dropdowns styled with Tailwind rather than installing a headless select library. Both keep the bundle lean and avoid Radix package additions that would require new peer dependency management.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Standard Stack
|
||||||
|
|
||||||
|
### Core (already installed — no new packages required)
|
||||||
|
|
||||||
|
| Library | Version | Purpose | Why Standard |
|
||||||
|
|---------|---------|---------|--------------|
|
||||||
|
| React | ^19.0.0 | UI framework | Project constraint |
|
||||||
|
| TypeScript | ^5.7.2 | Type safety | Project constraint |
|
||||||
|
| Tailwind CSS | ^3.4.17 | Styling | Project constraint |
|
||||||
|
| shadcn/ui (Badge, Button) | in-repo | UI primitives | Already present; reuse for badge and buttons |
|
||||||
|
| Lucide React | ^0.469.0 | Icons | Already present; Sun/Moon icons for theme toggle |
|
||||||
|
| class-variance-authority | ^0.7.1 | Variant management | Already used in Button/Badge |
|
||||||
|
| clsx + tailwind-merge via `cn()` | in-repo | Conditional classes | Already used project-wide |
|
||||||
|
|
||||||
|
### Potentially New (discretionary)
|
||||||
|
|
||||||
|
| Library | Version | Purpose | When to Use |
|
||||||
|
|---------|---------|---------|-------------|
|
||||||
|
| sonner | ^1.7.x | Toast notifications (shadcn/ui recommended) | Only if custom toast feels too raw; adds ~15KB |
|
||||||
|
| @radix-ui/react-dialog | ^1.x | Accessible modal for dismiss-all confirmation | Only if a custom dialog is not acceptable; adds Radix peer dep |
|
||||||
|
| @radix-ui/react-select | ^2.x | Accessible filter dropdowns | Only if native `<select>` is unacceptable for design reasons |
|
||||||
|
|
||||||
|
**Version verification:** The above new packages are NOT currently in `package.json`. Before adding any, run `bun add <package>` to pull the latest version from the registry. Do not assume training-data version numbers.
|
||||||
|
|
||||||
|
**Recommendation:** Use native HTML `<select>` for filter dropdowns (Tailwind-styled). Use a custom inline dialog (confirm pattern already used for tag delete) or a small `<dialog>` element for dismiss-all. Use a custom toast component. This avoids any new package installs.
|
||||||
|
|
||||||
|
**If sonner is chosen:**
|
||||||
|
```bash
|
||||||
|
bun add sonner
|
||||||
|
```
|
||||||
|
Then add `<Toaster />` to `App.tsx` root and call `toast()` from anywhere.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Architecture Patterns
|
||||||
|
|
||||||
|
### Recommended Project Structure After Phase 4
|
||||||
|
|
||||||
|
```
|
||||||
|
frontend/src/
|
||||||
|
├── components/
|
||||||
|
│ ├── Header.tsx # add: pending badge, theme toggle, dismiss-all button
|
||||||
|
│ ├── TagSection.tsx # add: per-group dismiss button + inline confirm
|
||||||
|
│ ├── ServiceCard.tsx # change: drag handle opacity, new-visit highlight
|
||||||
|
│ ├── FilterBar.tsx # NEW: search input + 3 dropdowns
|
||||||
|
│ ├── Toast.tsx # NEW: simple toast notification component
|
||||||
|
│ └── ui/ # existing shadcn primitives (unchanged)
|
||||||
|
├── hooks/
|
||||||
|
│ └── useUpdates.ts # extend: bulk dismiss callbacks, toast detection, tab title
|
||||||
|
├── App.tsx # extend: filter state, filtered/sorted entries, Toast mount
|
||||||
|
└── main.tsx # change: theme init logic
|
||||||
|
|
||||||
|
pkg/diunwebhook/
|
||||||
|
├── store.go # add: AcknowledgeAll, AcknowledgeByTag to interface
|
||||||
|
├── sqlite_store.go # implement: AcknowledgeAll, AcknowledgeByTag
|
||||||
|
├── postgres_store.go # implement: AcknowledgeAll, AcknowledgeByTag
|
||||||
|
└── server.go            # add: AcknowledgeAllHandler, AcknowledgeByTagHandler
|
||||||
|
|
||||||
|
cmd/diunwebhook/
|
||||||
|
└── main.go # add: 2 route registrations
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pattern 1: New Store Method Implementation
|
||||||
|
|
||||||
|
The two new Store methods follow the exact `AcknowledgeUpdate` pattern. Confirmed by reading `sqlite_store.go` and `postgres_store.go`.
|
||||||
|
|
||||||
|
**SQLite:**
|
||||||
|
```go
|
||||||
|
// AcknowledgeAll marks all unacknowledged updates as acknowledged.
|
||||||
|
// Returns the count of rows updated.
|
||||||
|
func (s *SQLiteStore) AcknowledgeAll() (int, error) {
|
||||||
|
s.mu.Lock()
|
||||||
|
defer s.mu.Unlock()
|
||||||
|
res, err := s.db.Exec(`UPDATE updates SET acknowledged_at = datetime('now') WHERE acknowledged_at IS NULL`)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
n, _ := res.RowsAffected()
|
||||||
|
return int(n), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// AcknowledgeByTag marks all unacknowledged updates for images in the given tag as acknowledged.
|
||||||
|
func (s *SQLiteStore) AcknowledgeByTag(tagID int) (int, error) {
|
||||||
|
s.mu.Lock()
|
||||||
|
defer s.mu.Unlock()
|
||||||
|
res, err := s.db.Exec(`
|
||||||
|
UPDATE updates SET acknowledged_at = datetime('now')
|
||||||
|
WHERE acknowledged_at IS NULL
|
||||||
|
AND image IN (SELECT image FROM tag_assignments WHERE tag_id = ?)`, tagID)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
n, _ := res.RowsAffected()
|
||||||
|
return int(n), nil
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**PostgreSQL (positional params, NOW() instead of datetime('now')):**
|
||||||
|
```go
|
||||||
|
func (s *PostgresStore) AcknowledgeAll() (int, error) {
|
||||||
|
res, err := s.db.Exec(`UPDATE updates SET acknowledged_at = NOW() WHERE acknowledged_at IS NULL`)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
n, _ := res.RowsAffected()
|
||||||
|
return int(n), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *PostgresStore) AcknowledgeByTag(tagID int) (int, error) {
|
||||||
|
res, err := s.db.Exec(`
|
||||||
|
UPDATE updates SET acknowledged_at = NOW()
|
||||||
|
WHERE acknowledged_at IS NULL
|
||||||
|
AND image IN (SELECT image FROM tag_assignments WHERE tag_id = $1)`, tagID)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
n, _ := res.RowsAffected()
|
||||||
|
return int(n), nil
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pattern 2: New HTTP Handlers
|
||||||
|
|
||||||
|
Follow `DismissHandler` exactly: POST method check, body size limit, JSON decode, store call, JSON response with count.
|
||||||
|
|
||||||
|
```go
|
||||||
|
// AcknowledgeAllHandler handles POST /api/updates/acknowledge-all
|
||||||
|
func (s *Server) AcknowledgeAllHandler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if r.Method != http.MethodPost {
|
||||||
|
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
count, err := s.store.AcknowledgeAll()
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("AcknowledgeAllHandler: %v", err)
|
||||||
|
http.Error(w, "internal error", http.StatusInternalServerError)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
json.NewEncoder(w).Encode(map[string]int{"count": count}) //nolint:errcheck
|
||||||
|
}
|
||||||
|
|
||||||
|
// AcknowledgeByTagHandler handles POST /api/updates/acknowledge-by-tag
|
||||||
|
func (s *Server) AcknowledgeByTagHandler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
if r.Method != http.MethodPost {
|
||||||
|
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
r.Body = http.MaxBytesReader(w, r.Body, maxBodyBytes)
|
||||||
|
var req struct {
|
||||||
|
TagID int `json:"tag_id"`
|
||||||
|
}
|
||||||
|
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||||
|
http.Error(w, "bad request", http.StatusBadRequest)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if req.TagID <= 0 {
|
||||||
|
http.Error(w, "bad request: tag_id required", http.StatusBadRequest)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
count, err := s.store.AcknowledgeByTag(req.TagID)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("AcknowledgeByTagHandler: %v", err)
|
||||||
|
http.Error(w, "internal error", http.StatusInternalServerError)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
json.NewEncoder(w).Encode(map[string]int{"count": count}) //nolint:errcheck
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Route registration in main.go** — in `net/http`'s default mux, the most specific (longest) registered pattern always wins, so `/api/updates/acknowledge-all` is matched ahead of the `/api/updates/` subtree regardless of registration order. Listing specific paths first is still good for readability:
|
||||||
|
```go
|
||||||
|
mux.HandleFunc("/api/updates/acknowledge-all", srv.AcknowledgeAllHandler)
|
||||||
|
mux.HandleFunc("/api/updates/acknowledge-by-tag", srv.AcknowledgeByTagHandler)
|
||||||
|
mux.HandleFunc("/api/updates/", srv.DismissHandler) // existing — must remain after the above
|
||||||
|
mux.HandleFunc("/api/updates", srv.UpdatesHandler)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pattern 3: Client-Side Filter and Sort
|
||||||
|
|
||||||
|
Filter state lives in `App.tsx` (no global state library — project constraint). Filtering happens on the computed `entries` array before grouping into `taggedSections` and `untaggedRows`.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// In App.tsx — filter state
|
||||||
|
const [search, setSearch] = useState('')
|
||||||
|
const [statusFilter, setStatusFilter] = useState<'all' | 'pending' | 'acknowledged'>('all')
|
||||||
|
const [tagFilter, setTagFilter] = useState<'all' | 'untagged' | number>('all')
|
||||||
|
const [sortOrder, setSortOrder] = useState<'date-desc' | 'date-asc' | 'name' | 'registry'>('date-desc')
|
||||||
|
|
||||||
|
// Derived: filtered + sorted entries
|
||||||
|
const filteredEntries = useMemo(() => {
|
||||||
|
let result = Object.entries(updates)
|
||||||
|
if (search) {
|
||||||
|
const q = search.toLowerCase()
|
||||||
|
result = result.filter(([image]) => image.toLowerCase().includes(q))
|
||||||
|
}
|
||||||
|
if (statusFilter === 'pending') result = result.filter(([, e]) => !e.acknowledged)
|
||||||
|
if (statusFilter === 'acknowledged') result = result.filter(([, e]) => e.acknowledged)
|
||||||
|
if (tagFilter === 'untagged') result = result.filter(([, e]) => !e.tag)
|
||||||
|
if (typeof tagFilter === 'number') result = result.filter(([, e]) => e.tag?.id === tagFilter)
|
||||||
|
result.sort(([ia, ea], [ib, eb]) => {
|
||||||
|
switch (sortOrder) {
|
||||||
|
case 'date-asc': return ea.received_at < eb.received_at ? -1 : 1
|
||||||
|
case 'name': return ia.localeCompare(ib)
|
||||||
|
case 'registry': return getRegistry(ia).localeCompare(getRegistry(ib))
|
||||||
|
default: return ea.received_at > eb.received_at ? -1 : 1 // date-desc
|
||||||
|
}
|
||||||
|
})
|
||||||
|
return result
|
||||||
|
}, [updates, search, statusFilter, tagFilter, sortOrder])
|
||||||
|
```
|
||||||
|
|
||||||
|
`getRegistry` already exists in `ServiceCard.tsx` — move it to a shared utility or duplicate in `App.tsx`.
|
||||||
|
|
||||||
|
### Pattern 4: Toast Detection in useUpdates
|
||||||
|
|
||||||
|
Detect new arrivals by comparing the keys of the previous poll result against the current result. New keys = new images arrived.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// In useUpdates.ts — track previous keys
|
||||||
|
const prevKeysRef = useRef<Set<string>>(new Set())
|
||||||
|
const [newArrivals, setNewArrivals] = useState<string[]>([])
|
||||||
|
|
||||||
|
const fetchUpdates = useCallback(async () => {
|
||||||
|
// ... existing fetch logic ...
|
||||||
|
const data: UpdatesMap = await res.json()
|
||||||
|
const newKeys = Object.keys(data).filter(k => !prevKeysRef.current.has(k))
|
||||||
|
if (newKeys.length > 0 && prevKeysRef.current.size > 0) {
|
||||||
|
// Only fire toast after initial load (size > 0 guard)
|
||||||
|
setNewArrivals(newKeys)
|
||||||
|
}
|
||||||
|
prevKeysRef.current = new Set(Object.keys(data))
|
||||||
|
setUpdates(data)
|
||||||
|
// ...
|
||||||
|
}, [])
|
||||||
|
```
|
||||||
|
|
||||||
|
Non-stacking: `newArrivals` state is replaced (not appended) each poll, so the toast always shows the latest batch.
|
||||||
|
|
||||||
|
### Pattern 5: Theme Toggle
|
||||||
|
|
||||||
|
The project already has `darkMode: ['class']` in `tailwind.config.ts` and CSS variables for both `:root` (light) and `.dark` (dark) in `index.css`. The only change is in `main.tsx` — replace the hardcoded `classList.add('dark')` with an initializer that reads localStorage and falls back to `prefers-color-scheme`.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// main.tsx — replace classList.add('dark') with:
|
||||||
|
const stored = localStorage.getItem('theme')
|
||||||
|
if (stored === 'dark' || (!stored && window.matchMedia('(prefers-color-scheme: dark)').matches)) {
|
||||||
|
document.documentElement.classList.add('dark')
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Toggle function (in Header or a custom hook):
|
||||||
|
```typescript
|
||||||
|
function toggleTheme() {
|
||||||
|
const isDark = document.documentElement.classList.toggle('dark')
|
||||||
|
localStorage.setItem('theme', isDark ? 'dark' : 'light')
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pattern 6: Last-Visit Highlight
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// In App.tsx (or useUpdates.ts) — read at mount
|
||||||
|
const lastVisitRef = useRef<string | null>(
|
||||||
|
localStorage.getItem('lastVisitTimestamp')
|
||||||
|
)
|
||||||
|
// Write on unload
|
||||||
|
useEffect(() => {
|
||||||
|
const handler = () => localStorage.setItem('lastVisitTimestamp', new Date().toISOString())
|
||||||
|
window.addEventListener('beforeunload', handler)
|
||||||
|
return () => window.removeEventListener('beforeunload', handler)
|
||||||
|
}, [])
|
||||||
|
|
||||||
|
// Usage in ServiceCard or when building rows:
|
||||||
|
const isNewSinceLastVisit = lastVisitRef.current
|
||||||
|
? entry.received_at > lastVisitRef.current
|
||||||
|
: false
|
||||||
|
```
|
||||||
|
|
||||||
|
In `ServiceCard.tsx`:
|
||||||
|
```tsx
|
||||||
|
<div className={cn(
|
||||||
|
'group p-4 rounded-xl border border-border bg-card ...',
|
||||||
|
isNewSinceLastVisit && 'border-l-4 border-l-amber-500',
|
||||||
|
isDragging && 'opacity-30',
|
||||||
|
)}>
|
||||||
|
```
|
||||||
|
|
||||||
|
Note: `isNewSinceLastVisit` must be passed as a prop to ServiceCard since the ref lives in App/useUpdates.
|
||||||
|
|
||||||
|
### Pattern 7: Tab Title
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// In App.tsx or useUpdates.ts — side effect watching pending count
|
||||||
|
useEffect(() => {
|
||||||
|
document.title = pending > 0 ? `DiunDash (${pending})` : 'DiunDash'
|
||||||
|
}, [pending])
|
||||||
|
```
|
||||||
|
|
||||||
|
`pending` is already computed in `App.tsx`.
|
||||||
|
|
||||||
|
### Pattern 8: Dismiss-All Confirmation Modal
|
||||||
|
|
||||||
|
The project has no existing modal component. The simplest approach consistent with the inline confirm pattern already used for tag delete is a two-click confirm pattern on the "Dismiss All" button itself — same UX as the "Delete" button in `TagSection.tsx`. This avoids adding a dialog library.
|
||||||
|
|
||||||
|
If a modal is preferred (D-04 says "modal/dialog confirmation"), the lightest option is the HTML `<dialog>` element with no external dependencies:
|
||||||
|
|
||||||
|
```tsx
|
||||||
|
// Simple inline confirm state (matches TagSection pattern exactly)
|
||||||
|
const [confirmDismissAll, setConfirmDismissAll] = useState(false)
|
||||||
|
|
||||||
|
<Button
|
||||||
|
variant={confirmDismissAll ? 'destructive' : 'outline'}
|
||||||
|
size="sm"
|
||||||
|
onClick={() => {
|
||||||
|
if (!confirmDismissAll) { setConfirmDismissAll(true); return }
|
||||||
|
onDismissAll()
|
||||||
|
setConfirmDismissAll(false)
|
||||||
|
}}
|
||||||
|
onBlur={() => setConfirmDismissAll(false)}
|
||||||
|
>
|
||||||
|
{confirmDismissAll ? 'Sure? Dismiss all' : 'Dismiss All'}
|
||||||
|
</Button>
|
||||||
|
```
|
||||||
|
|
||||||
|
This matches the exact two-click confirm pattern already shipping in `TagSection.tsx` for tag deletion. Use this unless the user explicitly requires a modal overlay.
|
||||||
|
|
||||||
|
### Anti-Patterns to Avoid
|
||||||
|
|
||||||
|
- **Route registration order:** In `net/http`'s default mux, the longest (most specific) pattern always takes precedence, so `/api/updates/acknowledge-all` beats the `/api/updates/` subtree no matter which is registered first. Do not rely on registration order for dispatch — if a specific endpoint appears unreachable, check the pattern spelling (trailing slash, typos) rather than the registration sequence.
|
||||||
|
- **Filtering after grouping:** Do not filter within each `TagSection` separately — filter `entries` before grouping, then re-derive `taggedSections` and `untaggedRows` from filtered entries. Otherwise the tag group counts shown in section headers will be wrong.
|
||||||
|
- **Mutating `updates` object for bulk dismiss optimistic update:** Use the functional `setUpdates(prev => ...)` form and create a new object with `Object.fromEntries(Object.entries(prev).map(...))` to avoid mutating in place — same pattern as the existing `acknowledge` callback.
|
||||||
|
- **Hardcoded `classList.add('dark')` left in place:** If main.tsx is not updated, the theme toggle will fight with the initialization and users will see a flash or be unable to switch to light mode.
|
||||||
|
- **Toast stacking:** If toast state is accumulated into an array rather than replaced, multiple rapid polls accumulate toasts. D-11 says non-stacking — always replace, never append.
|
||||||
|
- **beforeunload timestamp written before any data loads:** The first visit will write a `lastVisitTimestamp` of "now", making every update appear highlighted. The guard is: only highlight items where `received_at > lastVisitTimestamp` AND `lastVisitTimestamp` existed before this page load (i.e., use the `useRef` initialized from localStorage at mount, not the live localStorage value).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Don't Hand-Roll
|
||||||
|
|
||||||
|
| Problem | Don't Build | Use Instead | Why |
|
||||||
|
|---------|-------------|-------------|-----|
|
||||||
|
| Case-insensitive image name search | Custom fuzzy matcher | `string.toLowerCase().includes(query.toLowerCase())` | The data set is small (dozens of images); simple substring match is sufficient |
|
||||||
|
| Toast notification system | Multiple-file toast context/provider | Single `Toast.tsx` component with useState in App | Project has no global state; keep toast state local |
|
||||||
|
| SQL bulk update | Row-by-row loop over `AcknowledgeUpdate` | Single `UPDATE ... WHERE acknowledged_at IS NULL` | One round-trip vs N; transactional; simpler |
|
||||||
|
| Theme persistence | Cookie or server-side preference | localStorage + `prefers-color-scheme` | Client-only SPA; localStorage is sufficient and already used for `lastVisitTimestamp` |
|
||||||
|
| Filter URL serialization | Query string encode/decode | Transient state in useState | D-08 explicitly locks: filters reset on reload |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Common Pitfalls
|
||||||
|
|
||||||
|
### Pitfall 1: Route Ordering in net/http ServeMux
|
||||||
|
|
||||||
|
**What goes wrong:** `mux.HandleFunc("/api/updates/", srv.DismissHandler)` is a subtree pattern that matches ALL paths starting with `/api/updates/` that have no more specific registered handler. If `/api/updates/acknowledge-all` is misspelled or never registered, those requests silently fall through to `DismissHandler`.
|
||||||
|
|
||||||
|
**Why it happens:** Go's `http.ServeMux` always dispatches to the longest matching registered pattern — registration order is irrelevant. An exact pattern like `/api/updates/acknowledge-all` wins over the `/api/updates/` subtree whenever both are registered; the subtree only catches paths with no more specific match.
|
||||||
|
|
||||||
|
**How to avoid:** Register `/api/updates/acknowledge-all` and `/api/updates/acknowledge-by-tag` as exact patterns and verify their spelling matches the frontend fetch paths exactly; group them near `/api/updates/` in `main.go` for readability. Registration order does not affect dispatch.
|
||||||
|
|
||||||
|
**Warning signs:** HTTP 204 or 404 returned by AcknowledgeAllHandler with no log output means the DismissHandler is handling the request.
|
||||||
|
|
||||||
|
### Pitfall 2: CSS Variable Missing for destructive in dark mode
|
||||||
|
|
||||||
|
**What goes wrong:** The `--destructive` and `--destructive-foreground` CSS variables are used by `buttonVariants` in `button.tsx` but are NOT defined in `index.css`. If a destructive-variant button is added (e.g., for dismiss-all confirm), it will render with no background.
|
||||||
|
|
||||||
|
**Why it happens:** The existing code never uses `variant="destructive"` — the two-click confirm in `TagSection.tsx` uses custom Tailwind classes (`text-destructive hover:bg-destructive/10`) rather than the Button component. So the missing CSS var was never noticed.
|
||||||
|
|
||||||
|
**How to avoid:** Either (a) add `--destructive` and `--destructive-foreground` to both `:root` and `.dark` in `index.css`, or (b) continue using inline Tailwind classes for the confirm state rather than the Button destructive variant.
|
||||||
|
|
||||||
|
A suitable value for `:root`: `--destructive: 0 84.2% 60.2%; --destructive-foreground: 0 0% 98%;`
|
||||||
|
For `.dark`: `--destructive: 0 62.8% 30.6%; --destructive-foreground: 0 85.7% 97.3%;`
|
||||||
|
|
||||||
|
### Pitfall 3: ServiceCard receives isNewSinceLastVisit as a Prop
|
||||||
|
|
||||||
|
**What goes wrong:** The `lastVisitRef` value is available in `App.tsx` at mount time, but `ServiceCard` currently receives only `image`, `entry`, and `onAcknowledge`. If the highlight logic is added inside ServiceCard reading from localStorage directly, every card reads localStorage independently — which is fine but couples the component to a side effect.
|
||||||
|
|
||||||
|
**Why it happens:** Convenience — it seems simpler to read localStorage in the card.
|
||||||
|
|
||||||
|
**How to avoid:** Compute `isNewSinceLastVisit` at the point where rows are built in `App.tsx` and pass it as a prop to `ServiceCard`. This keeps the component pure and the logic testable.
|
||||||
|
|
||||||
|
### Pitfall 4: Tab Title Not Reset When All Dismissed
|
||||||
|
|
||||||
|
**What goes wrong:** `document.title` is set to `"DiunDash (N)"` but if `pending` reaches 0 after a bulk dismiss, the title must be reset to `"DiunDash"`.
|
||||||
|
|
||||||
|
**Why it happens:** A `useEffect` runs once on mount and again whenever `pending` changes, so with `[pending]` as the dependency the title is set correctly even when `pending` starts and stays at 0. The failure mode only appears if the dependency is a derived boolean such as `[pending > 0]`, which skips re-runs while the count changes within the same truthiness.
|
||||||
|
|
||||||
|
**How to avoid:** The `useEffect` watching `pending` handles this correctly as long as it runs on mount (initial render with pending=0 will set title to "DiunDash"). Ensure the effect has `[pending]` in its dependency array, not `[pending > 0]`.
|
||||||
|
|
||||||
|
### Pitfall 5: AcknowledgeByTag Does Not Verify Tag Exists
|
||||||
|
|
||||||
|
**What goes wrong:** If `tag_id` in the request body refers to a deleted tag, the query silently updates 0 rows and returns count=0. This is acceptable behavior (idempotent), but the test should verify it returns 200 with count:0 rather than 404.
|
||||||
|
|
||||||
|
**Why it happens:** Inconsistency with `DismissHandler` which returns 404 when no row is found. Bulk operations should not 404 on empty result sets — they're batch operations.
|
||||||
|
|
||||||
|
**How to avoid:** Document and test the 200+count:0 response explicitly. Do NOT add a `TagExists` check before the bulk update (it adds a round-trip and a TOCTOU race).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Code Examples
|
||||||
|
|
||||||
|
### AcknowledgeAll SQL (SQLite)
|
||||||
|
```sql
|
||||||
|
-- Source: direct analysis of existing sqlite_store.go patterns
|
||||||
|
UPDATE updates SET acknowledged_at = datetime('now') WHERE acknowledged_at IS NULL
|
||||||
|
```
|
||||||
|
|
||||||
|
### AcknowledgeAll SQL (PostgreSQL)
|
||||||
|
```sql
|
||||||
|
UPDATE updates SET acknowledged_at = NOW() WHERE acknowledged_at IS NULL
|
||||||
|
```
|
||||||
|
|
||||||
|
### AcknowledgeByTag SQL (SQLite)
|
||||||
|
```sql
|
||||||
|
UPDATE updates SET acknowledged_at = datetime('now')
|
||||||
|
WHERE acknowledged_at IS NULL
|
||||||
|
AND image IN (SELECT image FROM tag_assignments WHERE tag_id = ?)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Bulk Dismiss Optimistic Update (TypeScript)
|
||||||
|
```typescript
|
||||||
|
// Source: pattern derived from existing acknowledge callback in useUpdates.ts
|
||||||
|
const acknowledgeAll = useCallback(async () => {
|
||||||
|
// Optimistic
|
||||||
|
setUpdates(prev =>
|
||||||
|
Object.fromEntries(
|
||||||
|
Object.entries(prev).map(([img, entry]) => [img, { ...entry, acknowledged: true }])
|
||||||
|
)
|
||||||
|
)
|
||||||
|
try {
|
||||||
|
await fetch('/api/updates/acknowledge-all', { method: 'POST' })
|
||||||
|
} catch (e) {
|
||||||
|
console.error('acknowledgeAll failed:', e)
|
||||||
|
fetchUpdates() // re-sync on failure
|
||||||
|
}
|
||||||
|
}, [fetchUpdates])
|
||||||
|
|
||||||
|
const acknowledgeByTag = useCallback(async (tagID: number) => {
|
||||||
|
setUpdates(prev =>
|
||||||
|
Object.fromEntries(
|
||||||
|
Object.entries(prev).map(([img, entry]) => [
|
||||||
|
img,
|
||||||
|
entry.tag?.id === tagID ? { ...entry, acknowledged: true } : entry,
|
||||||
|
])
|
||||||
|
)
|
||||||
|
)
|
||||||
|
try {
|
||||||
|
await fetch('/api/updates/acknowledge-by-tag', {
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify({ tag_id: tagID }),
|
||||||
|
})
|
||||||
|
} catch (e) {
|
||||||
|
console.error('acknowledgeByTag failed:', e)
|
||||||
|
fetchUpdates()
|
||||||
|
}
|
||||||
|
}, [fetchUpdates])
|
||||||
|
```
|
||||||
|
|
||||||
|
### Theme Init (main.tsx replacement)
|
||||||
|
```typescript
|
||||||
|
// Source: Tailwind CSS darkMode: ['class'] documentation pattern
|
||||||
|
const stored = localStorage.getItem('theme')
|
||||||
|
if (stored === 'dark' || (!stored && window.matchMedia('(prefers-color-scheme: dark)').matches)) {
|
||||||
|
document.documentElement.classList.add('dark')
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Environment Availability
|
||||||
|
|
||||||
|
Step 2.6: SKIPPED for new tool dependencies — this phase adds no external tools, services, CLIs, or databases beyond what is already confirmed operational from Phase 3. Bun is available (v1.3.10, verified above). The Go compiler is not accessible in this shell environment but CI uses the Gitea Actions runner with the custom Docker image that includes Go 1.26.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Project Constraints (from CLAUDE.md)
|
||||||
|
|
||||||
|
The following directives must be respected by the planner:
|
||||||
|
|
||||||
|
| Constraint | Impact on This Phase |
|
||||||
|
|------------|----------------------|
|
||||||
|
| No CGO — pure Go SQLite driver `modernc.org/sqlite` | No impact; new methods use existing `database/sql` patterns |
|
||||||
|
| Go 1.26, no third-party router | New handlers follow `net/http` stdlib pattern exactly |
|
||||||
|
| `gofmt` enforced in CI | All new Go files must be `gofmt`-clean before commit |
|
||||||
|
| `go vet` runs in CI | No unsafe patterns |
|
||||||
|
| TypeScript `strict: true`, `noUnusedLocals`, `noUnusedParameters` | Filter state, toast state, and new props must have types; no unused imports |
|
||||||
|
| No ESLint/Prettier for frontend | No linting enforcement, but follow project style (2-space indent, single quotes, no semicolons) |
|
||||||
|
| Handler naming: `<Noun>Handler` | New handlers: `AcknowledgeAllHandler`, `AcknowledgeByTagHandler` |
|
||||||
|
| Test function naming: `Test<FunctionName>_<Scenario>` | e.g., `TestAcknowledgeAllHandler_Empty`, `TestAcknowledgeByTagHandler_NotFound` |
|
||||||
|
| External test package `package diunwebhook_test` | New tests use `NewTestServer()` from `export_test.go` |
|
||||||
|
| Error messages lowercase | `"bad request"`, `"internal error"` — matches existing style |
|
||||||
|
| `log.Printf` with handler name prefix | `"AcknowledgeAllHandler: ..."` |
|
||||||
|
| Single `diunwebhook.go` file for handler logic | New handlers go in `diunwebhook.go` alongside existing handlers |
|
||||||
|
| Backward compatible — existing SQLite DBs | No schema changes in this phase (confirmed: no migrations needed) |
|
||||||
|
| GSD workflow enforcement | All work enters via GSD execute-phase |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Open Questions
|
||||||
|
|
||||||
|
1. **Dismiss-All: inline confirm vs modal overlay**
|
||||||
|
- What we know: D-04 says "modal/dialog confirmation for dismiss-all"; the inline two-click pattern is simpler and consistent with existing tag delete UX
|
||||||
|
- What's unclear: Whether "modal" means a literal overlay dialog or just a confirmation step
|
||||||
|
- Recommendation: Use the inline two-click confirm (matches existing pattern, zero new dependencies). The planner can escalate to a proper `<dialog>` element if the user reviews the plan and wants a modal overlay.
|
||||||
|
|
||||||
|
2. **getRegistry helper duplication**
|
||||||
|
- What we know: `getRegistry` function lives in `ServiceCard.tsx` (not exported); sort-by-registry in `App.tsx` needs the same logic
|
||||||
|
- What's unclear: Whether to move `getRegistry` to `lib/serviceIcons.ts` or `lib/utils.ts` or duplicate it
|
||||||
|
- Recommendation: Move to `frontend/src/lib/utils.ts` or `frontend/src/lib/serviceIcons.ts` and re-import in ServiceCard. This is a small refactor but cleaner than duplication. The planner should include this as a sub-task.
|
||||||
|
|
||||||
|
3. **Toast: custom vs sonner**
|
||||||
|
- What we know: No toast library is installed; shadcn/ui recommends sonner; a custom component is ~30 lines
|
||||||
|
- What's unclear: How polished the toast needs to look
|
||||||
|
- Recommendation: Custom component. If the user requests sonner, it is `bun add sonner` plus a `<Toaster />` in App.tsx root.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Sources
|
||||||
|
|
||||||
|
### Primary (HIGH confidence)
|
||||||
|
- Direct codebase inspection: `pkg/diunwebhook/store.go`, `sqlite_store.go`, `postgres_store.go`, `diunwebhook.go`, `server.go` (does not exist yet — all handlers are in `diunwebhook.go`)
|
||||||
|
- Direct codebase inspection: `frontend/src/App.tsx`, `useUpdates.ts`, `Header.tsx`, `TagSection.tsx`, `ServiceCard.tsx`, `main.tsx`, `tailwind.config.ts`, `index.css`
|
||||||
|
- Direct codebase inspection: `frontend/package.json` — confirmed no sonner, dialog, or select Radix packages installed
|
||||||
|
|
||||||
|
### Secondary (MEDIUM confidence)
|
||||||
|
- Tailwind CSS `darkMode: ['class']` pattern — well-established, matches existing project configuration
|
||||||
|
- `localStorage` + `prefers-color-scheme` theme init pattern — standard web platform API, no library required
|
||||||
|
- HTML5 `beforeunload` event for last-visit timestamp — standard, widely supported
|
||||||
|
|
||||||
|
### Tertiary (LOW confidence — none)
|
||||||
|
No findings rely solely on unverified web search.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Metadata
|
||||||
|
|
||||||
|
**Confidence breakdown:**
|
||||||
|
- Standard stack: HIGH — direct package.json inspection, no assumptions
|
||||||
|
- Architecture: HIGH — derived from reading every file the phase touches
|
||||||
|
- Pitfalls: HIGH — route ordering and CSS var gaps verified directly in the source; others are logic-level
|
||||||
|
- SQL patterns: HIGH — derived from existing store implementations in the same codebase
|
||||||
|
|
||||||
|
**Research date:** 2026-03-24
|
||||||
|
**Valid until:** 2026-04-24 (stable stack; 30-day validity)
|
||||||
@@ -12,25 +12,38 @@ import (
|
|||||||
"time"
|
"time"
|
||||||
|
|
||||||
diun "awesomeProject/pkg/diunwebhook"
|
diun "awesomeProject/pkg/diunwebhook"
|
||||||
|
_ "github.com/jackc/pgx/v5/stdlib"
|
||||||
_ "modernc.org/sqlite"
|
_ "modernc.org/sqlite"
|
||||||
)
|
)
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
|
databaseURL := os.Getenv("DATABASE_URL")
|
||||||
|
var store diun.Store
|
||||||
|
if databaseURL != "" {
|
||||||
|
db, err := sql.Open("pgx", databaseURL)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("sql.Open postgres: %v", err)
|
||||||
|
}
|
||||||
|
if err := diun.RunPostgresMigrations(db); err != nil {
|
||||||
|
log.Fatalf("RunPostgresMigrations: %v", err)
|
||||||
|
}
|
||||||
|
store = diun.NewPostgresStore(db)
|
||||||
|
log.Println("Using PostgreSQL database")
|
||||||
|
} else {
|
||||||
dbPath := os.Getenv("DB_PATH")
|
dbPath := os.Getenv("DB_PATH")
|
||||||
if dbPath == "" {
|
if dbPath == "" {
|
||||||
dbPath = "./diun.db"
|
dbPath = "./diun.db"
|
||||||
}
|
}
|
||||||
|
|
||||||
db, err := sql.Open("sqlite", dbPath)
|
db, err := sql.Open("sqlite", dbPath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("sql.Open: %v", err)
|
log.Fatalf("sql.Open sqlite: %v", err)
|
||||||
}
|
}
|
||||||
|
if err := diun.RunSQLiteMigrations(db); err != nil {
|
||||||
if err := diun.RunMigrations(db); err != nil {
|
log.Fatalf("RunSQLiteMigrations: %v", err)
|
||||||
log.Fatalf("RunMigrations: %v", err)
|
}
|
||||||
|
store = diun.NewSQLiteStore(db)
|
||||||
|
log.Printf("Using SQLite database at %s", dbPath)
|
||||||
}
|
}
|
||||||
|
|
||||||
store := diun.NewSQLiteStore(db)
|
|
||||||
|
|
||||||
secret := os.Getenv("WEBHOOK_SECRET")
|
secret := os.Getenv("WEBHOOK_SECRET")
|
||||||
if secret == "" {
|
if secret == "" {
|
||||||
|
|||||||
@@ -5,4 +5,32 @@ services:
|
|||||||
- "8080:8080"
|
- "8080:8080"
|
||||||
environment:
|
environment:
|
||||||
- WEBHOOK_SECRET=${WEBHOOK_SECRET:-}
|
- WEBHOOK_SECRET=${WEBHOOK_SECRET:-}
|
||||||
|
- DATABASE_URL=${DATABASE_URL:-}
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
postgres:
|
||||||
|
condition: service_healthy
|
||||||
|
required: false
|
||||||
|
|
||||||
|
postgres:
|
||||||
|
image: postgres:17-alpine
|
||||||
|
profiles:
|
||||||
|
- postgres
|
||||||
|
ports:
|
||||||
|
- "5432:5432"
|
||||||
|
environment:
|
||||||
|
POSTGRES_USER: ${POSTGRES_USER:-diun}
|
||||||
|
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-diun}
|
||||||
|
POSTGRES_DB: ${POSTGRES_DB:-diundashboard}
|
||||||
|
volumes:
|
||||||
|
- postgres-data:/var/lib/postgresql/data
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-diun}"]
|
||||||
|
interval: 5s
|
||||||
|
timeout: 5s
|
||||||
|
retries: 5
|
||||||
|
start_period: 10s
|
||||||
|
restart: unless-stopped
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
postgres-data:
|
||||||
|
|||||||
25
compose.yml
25
compose.yml
@@ -1,3 +1,4 @@
|
|||||||
|
# Minimum Docker Compose v2.20 required for depends_on.required
|
||||||
services:
|
services:
|
||||||
app:
|
app:
|
||||||
image: gitea.jeanlucmakiola.de/makiolaj/diundashboard:latest
|
image: gitea.jeanlucmakiola.de/makiolaj/diundashboard:latest
|
||||||
@@ -7,9 +8,33 @@ services:
|
|||||||
- WEBHOOK_SECRET=${WEBHOOK_SECRET:-}
|
- WEBHOOK_SECRET=${WEBHOOK_SECRET:-}
|
||||||
- PORT=${PORT:-8080}
|
- PORT=${PORT:-8080}
|
||||||
- DB_PATH=/data/diun.db
|
- DB_PATH=/data/diun.db
|
||||||
|
- DATABASE_URL=${DATABASE_URL:-}
|
||||||
volumes:
|
volumes:
|
||||||
- diun-data:/data
|
- diun-data:/data
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
|
depends_on:
|
||||||
|
postgres:
|
||||||
|
condition: service_healthy
|
||||||
|
required: false
|
||||||
|
|
||||||
|
postgres:
|
||||||
|
image: postgres:17-alpine
|
||||||
|
profiles:
|
||||||
|
- postgres
|
||||||
|
environment:
|
||||||
|
POSTGRES_USER: ${POSTGRES_USER:-diun}
|
||||||
|
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-diun}
|
||||||
|
POSTGRES_DB: ${POSTGRES_DB:-diundashboard}
|
||||||
|
volumes:
|
||||||
|
- postgres-data:/var/lib/postgresql/data
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-diun}"]
|
||||||
|
interval: 5s
|
||||||
|
timeout: 5s
|
||||||
|
retries: 5
|
||||||
|
start_period: 10s
|
||||||
|
restart: unless-stopped
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
diun-data:
|
diun-data:
|
||||||
|
postgres-data:
|
||||||
|
|||||||
7
go.mod
7
go.mod
@@ -4,17 +4,24 @@ go 1.26
|
|||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/golang-migrate/migrate/v4 v4.19.1
|
github.com/golang-migrate/migrate/v4 v4.19.1
|
||||||
|
github.com/jackc/pgx/v5 v5.9.1
|
||||||
modernc.org/sqlite v1.46.1
|
modernc.org/sqlite v1.46.1
|
||||||
)
|
)
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||||
github.com/google/uuid v1.6.0 // indirect
|
github.com/google/uuid v1.6.0 // indirect
|
||||||
|
github.com/jackc/pgerrcode v0.0.0-20220416144525-469b46aa5efa // indirect
|
||||||
|
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||||
|
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
|
||||||
|
github.com/jackc/puddle/v2 v2.2.2 // indirect
|
||||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||||
github.com/ncruces/go-strftime v1.0.0 // indirect
|
github.com/ncruces/go-strftime v1.0.0 // indirect
|
||||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
||||||
golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 // indirect
|
golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 // indirect
|
||||||
|
golang.org/x/sync v0.18.0 // indirect
|
||||||
golang.org/x/sys v0.38.0 // indirect
|
golang.org/x/sys v0.38.0 // indirect
|
||||||
|
golang.org/x/text v0.31.0 // indirect
|
||||||
modernc.org/libc v1.67.6 // indirect
|
modernc.org/libc v1.67.6 // indirect
|
||||||
modernc.org/mathutil v1.7.1 // indirect
|
modernc.org/mathutil v1.7.1 // indirect
|
||||||
modernc.org/memory v1.11.0 // indirect
|
modernc.org/memory v1.11.0 // indirect
|
||||||
|
|||||||
71
go.sum
71
go.sum
@@ -1,7 +1,34 @@
|
|||||||
|
github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0=
|
||||||
|
github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
|
||||||
|
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
|
||||||
|
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
|
||||||
|
github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI=
|
||||||
|
github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M=
|
||||||
|
github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE=
|
||||||
|
github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk=
|
||||||
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
|
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
|
||||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/dhui/dktest v0.4.6 h1:+DPKyScKSEp3VLtbMDHcUq6V5Lm5zfZZVb0Sk7Ahom4=
|
||||||
|
github.com/dhui/dktest v0.4.6/go.mod h1:JHTSYDtKkvFNFHJKqCzVzqXecyv+tKt8EzceOmQOgbU=
|
||||||
|
github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk=
|
||||||
|
github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
|
||||||
|
github.com/docker/docker v28.3.3+incompatible h1:Dypm25kh4rmk49v1eiVbsAtpAsYURjYkaKubwuBdxEI=
|
||||||
|
github.com/docker/docker v28.3.3+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||||
|
github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c=
|
||||||
|
github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc=
|
||||||
|
github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
|
||||||
|
github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
|
||||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||||
|
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
|
||||||
|
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
|
||||||
|
github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
|
||||||
|
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
||||||
|
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
|
||||||
|
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
|
||||||
|
github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
|
||||||
|
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
|
||||||
github.com/golang-migrate/migrate/v4 v4.19.1 h1:OCyb44lFuQfYXYLx1SCxPZQGU7mcaZ7gH9yH4jSFbBA=
|
github.com/golang-migrate/migrate/v4 v4.19.1 h1:OCyb44lFuQfYXYLx1SCxPZQGU7mcaZ7gH9yH4jSFbBA=
|
||||||
github.com/golang-migrate/migrate/v4 v4.19.1/go.mod h1:CTcgfjxhaUtsLipnLoQRWCrjYXycRz/g5+RWDuYgPrE=
|
github.com/golang-migrate/migrate/v4 v4.19.1/go.mod h1:CTcgfjxhaUtsLipnLoQRWCrjYXycRz/g5+RWDuYgPrE=
|
||||||
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=
|
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=
|
||||||
@@ -10,18 +37,54 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
|||||||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||||
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
|
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
|
||||||
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
|
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
|
||||||
|
github.com/jackc/pgerrcode v0.0.0-20220416144525-469b46aa5efa h1:s+4MhCQ6YrzisK6hFJUX53drDT4UsSW3DEhKn0ifuHw=
|
||||||
|
github.com/jackc/pgerrcode v0.0.0-20220416144525-469b46aa5efa/go.mod h1:a/s9Lp5W7n/DD0VrVoyJ00FbP2ytTPDVOivvn2bMlds=
|
||||||
|
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
|
||||||
|
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
|
||||||
|
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
|
||||||
|
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
|
||||||
|
github.com/jackc/pgx/v5 v5.9.1 h1:uwrxJXBnx76nyISkhr33kQLlUqjv7et7b9FjCen/tdc=
|
||||||
|
github.com/jackc/pgx/v5 v5.9.1/go.mod h1:mal1tBGAFfLHvZzaYh77YS/eC6IX9OWbRV1QIIM0Jn4=
|
||||||
|
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
|
||||||
|
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
|
||||||
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
|
github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
|
||||||
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
|
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
|
||||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||||
|
github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0=
|
||||||
|
github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo=
|
||||||
|
github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0=
|
||||||
|
github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y=
|
||||||
|
github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
|
||||||
|
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
|
||||||
github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w=
|
github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w=
|
||||||
github.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
|
github.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
|
||||||
|
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
|
||||||
|
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
|
||||||
|
github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug=
|
||||||
|
github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM=
|
||||||
|
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||||
|
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||||
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
|
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
|
||||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
|
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
|
||||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
||||||
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||||
|
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
|
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
|
||||||
|
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
||||||
|
go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
|
||||||
|
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
|
||||||
|
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 h1:F7Jx+6hwnZ41NSFTO5q4LYDtJRXBf2PD0rNBkeB/lus=
|
||||||
|
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0/go.mod h1:UHB22Z8QsdRDrnAtX4PntOl36ajSxcdUMt1sF7Y6E7Q=
|
||||||
|
go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ=
|
||||||
|
go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I=
|
||||||
|
go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE=
|
||||||
|
go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E=
|
||||||
|
go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4=
|
||||||
|
go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0=
|
||||||
golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 h1:mgKeJMpvi0yx/sU5GsxQ7p6s2wtOnGAHZWCHUM4KGzY=
|
golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 h1:mgKeJMpvi0yx/sU5GsxQ7p6s2wtOnGAHZWCHUM4KGzY=
|
||||||
golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546/go.mod h1:j/pmGrbnkbPtQfxEe5D0VQhZC6qKbfKifgD0oM7sR70=
|
golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546/go.mod h1:j/pmGrbnkbPtQfxEe5D0VQhZC6qKbfKifgD0oM7sR70=
|
||||||
golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA=
|
golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA=
|
||||||
@@ -31,8 +94,12 @@ golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
|||||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
|
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
|
||||||
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||||
|
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
|
||||||
|
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
|
||||||
golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ=
|
golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ=
|
||||||
golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs=
|
golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs=
|
||||||
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
modernc.org/cc/v4 v4.27.1 h1:9W30zRlYrefrDV2JE2O8VDtJ1yPGownxciz5rrbQZis=
|
modernc.org/cc/v4 v4.27.1 h1:9W30zRlYrefrDV2JE2O8VDtJ1yPGownxciz5rrbQZis=
|
||||||
|
|||||||
@@ -169,7 +169,7 @@ func (s *Server) TagsHandler(w http.ResponseWriter, r *http.Request) {
|
|||||||
}
|
}
|
||||||
tag, err := s.store.CreateTag(req.Name)
|
tag, err := s.store.CreateTag(req.Name)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if strings.Contains(err.Error(), "UNIQUE") {
|
if strings.Contains(strings.ToLower(err.Error()), "unique") {
|
||||||
http.Error(w, "conflict: tag name already exists", http.StatusConflict)
|
http.Error(w, "conflict: tag name already exists", http.StatusConflict)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ func NewTestServer() (*Server, error) {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
if err := RunMigrations(db); err != nil {
|
if err := RunSQLiteMigrations(db); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
store := NewSQLiteStore(db)
|
store := NewSQLiteStore(db)
|
||||||
@@ -22,7 +22,7 @@ func NewTestServerWithSecret(secret string) (*Server, error) {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
if err := RunMigrations(db); err != nil {
|
if err := RunSQLiteMigrations(db); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
store := NewSQLiteStore(db)
|
store := NewSQLiteStore(db)
|
||||||
|
|||||||
@@ -6,6 +6,7 @@ import (
|
|||||||
"errors"
|
"errors"
|
||||||
|
|
||||||
"github.com/golang-migrate/migrate/v4"
|
"github.com/golang-migrate/migrate/v4"
|
||||||
|
pgxmigrate "github.com/golang-migrate/migrate/v4/database/pgx/v5"
|
||||||
sqlitemigrate "github.com/golang-migrate/migrate/v4/database/sqlite"
|
sqlitemigrate "github.com/golang-migrate/migrate/v4/database/sqlite"
|
||||||
"github.com/golang-migrate/migrate/v4/source/iofs"
|
"github.com/golang-migrate/migrate/v4/source/iofs"
|
||||||
_ "modernc.org/sqlite"
|
_ "modernc.org/sqlite"
|
||||||
@@ -14,9 +15,12 @@ import (
|
|||||||
//go:embed migrations/sqlite
|
//go:embed migrations/sqlite
|
||||||
var sqliteMigrations embed.FS
|
var sqliteMigrations embed.FS
|
||||||
|
|
||||||
// RunMigrations applies all pending schema migrations to the given SQLite database.
|
//go:embed migrations/postgres
|
||||||
|
var postgresMigrations embed.FS
|
||||||
|
|
||||||
|
// RunSQLiteMigrations applies all pending schema migrations to the given SQLite database.
|
||||||
// Returns nil if all migrations applied successfully or if database is already up to date.
|
// Returns nil if all migrations applied successfully or if database is already up to date.
|
||||||
func RunMigrations(db *sql.DB) error {
|
func RunSQLiteMigrations(db *sql.DB) error {
|
||||||
src, err := iofs.New(sqliteMigrations, "migrations/sqlite")
|
src, err := iofs.New(sqliteMigrations, "migrations/sqlite")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
@@ -34,3 +38,24 @@ func RunMigrations(db *sql.DB) error {
|
|||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// RunPostgresMigrations applies all pending schema migrations to the given PostgreSQL database.
|
||||||
|
// Returns nil if all migrations applied successfully or if database is already up to date.
|
||||||
|
func RunPostgresMigrations(db *sql.DB) error {
|
||||||
|
src, err := iofs.New(postgresMigrations, "migrations/postgres")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
driver, err := pgxmigrate.WithInstance(db, &pgxmigrate.Config{})
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
m, err := migrate.NewWithInstance("iofs", src, "pgx5", driver)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := m.Up(); err != nil && !errors.Is(err, migrate.ErrNoChange) {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|||||||
@@ -0,0 +1,3 @@
|
|||||||
|
-- Roll back the baseline PostgreSQL schema.
-- Drop children before parents: tag_assignments has a foreign key into tags,
-- so it must go first; updates is independent and dropped last.
DROP TABLE IF EXISTS tag_assignments;
DROP TABLE IF EXISTS tags;
DROP TABLE IF EXISTS updates;
|
||||||
@@ -0,0 +1,28 @@
|
|||||||
|
-- Baseline PostgreSQL schema.
-- Timestamps (created, received_at, acknowledged_at) are stored as TEXT;
-- the application layer writes RFC3339 strings.

-- One row per watched image, upserted on each DIUN webhook delivery.
CREATE TABLE IF NOT EXISTS updates (
    image TEXT PRIMARY KEY,
    diun_version TEXT NOT NULL DEFAULT '',
    hostname TEXT NOT NULL DEFAULT '',
    status TEXT NOT NULL DEFAULT '',
    provider TEXT NOT NULL DEFAULT '',
    hub_link TEXT NOT NULL DEFAULT '',
    mime_type TEXT NOT NULL DEFAULT '',
    digest TEXT NOT NULL DEFAULT '',
    created TEXT NOT NULL DEFAULT '',
    platform TEXT NOT NULL DEFAULT '',
    ctn_name TEXT NOT NULL DEFAULT '',
    ctn_id TEXT NOT NULL DEFAULT '',
    ctn_state TEXT NOT NULL DEFAULT '',
    ctn_status TEXT NOT NULL DEFAULT '',
    received_at TEXT NOT NULL,
    acknowledged_at TEXT           -- NULL until the user acknowledges the update
);

-- User-defined tag names; uniqueness enforced at the database level.
CREATE TABLE IF NOT EXISTS tags (
    id SERIAL PRIMARY KEY,
    name TEXT NOT NULL UNIQUE
);

-- At most one tag per image (image is the primary key); deleting a tag
-- cascades and removes its assignments.
CREATE TABLE IF NOT EXISTS tag_assignments (
    image TEXT PRIMARY KEY,
    tag_id INTEGER NOT NULL REFERENCES tags(id) ON DELETE CASCADE
);
|
||||||
176
pkg/diunwebhook/postgres_store.go
Normal file
176
pkg/diunwebhook/postgres_store.go
Normal file
@@ -0,0 +1,176 @@
|
|||||||
|
package diunwebhook
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// PostgresStore implements Store using a PostgreSQL database.
// PostgreSQL handles concurrent writes natively, so unlike the SQLite store
// no application-level mutex is needed.
type PostgresStore struct {
	db *sql.DB // pooled connection handle, configured by NewPostgresStore
}

// Connection-pool settings applied to every PostgresStore.
const (
	pgMaxOpenConns    = 25
	pgMaxIdleConns    = 5
	pgConnMaxLifetime = 5 * time.Minute
)

// NewPostgresStore creates a new PostgresStore backed by the given *sql.DB
// and configures connection-pool settings appropriate for PostgreSQL.
func NewPostgresStore(db *sql.DB) *PostgresStore {
	db.SetMaxOpenConns(pgMaxOpenConns)
	db.SetMaxIdleConns(pgMaxIdleConns)
	db.SetConnMaxLifetime(pgConnMaxLifetime)
	return &PostgresStore{db: db}
}
|
||||||
|
|
||||||
|
// UpsertEvent inserts or updates a DIUN event in the updates table.
|
||||||
|
// On conflict (same image), all fields are updated and acknowledged_at is reset to NULL.
|
||||||
|
func (s *PostgresStore) UpsertEvent(event DiunEvent) error {
|
||||||
|
_, err := s.db.Exec(`
|
||||||
|
INSERT INTO updates (
|
||||||
|
image, diun_version, hostname, status, provider,
|
||||||
|
hub_link, mime_type, digest, created, platform,
|
||||||
|
ctn_name, ctn_id, ctn_state, ctn_status,
|
||||||
|
received_at, acknowledged_at
|
||||||
|
) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,NULL)
|
||||||
|
ON CONFLICT(image) DO UPDATE SET
|
||||||
|
diun_version = EXCLUDED.diun_version,
|
||||||
|
hostname = EXCLUDED.hostname,
|
||||||
|
status = EXCLUDED.status,
|
||||||
|
provider = EXCLUDED.provider,
|
||||||
|
hub_link = EXCLUDED.hub_link,
|
||||||
|
mime_type = EXCLUDED.mime_type,
|
||||||
|
digest = EXCLUDED.digest,
|
||||||
|
created = EXCLUDED.created,
|
||||||
|
platform = EXCLUDED.platform,
|
||||||
|
ctn_name = EXCLUDED.ctn_name,
|
||||||
|
ctn_id = EXCLUDED.ctn_id,
|
||||||
|
ctn_state = EXCLUDED.ctn_state,
|
||||||
|
ctn_status = EXCLUDED.ctn_status,
|
||||||
|
received_at = EXCLUDED.received_at,
|
||||||
|
acknowledged_at = NULL`,
|
||||||
|
event.Image, event.DiunVersion, event.Hostname, event.Status, event.Provider,
|
||||||
|
event.HubLink, event.MimeType, event.Digest,
|
||||||
|
event.Created.Format(time.RFC3339), event.Platform,
|
||||||
|
event.Metadata.ContainerName, event.Metadata.ContainerID,
|
||||||
|
event.Metadata.State, event.Metadata.Status,
|
||||||
|
time.Now().Format(time.RFC3339),
|
||||||
|
)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetUpdates returns all update entries joined with their tag assignments.
|
||||||
|
func (s *PostgresStore) GetUpdates() (map[string]UpdateEntry, error) {
|
||||||
|
rows, err := s.db.Query(`SELECT u.image, u.diun_version, u.hostname, u.status, u.provider,
|
||||||
|
u.hub_link, u.mime_type, u.digest, u.created, u.platform,
|
||||||
|
u.ctn_name, u.ctn_id, u.ctn_state, u.ctn_status, u.received_at, COALESCE(u.acknowledged_at, ''),
|
||||||
|
t.id, t.name
|
||||||
|
FROM updates u
|
||||||
|
LEFT JOIN tag_assignments ta ON u.image = ta.image
|
||||||
|
LEFT JOIN tags t ON ta.tag_id = t.id`)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
result := make(map[string]UpdateEntry)
|
||||||
|
for rows.Next() {
|
||||||
|
var e UpdateEntry
|
||||||
|
var createdStr, receivedStr, acknowledgedAt string
|
||||||
|
var tagID sql.NullInt64
|
||||||
|
var tagName sql.NullString
|
||||||
|
err := rows.Scan(&e.Event.Image, &e.Event.DiunVersion, &e.Event.Hostname,
|
||||||
|
&e.Event.Status, &e.Event.Provider, &e.Event.HubLink, &e.Event.MimeType,
|
||||||
|
&e.Event.Digest, &createdStr, &e.Event.Platform,
|
||||||
|
&e.Event.Metadata.ContainerName, &e.Event.Metadata.ContainerID,
|
||||||
|
&e.Event.Metadata.State, &e.Event.Metadata.Status,
|
||||||
|
&receivedStr, &acknowledgedAt, &tagID, &tagName)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
e.Event.Created, _ = time.Parse(time.RFC3339, createdStr)
|
||||||
|
e.ReceivedAt, _ = time.Parse(time.RFC3339, receivedStr)
|
||||||
|
e.Acknowledged = acknowledgedAt != ""
|
||||||
|
if tagID.Valid && tagName.Valid {
|
||||||
|
e.Tag = &Tag{ID: int(tagID.Int64), Name: tagName.String}
|
||||||
|
}
|
||||||
|
result[e.Event.Image] = e
|
||||||
|
}
|
||||||
|
return result, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
// AcknowledgeUpdate marks the given image as acknowledged.
|
||||||
|
// Returns found=false if no row with that image exists.
|
||||||
|
func (s *PostgresStore) AcknowledgeUpdate(image string) (found bool, err error) {
|
||||||
|
res, err := s.db.Exec(`UPDATE updates SET acknowledged_at = NOW() WHERE image = $1`, image)
|
||||||
|
if err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
n, _ := res.RowsAffected()
|
||||||
|
return n > 0, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListTags returns all tags ordered by name.
|
||||||
|
func (s *PostgresStore) ListTags() ([]Tag, error) {
|
||||||
|
rows, err := s.db.Query(`SELECT id, name FROM tags ORDER BY name`)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
tags := []Tag{}
|
||||||
|
for rows.Next() {
|
||||||
|
var t Tag
|
||||||
|
if err := rows.Scan(&t.ID, &t.Name); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
tags = append(tags, t)
|
||||||
|
}
|
||||||
|
return tags, rows.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateTag inserts a new tag with the given name and returns the created tag.
|
||||||
|
// Uses RETURNING id since pgx does not support LastInsertId.
|
||||||
|
func (s *PostgresStore) CreateTag(name string) (Tag, error) {
|
||||||
|
var id int
|
||||||
|
err := s.db.QueryRow(
|
||||||
|
`INSERT INTO tags (name) VALUES ($1) RETURNING id`, name,
|
||||||
|
).Scan(&id)
|
||||||
|
if err != nil {
|
||||||
|
return Tag{}, err
|
||||||
|
}
|
||||||
|
return Tag{ID: id, Name: name}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteTag deletes the tag with the given id.
|
||||||
|
// Returns found=false if no tag with that id exists.
|
||||||
|
func (s *PostgresStore) DeleteTag(id int) (found bool, err error) {
|
||||||
|
res, err := s.db.Exec(`DELETE FROM tags WHERE id = $1`, id)
|
||||||
|
if err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
n, _ := res.RowsAffected()
|
||||||
|
return n > 0, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// AssignTag assigns the given image to the given tag.
|
||||||
|
// Uses INSERT ... ON CONFLICT DO UPDATE so re-assigning an image to a different tag replaces the existing assignment.
|
||||||
|
func (s *PostgresStore) AssignTag(image string, tagID int) error {
|
||||||
|
_, err := s.db.Exec(
|
||||||
|
`INSERT INTO tag_assignments (image, tag_id) VALUES ($1, $2)
|
||||||
|
ON CONFLICT (image) DO UPDATE SET tag_id = EXCLUDED.tag_id`,
|
||||||
|
image, tagID,
|
||||||
|
)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// UnassignTag removes any tag assignment for the given image.
|
||||||
|
func (s *PostgresStore) UnassignTag(image string) error {
|
||||||
|
_, err := s.db.Exec(`DELETE FROM tag_assignments WHERE image = $1`, image)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// TagExists returns true if a tag with the given id exists.
|
||||||
|
func (s *PostgresStore) TagExists(id int) (bool, error) {
|
||||||
|
var count int
|
||||||
|
err := s.db.QueryRow(`SELECT COUNT(*) FROM tags WHERE id = $1`, id).Scan(&count)
|
||||||
|
if err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
return count > 0, nil
|
||||||
|
}
|
||||||
29
pkg/diunwebhook/postgres_test.go
Normal file
29
pkg/diunwebhook/postgres_test.go
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
//go:build postgres
|
||||||
|
|
||||||
|
package diunwebhook
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
"os"
|
||||||
|
|
||||||
|
_ "github.com/jackc/pgx/v5/stdlib"
|
||||||
|
)
|
||||||
|
|
||||||
|
// NewTestPostgresServer constructs a Server backed by a PostgreSQL database.
|
||||||
|
// Requires a running PostgreSQL instance. Set TEST_DATABASE_URL to override
|
||||||
|
// the default connection string.
|
||||||
|
func NewTestPostgresServer() (*Server, error) {
|
||||||
|
databaseURL := os.Getenv("TEST_DATABASE_URL")
|
||||||
|
if databaseURL == "" {
|
||||||
|
databaseURL = "postgres://diun:diun@localhost:5432/diundashboard_test?sslmode=disable"
|
||||||
|
}
|
||||||
|
db, err := sql.Open("pgx", databaseURL)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if err := RunPostgresMigrations(db); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
store := NewPostgresStore(db)
|
||||||
|
return NewServer(store, ""), nil
|
||||||
|
}
|
||||||
Reference in New Issue
Block a user