diff --git a/.claude/escalations/.gitkeep b/.claude/escalations/.gitkeep
new file mode 100644
index 00000000..e69de29b
diff --git a/.lint-baselines.json b/.lint-baselines.json
new file mode 100644
index 00000000..419b64b5
--- /dev/null
+++ b/.lint-baselines.json
@@ -0,0 +1,12 @@
+{
+ "@bantayog/shared-types": 0,
+ "@bantayog/citizen-pwa": 0,
+ "@bantayog/shared-sms-parser": 0,
+ "@bantayog/responder-app": 0,
+ "@bantayog/shared-ui": 0,
+ "@bantayog/functions": 0,
+ "@bantayog/admin-desktop": 0,
+ "@bantayog/shared-validators": 0,
+ "@bantayog/shared-data": 0,
+ "@bantayog/e2e-tests": 0
+}
diff --git a/apps/admin-desktop/package.json b/apps/admin-desktop/package.json
index fdf2845d..6689d6a8 100644
--- a/apps/admin-desktop/package.json
+++ b/apps/admin-desktop/package.json
@@ -8,7 +8,8 @@
"build": "tsc --noEmit && vite build",
"preview": "vite preview",
"lint": "eslint src",
- "typecheck": "tsc --noEmit"
+ "typecheck": "tsc --noEmit",
+ "test": "vitest run"
},
"dependencies": {
"@bantayog/shared-types": "workspace:*",
@@ -21,6 +22,7 @@
"devDependencies": {
"@testing-library/jest-dom": "^6.4.0",
"@testing-library/react": "^16.0.0",
+ "@testing-library/user-event": "^14.5.2",
"@types/react": "^19.2.14",
"@types/react-dom": "^19.2.3",
"@vitejs/plugin-react": "^6.0.1",
diff --git a/apps/admin-desktop/src/__tests__/shift-handoff-modal.test.tsx b/apps/admin-desktop/src/__tests__/shift-handoff-modal.test.tsx
new file mode 100644
index 00000000..9d0898ec
--- /dev/null
+++ b/apps/admin-desktop/src/__tests__/shift-handoff-modal.test.tsx
@@ -0,0 +1,80 @@
+import { describe, it, expect, vi, beforeEach } from 'vitest'
+import { render, screen } from '@testing-library/react'
+import userEvent from '@testing-library/user-event'
+
+const mockInitiateHandoff = vi.hoisted(() => vi.fn())
+
+vi.mock('../app/firebase', () => ({ db: {} }))
+
+vi.mock('@bantayog/shared-ui', () => ({
+ useAuth: () => ({
+ claims: { municipalityId: 'daet', role: 'municipal_admin' },
+ signOut: vi.fn(),
+ }),
+}))
+
+vi.mock('../services/callables', () => ({
+ callables: {
+ verifyReport: vi.fn(),
+ rejectReport: vi.fn(),
+ initiateShiftHandoff: mockInitiateHandoff,
+ acceptShiftHandoff: vi.fn(),
+ },
+}))
+
+vi.mock('../hooks/useMuniReports', () => ({
+ useMuniReports: () => ({
+ reports: [],
+ hasMore: false,
+ loadMore: vi.fn(),
+ loading: false,
+ error: null,
+ }),
+}))
+
+vi.mock('../hooks/usePendingHandoffs', () => ({
+ usePendingHandoffs: () => [],
+}))
+
+vi.mock('../pages/ReportDetailPanel', () => ({ ReportDetailPanel: () => <div>detail</div> }))
+vi.mock('../pages/DispatchModal', () => ({ DispatchModal: () => <div>dispatch</div> }))
+vi.mock('../pages/CloseReportModal', () => ({ CloseReportModal: () => <div>close</div> }))
+
+import { TriageQueuePage } from '../pages/TriageQueuePage'
+
+describe('ShiftHandoffModal', () => {
+ beforeEach(() => {
+ mockInitiateHandoff.mockResolvedValue({ success: true, handoffId: 'h-new-1' })
+ })
+
+ it('renders Start Handoff button in header', () => {
+    render(<TriageQueuePage />)
+ expect(screen.getByRole('button', { name: /start handoff/i })).toBeInTheDocument()
+ })
+
+ it('opens ShiftHandoffModal on Start Handoff click', async () => {
+ const user = userEvent.setup()
+    render(<TriageQueuePage />)
+ await user.click(screen.getByRole('button', { name: /start handoff/i }))
+ expect(screen.getByRole('dialog', { name: /shift handoff/i })).toBeInTheDocument()
+ })
+
+ it('calls initiateShiftHandoff on Initiate click', async () => {
+ const user = userEvent.setup()
+    render(<TriageQueuePage />)
+ await user.click(screen.getByRole('button', { name: /start handoff/i }))
+ const notesField = screen.getByLabelText(/notes/i)
+ await user.type(notesField, 'End of day shift')
+ await user.click(screen.getByRole('button', { name: /initiate/i }))
+ expect(mockInitiateHandoff).toHaveBeenCalledWith(
+ expect.objectContaining({ notes: 'End of day shift' }),
+ )
+ })
+})
+
+describe('Incoming handoff banner', () => {
+ it('shows no banner when no pending handoffs', () => {
+    render(<TriageQueuePage />)
+ expect(screen.queryByRole('button', { name: /accept handoff/i })).not.toBeInTheDocument()
+ })
+})
diff --git a/apps/admin-desktop/src/__tests__/triage-queue.test.tsx b/apps/admin-desktop/src/__tests__/triage-queue.test.tsx
new file mode 100644
index 00000000..127ffc15
--- /dev/null
+++ b/apps/admin-desktop/src/__tests__/triage-queue.test.tsx
@@ -0,0 +1,188 @@
+import { describe, it, expect, vi, beforeEach } from 'vitest'
+import { render, screen, fireEvent } from '@testing-library/react'
+import userEvent from '@testing-library/user-event'
+
+const mockUseMuniReports = vi.fn()
+
+vi.mock('../hooks/useMuniReports', () => ({
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-return
+ useMuniReports: (...args: unknown[]) => mockUseMuniReports(...args),
+}))
+
+vi.mock('../app/firebase', () => ({
+ db: {},
+}))
+
+vi.mock('@bantayog/shared-ui', () => ({
+ useAuth: () => ({
+ claims: { municipalityId: 'daet', role: 'municipal_admin' },
+ signOut: vi.fn(),
+ }),
+}))
+
+vi.mock('../services/callables', () => ({
+ callables: {
+ verifyReport: vi.fn(),
+ rejectReport: vi.fn(),
+ },
+}))
+
+vi.mock('../hooks/usePendingHandoffs', () => ({
+ usePendingHandoffs: () => [],
+}))
+
+vi.mock('../pages/ReportDetailPanel', () => ({
+  ReportDetailPanel: () => <div>detail</div>,
+}))
+vi.mock('../pages/DispatchModal', () => ({
+  DispatchModal: () => <div>dispatch</div>,
+}))
+vi.mock('../pages/CloseReportModal', () => ({
+  CloseReportModal: () => <div>close</div>,
+}))
+
+import { TriageQueuePage } from '../pages/TriageQueuePage'
+
+describe('TriageQueuePage', () => {
+ beforeEach(() => {
+ mockUseMuniReports.mockReturnValue({
+ reports: [],
+ hasMore: false,
+ loadMore: vi.fn(),
+ loading: false,
+ error: null,
+ })
+ })
+
+ it('renders Load More button when hasMore is true', () => {
+ mockUseMuniReports.mockReturnValue({
+ reports: [
+ { reportId: 'r1', status: 'new', severity: 'high', createdAt: null, municipalityLabel: '' },
+ ],
+ hasMore: true,
+ loadMore: vi.fn(),
+ loading: false,
+ error: null,
+ })
+    render(<TriageQueuePage />)
+ expect(screen.getByRole('button', { name: /load more/i })).toBeInTheDocument()
+ })
+
+ it('does not render Load More button when hasMore is false', () => {
+    render(<TriageQueuePage />)
+ expect(screen.queryByRole('button', { name: /load more/i })).not.toBeInTheDocument()
+ })
+
+ it('calls loadMore when Load More is clicked', () => {
+ const loadMore = vi.fn()
+ mockUseMuniReports.mockReturnValue({
+ reports: [
+ { reportId: 'r1', status: 'new', severity: 'high', createdAt: null, municipalityLabel: '' },
+ ],
+ hasMore: true,
+ loadMore,
+ loading: false,
+ error: null,
+ })
+    render(<TriageQueuePage />)
+ fireEvent.click(screen.getByRole('button', { name: /load more/i }))
+ expect(loadMore).toHaveBeenCalledTimes(1)
+ })
+
+ it('shows Showing X count', () => {
+ mockUseMuniReports.mockReturnValue({
+ reports: [
+ { reportId: 'r1', status: 'new', severity: 'high', createdAt: null, municipalityLabel: '' },
+ {
+ reportId: 'r2',
+ status: 'new',
+ severity: 'medium',
+ createdAt: null,
+ municipalityLabel: '',
+ },
+ ],
+ hasMore: true,
+ loadMore: vi.fn(),
+ loading: false,
+ error: null,
+ })
+    render(<TriageQueuePage />)
+ expect(screen.getByText(/showing 2/i)).toBeInTheDocument()
+ })
+
+ it('renders severity from severity field, not severityDerived', () => {
+ mockUseMuniReports.mockReturnValue({
+ reports: [
+ { reportId: 'r1', status: 'new', severity: 'high', createdAt: null, municipalityLabel: '' },
+ ],
+ hasMore: false,
+ loadMore: vi.fn(),
+ loading: false,
+ error: null,
+ })
+    render(<TriageQueuePage />)
+ expect(screen.getByText(/high/i)).toBeInTheDocument()
+ })
+
+ it('pressing j selects the next report in the list', async () => {
+ const user = userEvent.setup()
+ mockUseMuniReports.mockReturnValue({
+ reports: [
+ { reportId: 'r1', status: 'new', severity: 'high', createdAt: null, municipalityLabel: '' },
+ {
+ reportId: 'r2',
+ status: 'new',
+ severity: 'medium',
+ createdAt: null,
+ municipalityLabel: '',
+ },
+ ],
+ hasMore: false,
+ loadMore: vi.fn(),
+ loading: false,
+ error: null,
+ })
+    render(<TriageQueuePage />)
+ await user.keyboard('j')
+ expect(screen.getByText('detail')).toBeInTheDocument()
+ })
+
+ it('pressing k moves selection backward', async () => {
+ const user = userEvent.setup()
+ mockUseMuniReports.mockReturnValue({
+ reports: [
+ { reportId: 'r1', status: 'new', severity: 'high', createdAt: null, municipalityLabel: '' },
+ {
+ reportId: 'r2',
+ status: 'new',
+ severity: 'medium',
+ createdAt: null,
+ municipalityLabel: '',
+ },
+ ],
+ hasMore: false,
+ loadMore: vi.fn(),
+ loading: false,
+ error: null,
+ })
+    render(<TriageQueuePage />)
+ await user.keyboard('jj')
+ await user.keyboard('k')
+ expect(screen.getByText('detail')).toBeInTheDocument()
+ })
+
+ it('keyboard shortcuts do not fire when a modal is open', async () => {
+ const user = userEvent.setup()
+ mockUseMuniReports.mockReturnValue({
+ reports: [],
+ hasMore: false,
+ loadMore: vi.fn(),
+ loading: false,
+ error: null,
+ })
+    render(<TriageQueuePage />)
+ await user.keyboard('j')
+ await user.keyboard('k')
+ expect(screen.queryByText('detail')).not.toBeInTheDocument()
+ })
+})
diff --git a/apps/admin-desktop/src/hooks/useMuniReports.ts b/apps/admin-desktop/src/hooks/useMuniReports.ts
index 2f95d6ed..ab455a43 100644
--- a/apps/admin-desktop/src/hooks/useMuniReports.ts
+++ b/apps/admin-desktop/src/hooks/useMuniReports.ts
@@ -1,25 +1,44 @@
import { useEffect, useState } from 'react'
-import { collection, onSnapshot, query, where, orderBy, limit, Timestamp } from 'firebase/firestore'
+import {
+ collection,
+ onSnapshot,
+ query,
+ where,
+ orderBy,
+ limit,
+ type Timestamp,
+} from 'firebase/firestore'
import { db } from '../app/firebase'
+import type { ReportStatus, Severity } from '@bantayog/shared-types'
export interface MuniReportRow {
reportId: string
- status: string
- severityDerived: string
+ status: ReportStatus
+ severity: Severity
+ reportType?: string
+ duplicateClusterId?: string
+ barangayId?: string
createdAt: Timestamp
municipalityLabel: string
}
+const ACTIVE_STATUSES: ReportStatus[] = ['new', 'awaiting_verify', 'verified', 'assigned']
+
export function useMuniReports(municipalityId: string | undefined) {
-  const [rows, setRows] = useState<MuniReportRow[]>([])
+ const [limitCount, setLimitCount] = useState(100)
+  const [reports, setReports] = useState<MuniReportRow[]>([])
+ const [hasMore, setHasMore] = useState(false)
const [loading, setLoading] = useState(true)
   const [error, setError] = useState<string | null>(null)
useEffect(() => {
if (!municipalityId) {
queueMicrotask(() => {
- setRows([])
+ setReports([])
setLoading(false)
+ setLimitCount(100)
+ setHasMore(false)
+ setError(null)
})
return
}
@@ -27,27 +46,32 @@ export function useMuniReports(municipalityId: string | undefined) {
setLoading(true)
})
const q = query(
- collection(db, 'reports'),
+ collection(db, 'report_ops'),
where('municipalityId', '==', municipalityId),
- where('status', 'in', ['new', 'awaiting_verify', 'verified', 'assigned']),
+ where('status', 'in', ACTIVE_STATUSES),
orderBy('createdAt', 'desc'),
- limit(100),
+ limit(limitCount + 1),
)
const unsub = onSnapshot(
q,
(snap) => {
- setRows(
- snap.docs.map((d) => {
- const data = d.data()
- return {
- reportId: d.id,
- status: String(data.status),
- severityDerived: String(data.severityDerived ?? 'medium'),
- createdAt: data.createdAt as Timestamp,
- municipalityLabel: String(data.municipalityLabel ?? ''),
- }
- }),
- )
+ const all = snap.docs.map((d) => {
+ const data = d.data()
+ const row: MuniReportRow = {
+ reportId: d.id,
+ status: String(data.status) as ReportStatus,
+ severity: String(data.severity ?? 'medium') as Severity,
+ createdAt: data.createdAt as Timestamp,
+ municipalityLabel: String(data.municipalityLabel ?? ''),
+ }
+ if (data.reportType !== undefined) row.reportType = String(data.reportType)
+ if (data.duplicateClusterId !== undefined)
+ row.duplicateClusterId = String(data.duplicateClusterId)
+ if (data.barangayId !== undefined) row.barangayId = String(data.barangayId)
+ return row
+ })
+ setHasMore(all.length > limitCount)
+ setReports(all.slice(0, limitCount))
setLoading(false)
},
(err) => {
@@ -56,7 +80,15 @@ export function useMuniReports(municipalityId: string | undefined) {
},
)
return unsub
- }, [municipalityId])
+ }, [municipalityId, limitCount])
- return { rows, loading, error }
+ return {
+ reports,
+ hasMore,
+ loadMore: () => {
+ setLimitCount((n) => n + 100)
+ },
+ loading,
+ error,
+ }
}
diff --git a/apps/admin-desktop/src/hooks/usePendingHandoffs.ts b/apps/admin-desktop/src/hooks/usePendingHandoffs.ts
new file mode 100644
index 00000000..17b6ac19
--- /dev/null
+++ b/apps/admin-desktop/src/hooks/usePendingHandoffs.ts
@@ -0,0 +1,65 @@
+import { useEffect, useState } from 'react'
+import { Timestamp, collection, onSnapshot, query, where } from 'firebase/firestore'
+import { db } from '../app/firebase'
+
+export interface PendingHandoff {
+ id: string
+ fromUid: string
+ createdAt: Timestamp
+ notes: string
+ activeIncidentIds: string[]
+}
+
+export function usePendingHandoffs(municipalityId: string | undefined) {
+  const [handoffs, setHandoffs] = useState<PendingHandoff[]>([])
+  const [error, setError] = useState<string | null>(null)
+
+ useEffect(() => {
+ if (!municipalityId) {
+ queueMicrotask(() => {
+ setHandoffs([])
+ setError(null)
+ })
+ return
+ }
+
+ // Clear before subscribing
+ queueMicrotask(() => {
+ setHandoffs([])
+ setError(null)
+ })
+
+ const q = query(
+ collection(db, 'shift_handoffs'),
+ where('municipalityId', '==', municipalityId),
+ where('status', '==', 'pending'),
+ )
+ return onSnapshot(
+ q,
+ (snap) => {
+ setHandoffs(
+ snap.docs.map((d) => {
+ const raw = d.data()
+ const activeIncidentIds = Array.isArray(raw.activeIncidentIds)
+ ? raw.activeIncidentIds.filter((id): id is string => typeof id === 'string')
+ : []
+ return {
+ id: d.id,
+ fromUid: typeof raw.fromUid === 'string' ? raw.fromUid : '',
+ createdAt: raw.createdAt instanceof Timestamp ? raw.createdAt : Timestamp.now(),
+ notes: typeof raw.notes === 'string' ? raw.notes : '',
+ activeIncidentIds,
+ }
+ }),
+ )
+ setError(null)
+ },
+ (err) => {
+ setHandoffs([]) // Clear on error
+ setError(err.message)
+ },
+ )
+ }, [municipalityId])
+
+ return { handoffs, error }
+}
diff --git a/apps/admin-desktop/src/pages/TriageQueuePage.tsx b/apps/admin-desktop/src/pages/TriageQueuePage.tsx
index 80f60a8c..8d64cf34 100644
--- a/apps/admin-desktop/src/pages/TriageQueuePage.tsx
+++ b/apps/admin-desktop/src/pages/TriageQueuePage.tsx
@@ -1,20 +1,37 @@
-import { useState } from 'react'
+import { useState, useEffect, useRef } from 'react'
import { useAuth } from '@bantayog/shared-ui'
-import { useMuniReports } from '../hooks/useMuniReports'
+import { useMuniReports, type MuniReportRow } from '../hooks/useMuniReports'
import { ReportDetailPanel } from './ReportDetailPanel'
import { DispatchModal } from './DispatchModal'
import { CloseReportModal } from './CloseReportModal'
import { callables } from '../services/callables'
+import { usePendingHandoffs } from '../hooks/usePendingHandoffs'
export function TriageQueuePage() {
const { claims, signOut } = useAuth()
const municipalityId =
typeof claims?.municipalityId === 'string' ? claims.municipalityId : undefined
- const { rows, loading, error } = useMuniReports(municipalityId)
+ const { reports, hasMore, loadMore, loading, error } = useMuniReports(municipalityId)
   const [selected, setSelected] = useState<string | null>(null)
   const [dispatchForReportId, setDispatchForReportId] = useState<string | null>(null)
   const [closeForReportId, setCloseForReportId] = useState<string | null>(null)
   const [banner, setBanner] = useState<string | null>(null)
+ const [handoffModalOpen, setHandoffModalOpen] = useState(false)
+ const [handoffNotes, setHandoffNotes] = useState('')
+ const [handoffLoading, setHandoffLoading] = useState(false)
+ const [rejectReason, setRejectReason] = useState('')
+  const [rejectingReportId, setRejectingReportId] = useState<string | null>(null)
+  const [acceptingHandoffId, setAcceptingHandoffId] = useState<string | null>(null)
+ const { handoffs: pendingHandoffs, error: handoffsError } = usePendingHandoffs(municipalityId)
+  const dialogRef = useRef<HTMLDialogElement>(null)
+
+ useEffect(() => {
+ if (handoffModalOpen) {
+ dialogRef.current?.showModal()
+ } else {
+ dialogRef.current?.close()
+ }
+ }, [handoffModalOpen])
const handleVerify = (reportId: string) => {
void (async () => {
@@ -27,35 +44,124 @@ export function TriageQueuePage() {
})()
}
+ const VALID_REJECT_REASONS = [
+ 'obviously_false',
+ 'duplicate',
+ 'test_submission',
+ 'insufficient_detail',
+ ] as const
+
const handleReject = (reportId: string) => {
- const reason = prompt(
- 'Reject reason (obviously_false, duplicate, test_submission, insufficient_detail)?',
- )
- if (!reason) return
- void (async () => {
- try {
- await callables.rejectReport({
- reportId,
- reason: reason as
- | 'obviously_false'
- | 'duplicate'
- | 'test_submission'
- | 'insufficient_detail',
- idempotencyKey: crypto.randomUUID(),
- })
- } catch (err: unknown) {
- setBanner(err instanceof Error ? err.message : 'Reject failed')
- }
- })()
+ setRejectingReportId(reportId)
+ setRejectReason('')
+ }
+
+ const confirmReject = async () => {
+ if (!rejectingReportId) return
+ if (!VALID_REJECT_REASONS.includes(rejectReason as (typeof VALID_REJECT_REASONS)[number])) {
+ setBanner('Invalid reject reason')
+ return
+ }
+ try {
+ await callables.rejectReport({
+ reportId: rejectingReportId,
+ reason: rejectReason as (typeof VALID_REJECT_REASONS)[number],
+ idempotencyKey: crypto.randomUUID(),
+ })
+ setRejectingReportId(null)
+ setRejectReason('')
+ } catch (err: unknown) {
+ setBanner(err instanceof Error ? err.message : 'Reject failed')
+ }
}
+ const indexRef = useRef(-1)
+ const modalOpen = !!dispatchForReportId || !!closeForReportId || handoffModalOpen
+
+ useEffect(() => {
+ const onKey = (e: KeyboardEvent) => {
+ if (e.key === 'Escape') {
+ setDispatchForReportId(null)
+ setCloseForReportId(null)
+ setHandoffModalOpen(false)
+ return
+ }
+ if (modalOpen) return
+ if (e.key === 'j') {
+ const next = Math.min(indexRef.current + 1, reports.length - 1)
+ if (next >= 0) {
+ indexRef.current = next
+ setSelected(reports[next]?.reportId ?? null)
+ }
+ } else if (e.key === 'k') {
+ const prev = Math.max(indexRef.current - 1, 0)
+ if (prev >= 0 && reports.length > 0) {
+ indexRef.current = prev
+ setSelected(reports[prev]?.reportId ?? null)
+ }
+ }
+ }
+ window.addEventListener('keydown', onKey)
+ return () => {
+ window.removeEventListener('keydown', onKey)
+ }
+ }, [modalOpen, reports])
+
return (
Triage · {municipalityId ?? 'N/A'}
-
+
+
{banner && {banner}
}
+ {handoffsError && Handoffs error: {handoffsError}
}
+ {pendingHandoffs.length > 0 && (
+
+ {pendingHandoffs.length} pending handoff(s) awaiting acceptance.
+ {pendingHandoffs.map((h) => (
+
+ ))}
+
+ )}
Queue
@@ -63,25 +169,67 @@ export function TriageQueuePage() {
Loading…
) : error ? (
Error: {error}
- ) : rows.length === 0 ? (
+ ) : reports.length === 0 ? (
No active reports.
) : (
-
- {rows.map((r) => (
- -
-
-
- ))}
-
+ <>
+
+ Showing {reports.length}
+ {hasMore ? '+' : ''} reports
+
+
+ {reports.map((r: MuniReportRow, i: number) => (
+ -
+
+
+ ))}
+
+ {hasMore &&
}
+ >
)}
- {selected && (
+ {rejectingReportId ? (
+
+
Reject Report
+
+
+
+
+
+ ) : selected ? (
- )}
+ ) : null}
{dispatchForReportId && (
)}
+ {handoffModalOpen && (
+
+ )}
)
}
diff --git a/apps/admin-desktop/src/services/callables.ts b/apps/admin-desktop/src/services/callables.ts
index 00096e5c..64c63308 100644
--- a/apps/admin-desktop/src/services/callables.ts
+++ b/apps/admin-desktop/src/services/callables.ts
@@ -61,4 +61,14 @@ export const callables = {
functions,
'declineAgencyAssistance',
)(payload).then((r) => r.data),
+ initiateShiftHandoff: (payload: { notes: string; idempotencyKey: IdempotencyKey }) =>
+ httpsCallable(
+ functions,
+ 'initiateShiftHandoff',
+ )(payload).then((r) => r.data),
+ acceptShiftHandoff: (payload: { handoffId: string; idempotencyKey: IdempotencyKey }) =>
+ httpsCallable(
+ functions,
+ 'acceptShiftHandoff',
+ )(payload).then((r) => r.data),
}
diff --git a/apps/admin-desktop/vitest.config.ts b/apps/admin-desktop/vitest.config.ts
index 5eaa352f..268e592b 100644
--- a/apps/admin-desktop/vitest.config.ts
+++ b/apps/admin-desktop/vitest.config.ts
@@ -1,6 +1,8 @@
import { defineConfig } from 'vitest/config'
+import react from '@vitejs/plugin-react'
export default defineConfig({
+ plugins: [react()],
test: {
globals: true,
environment: 'happy-dom',
diff --git a/docs/agent-tasks/.gitkeep b/docs/agent-tasks/.gitkeep
new file mode 100644
index 00000000..e69de29b
diff --git a/docs/progress.md b/docs/progress.md
index 5c8d1a2c..a034683c 100644
--- a/docs/progress.md
+++ b/docs/progress.md
@@ -2,6 +2,23 @@
## Current
+### Task 8: A.3 — ShiftHandoffModal + incoming handoff banner UI (2026-04-25)
+
+- Status: DONE
+- Branch: `phase5-cluster-a-task2`
+- Scope:
+ - `callables.ts`: added `initiateShiftHandoff` and `acceptShiftHandoff` callable wrappers
+ - `usePendingHandoffs.ts`: new hook querying `shift_handoffs` collection for pending handoffs
+ - `TriageQueuePage.tsx`: added "Start Handoff" button, ShiftHandoffModal dialog, incoming handoff banner with accept buttons
+ - `shift-handoff-modal.test.tsx`: 4 tests (button renders, modal opens, initiate callable, no banner when empty)
+ - `triage-queue.test.tsx`: added `usePendingHandoffs` mock to fix existing tests
+- Verification:
+ - `pnpm --filter @bantayog/admin-desktop exec vitest run src/__tests__/shift-handoff-modal.test.tsx` — PASS (4/4)
+ - `pnpm --filter @bantayog/admin-desktop exec vitest run src/__tests__/triage-queue.test.tsx` — PASS (8/8)
+ - `pnpm --filter @bantayog/admin-desktop lint` — PASS
+ - `pnpm --filter @bantayog/admin-desktop typecheck` — PASS
+- Note: `functions/src/index.ts` was included in the commit (pre-staged from prior task — exports `initiateShiftHandoff` and `acceptShiftHandoff` callables)
+
### PR #63 CodeRabbit follow-up fixes (2026-04-24)
- Status: DONE locally - resolved the remaining review comments on schema validation, inbox materialization, and Firestore rules
diff --git a/docs/superpowers/specs/2026-04-24-agent-team-design.md b/docs/superpowers/specs/2026-04-24-agent-team-design.md
new file mode 100644
index 00000000..52af598e
--- /dev/null
+++ b/docs/superpowers/specs/2026-04-24-agent-team-design.md
@@ -0,0 +1,571 @@
+# Agent Team Design — Claude Code Orchestrator + OpenCode Kimi Workers
+
+**Date:** 2026-04-24
+**Version:** 2.0
+**Status:** Approved
+**Scope:** Multi-agent development workflow for Bantayog Alert (Phases 6–12)
+
+---
+
+## 0. Prerequisites
+
+The following must be true before the workflow runs:
+
+| Prerequisite | Check |
+| ------------------------------------------------------------- | ---------------------------------------------------------------- |
+| `opencode` CLI installed and authenticated | `opencode providers list` shows Kimi credentials |
+| `gh` CLI installed and authenticated | `gh auth status` succeeds |
+| `git worktree` available | `git --version` ≥ 2.5 |
+| `.gitignore` covers `.env*`, `*.key`, `service-account*.json` | Verify before first phase |
+| `turbo.json` `--affected` graph is accurate | Run `npx turbo run lint --dry-run` and confirm affected packages |
+| All tests on `main` are currently deterministic | Run full suite twice; both runs must produce the same result |
+
+If any prerequisite fails, stop and fix it before spawning agents.
+
+---
+
+## 1. Overview
+
+Claude Code (Claude Sonnet 4.6) acts as orchestrator. OpenCode Exxeed agents running Kimi models act as implementation workers. The PRD and architecture spec are the authoritative source of truth for all task decomposition.
+
+**What this system is:**
+
+- A structured way to parallelize implementation work across PRD phases
+- A quality-gated pipeline with machine-verifiable checks at every stage
+- A human-in-the-loop escalation path for failures that exceed agent capability
+
+**What this system is not:**
+
+- A replacement for human judgment on architecture decisions
+- A fully autonomous deployment pipeline (no agent may deploy to any environment)
+- A way to skip the two-stage review gate
+
+---
+
+## 2. Roles
+
+### Claude Code (Orchestrator)
+
+Responsible for: reading the PRD and `docs/progress.md`, decomposing phases into tasks, writing task artifacts, creating worktrees, managing the spawn semaphore, running all gate scripts, merging branches in dependency order, writing telemetry (including its own actions), and escalating on terminal failure.
+
+Claude Code writes a telemetry entry for every action it takes, not just agent outcomes. See Section 12.
+
+### OpenCode Exxeed Workers
+
+Invoked via `opencode run`. Each worker:
+
+- Receives the full task brief markdown as the run message
+- Follows the Exxeed 4-phase workflow: Spec Ingestion → Implementation Plan → Implementation → Verification
+- Writes a prose handoff to `.claude/plans/exxeed-[slug]-report.md`
+- Writes a machine-readable result to `.claude/plans/exxeed-[slug]-result.json`
+- Exits — does not merge, deploy, commit to the staging branch, or open PRs
+
+### Git Worktrees
+
+One worktree per task, at `../bantayog-wt-[slug]`, on branch `agent/[slug]`. Workers cannot see or affect sibling worktrees.
+
+---
+
+## 3. Models
+
+All tasks use `kimi-for-coding/k2p6`. Model is specified in the companion JSON and passed to `opencode run --model`.
+
+---
+
+## 4. Task Artifacts (Two Files Per Task)
+
+Each task produces two files with the same basename:
+
+### 4a. Human-Readable Brief — `docs/agent-tasks/YYYY-MM-DD-[slug].md`
+
+```markdown
+# Agent Task: [slug]
+
+## Objective
+
+[One sentence — what this task produces]
+
+## Spec references
+
+- docs/superpowers/specs/[relevant-design.md]
+- prd/bantayog-alert-prd-v1.0.md §[section]
+
+## Requirements
+
+R01: [functional requirement]
+R02: [functional requirement]
+R03: [constraint — e.g., "do not touch firestore.rules directly, use scripts/build-rules.ts"]
+
+## Files NOT to touch
+
+- [explicitly list adjacent files that are out of scope]
+- [Stage 1 enforces this list against the companion JSON's allowed_files]
+```
+
+The brief is for the agent to read. All machine-parsed fields live in the companion JSON.
+
+### 4b. Companion JSON — `docs/agent-tasks/YYYY-MM-DD-[slug].json`
+
+```json
+{
+ "slug": "p6-t3-functions-telemetry",
+ "phase": 6,
+ "model": "kimi-for-coding/k2p6",
+ "pnpm_filter": "@bantayog/functions",
+ "timeout_minutes": 30,
+ "modifies_lockfile": true,
+ "base_commit": "",
+ "allowed_files": {
+ "create": ["functions/src/callables/telemetry.ts"],
+ "modify": ["functions/src/index.ts"],
+ "delete": []
+ },
+ "verification_command": "pnpm --filter @bantayog/functions test && pnpm --filter @bantayog/functions typecheck",
+ "blocks": ["p6-t5"],
+ "blocked_by": ["p6-t1"]
+}
+```
+
+`base_commit` is empty when written by Claude Code and filled in at worktree creation time. `blocked_by` is the explicit inverse of `blocks` — both fields are written together so there is no ambiguity. `dag.json` is generated from the companion JSONs, not maintained separately.
+
+**Single source of truth rule:** If `modifies_lockfile` in a companion JSON ever differs from the derived value in `dag.json`, Stage 1 fails immediately.
+
+---
+
+## 5. Machine-Readable Result Format
+
+Workers write `.claude/plans/exxeed-[slug]-result.json`:
+
+```json
+{
+ "task": "p6-t3-functions-telemetry",
+ "verification_exit_code": 0,
+ "verification_command": "pnpm --filter @bantayog/functions test && pnpm --filter @bantayog/functions typecheck",
+ "files_changed": ["functions/src/callables/telemetry.ts"],
+ "files_deleted": [],
+ "requirements_satisfied": ["R01", "R02", "R03"],
+ "open_items": [],
+ "baseline": "47 passing, 0 failing",
+ "final": "49 passing, 0 failing",
+ "discovered_required_files": []
+}
+```
+
+`discovered_required_files`: populated if the agent found a file outside `allowed_files` that is genuinely required (not a scope violation). Stage 1 fails-open on a non-empty list — Claude Code reviews and may update `allowed_files` in the companion JSON and respawn, rather than treating it as a hard failure.
+
+---
+
+## 6. Dependency Graph
+
+Claude Code generates `docs/agent-tasks/dag.json` from companion JSONs at phase start:
+
+```json
+{
+ "p6-t1": { "blocks": ["p6-t2", "p6-t3", "p6-t4"], "blocked_by": [], "modifies_lockfile": false },
+ "p6-t2": { "blocks": ["p6-t6"], "blocked_by": ["p6-t1"], "modifies_lockfile": false },
+ "p6-t3": { "blocks": ["p6-t5"], "blocked_by": ["p6-t1"], "modifies_lockfile": true },
+ "p6-t4": { "blocks": ["p6-t5"], "blocked_by": ["p6-t1"], "modifies_lockfile": true },
+ "p6-t5": { "blocks": ["p6-t6"], "blocked_by": ["p6-t3", "p6-t4"], "modifies_lockfile": false },
+ "p6-t6": { "blocks": [], "blocked_by": ["p6-t2", "p6-t5"], "modifies_lockfile": false }
+}
+```
+
+**Spawn rules:**
+
+- A task spawns when all tasks in its `blocked_by` set have merged to the phase staging branch.
+- Two tasks with `modifies_lockfile: true` may never run in parallel. The second waits for the first to merge.
+- Lockfile reconciliation (`pnpm install`) runs **immediately** after each `modifies_lockfile` task merges to staging — not at phase end. This ensures downstream tasks start from a valid lockfile state.
+
+**Layer ordering (general guidance — task-level edges override):**
+
+```
+L0 — shared-validators schemas and types
+L1 — Firestore rules, functions/callables
+L2 — apps (admin-desktop, citizen-pwa, responder-app)
+L3 — E2E tests, acceptance harness
+```
+
+---
+
+## 7. Invocation
+
+### Pre-spawn checks
+
+Before spawning any agent, Claude Code:
+
+1. Verifies `opencode` is reachable: `opencode --version`
+2. Confirms the worktree path is inside the project root — not above `$HOME` or in any path containing `.ssh`, `.gnupg`, `.config`, or system directories
+3. Confirms no existing worktree at `../bantayog-wt-[slug]` matching the active name pattern. Worktrees matching `../bantayog-wt-[slug]-TERMINAL-*` are forensic archives from prior terminal failures and are not a blocking condition — they are skipped.
+4. Records the base commit: `BASE_SHA=$(git rev-parse main)` and writes it to `base_commit` in the companion JSON
+
+### Spawn command
+
+```bash
+BASE_SHA=$(git rev-parse main)
+# write BASE_SHA to companion JSON base_commit field
+git worktree add ../bantayog-wt-[slug] -b agent/[slug]
+
+opencode run "$(cat docs/agent-tasks/YYYY-MM-DD-[slug].md)" \
+ --agent exxeed \
+ --model kimi-for-coding/k2p6 \
+ --dir ../bantayog-wt-[slug] \
+ --dangerously-skip-permissions
+```
+
+`--dangerously-skip-permissions` bypasses OpenCode's interactive permission prompts. It is acceptable here because: (1) each agent runs in an isolated git worktree outside `main`'s branch, (2) the pre-spawn check confirms the worktree is within the project tree, and (3) Claude Code reviews the full diff before any merge. The flag must not be used if the pre-spawn check fails.
+
+Workers run via `Bash run_in_background: true`. Claude Code does not block waiting for them.
+
+### Concurrency limit
+
+`MAX_PARALLEL_AGENTS = 3` (constant). Claude Code maintains a semaphore queue. When a slot opens (agent exits or times out), the next ready task spawns. This prevents disk exhaustion (8 worktrees × node_modules) and API rate-limit saturation.
+
+### Timeout enforcement
+
+Each task brief specifies `timeout_minutes`. If an agent is still running after that duration, Claude Code kills the background process and treats the run as a Stage 1 failure (missing or incomplete result.json). The telemetry entry records `"exit_reason": "timeout"`.
+
+---
+
+## 8. Merge Strategy — Phase Staging Branch
+
+All agent branches merge to a phase staging branch, never directly to `main`:
+
+```
+main
+ └── phase/6-responder-telemetry
+ ├── agent/p6-t2 (merged)
+ ├── agent/p6-t3 (merged)
+ └── agent/p6-t5 (pending)
+```
+
+### Merge conflict strategy
+
+- **Lockfiles (`pnpm-lock.yaml`, `package-lock.json`):** Use `git merge -X theirs`. The lockfile-reconcile step that follows is the authoritative version.
+- **Code conflicts:** Abort the merge (`git merge --abort`), log it as a Stage 2 Run B failure, and escalate to human resolution before proceeding.
+- **Preventing conflicts:** Agents whose tasks are sequentially ordered (due to `modifies_lockfile: true`) cannot cause code conflicts with each other by design. The primary conflict risk is in shared import files — caught by Stage 2 Run B.
+
+When **all** phase tasks pass both gates, the staging branch merges to `main` as a single squash PR. If any task reaches terminal failure, the entire staging branch is deleted — no partial state lands on `main`.
+
+---
+
+## 9. Quality Gates
+
+### Stage 1 — Artifact Verification (per task)
+
+Runs immediately after a worker exits or times out.
+
+**Script:** `scripts/agent-gate-stage1.sh [slug]`
+
+Steps (all must pass; any failure exits 1):
+
+1. `exxeed-[slug]-result.json` exists and is valid JSON.
+2. Stage 1 **re-runs** `verification_command` from the companion JSON inside the worktree. Compares actual exit code to `verification_exit_code` in result.json. Mismatch = immediate fail. Agents cannot claim success without running tests.
+3. `git -C ../bantayog-wt-[slug] diff --name-only [base_commit]` (pinned SHA from companion JSON, not floating `main`) contains only files listed in `allowed_files.create`, `allowed_files.modify`, and `allowed_files.delete`. Extra files = fail.
+4. If `discovered_required_files` is non-empty: **fail-open**. Claude Code is notified and may update `allowed_files` in the companion JSON and respawn. Not an automatic hard fail.
+5. `open_items` array in result.json contains no entries with status `❌`.
+6. Companion JSON `modifies_lockfile` matches the derived value in `dag.json`. Mismatch = immediate fail.
+7. For every task slug X listed in this task's `blocks`, X's companion JSON must list this slug in its `blocked_by`. Asymmetric edges = immediate fail — this prevents silent execution misordering.
+
+### Stage 2 — Code Quality (per task, then combined)
+
+#### Run A — per-task (before merging to staging branch)
+
+```bash
+# scripts/agent-gate-stage2.sh
+BASELINE=$(jq -r --arg pkg "$1" '.[$pkg] // 0' .lint-baselines.json)
+
+pnpm --filter "$1" lint -- --max-warnings="$BASELINE" &&
+pnpm --filter "$1" typecheck &&
+pnpm --filter "$1" test -- --coverage &&
+if [[ "$1" == *"@bantayog/functions"* ]]; then
+ firebase emulators:exec --only firestore,database,storage \
+ "pnpm --filter $1 test:rules"
+fi &&
+scripts/check-no-any.sh "$1" &&
+scripts/check-no-empty-catch.sh "$1" &&
+scripts/check-secrets.sh "$1" &&
+scripts/check-lockfile-integrity.sh
+```
+
+**Emulator port collisions:** Stage 2 Run A steps that invoke `firebase emulators:exec` are serialized — only one emulator session runs at a time, even if multiple agents have completed Stage 1 simultaneously. Non-emulator steps run in parallel.
+
+#### Run B — combined staging branch (before PR to `main`)
+
+```bash
+# scripts/agent-gate-stage2-combined.sh
+git checkout "$1" &&
+npx turbo run lint typecheck test --affected &&
+firebase emulators:exec --only firestore,database,storage \
+ "pnpm --filter @bantayog/functions run test:rules" &&
+scripts/check-secrets.sh all &&
+scripts/check-lockfile-integrity.sh
+```
+
+Run B catches cross-task issues: duplicate imports, type errors that only appear when both T2 and T5 are combined, and shared-file conflicts that individual task gates cannot see.
+
+### Lint Baselines
+
+`.lint-baselines.json` (checked in, generated once from `main`):
+
+```json
+{
+ "@bantayog/functions": 5,
+ "@bantayog/shared-validators": 0,
+ "@bantayog/citizen-pwa": 12,
+ "@bantayog/admin-desktop": 8,
+ "@bantayog/responder-app": 3
+}
+```
+
+Stage 2 Run A looks up the relevant package count. A task fails only on _new_ warnings for its package, not pre-existing ones in other packages. Run B checks the combined count across all affected packages.
+
+Regenerate with:
+
+```bash
+scripts/generate-lint-baselines.sh > .lint-baselines.json
+git add .lint-baselines.json && git commit -m "chore: update lint baselines"
+```
+
+### Secrets Scan
+
+`scripts/check-secrets.sh [package]` uses `git-secrets` (or equivalent) to scan staged changes for:
+
+- Hardcoded API keys and tokens
+- `.env` values committed to tracked files
+- Firebase project IDs or service account JSON content
+- Private key headers (`-----BEGIN`)
+
+Runs in both Stage 2 Run A and Run B.
+
+---
+
+## 10. Circuit Breaker & Retry
+
+```
+Attempt 1 → Stage 1 or Stage 2 Run A fails
+ └── Claude Code writes targeted correction brief
+ (specific violations only — not a re-statement of the original brief)
+ └── Fresh worktree: ../bantayog-wt-[slug]-retry-1
+ (not --continue; clean context prevents compounding prior hallucinated state)
+ └── Agent respawned with correction brief as the run message
+
+Attempt 2 → Still fails
+ └── Claude Code attempts direct fix in ../bantayog-wt-[slug]-retry-1
+
+Claude Code direct fix → Stage 2 still fails
+ └── TERMINAL_FAILURE (see Section 11)
+```
+
+On direct fix, Claude Code operates in `../bantayog-wt-[slug]-retry-1` (the most recent worktree). The original `../bantayog-wt-[slug]` is preserved for forensic comparison alongside retry-1.
+
+---
+
+## 11. Terminal Failure
+
+### Scope
+
+A terminal failure is **task-scoped**, not phase-scoped:
+
+- All pending tasks in the **current phase** are cancelled.
+- Running background agents are killed.
+- The phase staging branch is deleted — tasks already merged to staging are discarded.
+- **Other phases are unaffected** (they have their own staging branches).
+- All worktrees for this phase (original + retries) are **renamed** before preservation: `../bantayog-wt-[slug]-TERMINAL-$(date +%s)` and `../bantayog-wt-[slug]-retry-1-TERMINAL-$(date +%s)`. The `*-TERMINAL-*` suffix prevents future pre-spawn checks from blocking on them while keeping the state intact for forensic inspection.
+- The telemetry log records total tokens and duration spent on the failed phase.
+
+### Escalation artifacts
+
+Claude Code writes `.claude/escalations/YYYY-MM-DD-[slug]-terminal.md` containing:
+
+- All telemetry entries for every attempt (`telemetry.jsonl` entries for this slug)
+- `git diff [base_commit]` from the last failed worktree
+- The original task brief
+- All correction briefs written during retry
+- A one-paragraph diagnosis: what the agent failed to do, and why Claude Code's direct fix also failed
+
+Claude Code attempts to open a GitHub issue:
+
+- Title: `[terminal-failure] Phase N — [slug]`
+- Label: `terminal-failure`
+- Body: link to escalation file + one-paragraph summary
+
+If `gh issue create` exits non-zero (network failure, auth expired, rate limit):
+
+1. Appends `{"actor":"claude-code","action":"escalation_failed","task":"[slug]","reason":"gh_issue_create_failed"}` to `telemetry.jsonl`
+2. Writes the full issue body to `.claude/escalations/YYYY-MM-DD-[slug]-issue-fallback.md`
+3. Prints to the session: `TERMINAL FAILURE — gh issue create failed — manual intervention required: .claude/escalations/YYYY-MM-DD-[slug]-terminal.md`
+
+Claude Code then **stops** — no retry, no workaround, no partial merge.
+
+The human receives: escalation file path, GitHub issue link (or fallback file path), worktree paths for forensic inspection.
+
+---
+
+## 12. Human-in-the-Loop Gate
+
+Computed by Claude Code before merging staging → `main`:
+
+| Signal | Score |
+| ---------------------------------------------------------- | -------------------- |
+| Total files changed > 5 | +2 |
+| Total lines changed > 100 | +1 |
+| Any file outside `allowed_files` detected at Stage 1 | +5 (immediate block) |
+| Firestore rules or `firestore.indexes.json` touched | +5 (immediate block) |
+| Any task passed on attempt 2+ (Stage 2 failure, not flaky) | +2 |
+| Any `discovered_required_files` entries accepted | +1 |
+
+**Score ≥ 3:** Claude Code posts a summary (files changed, gate results, suspicion score, diff stat) and waits for explicit `proceed` before merging.
+
+**Score < 3:** Claude Code merges automatically.
+
+Firestore rules changes always score ≥ 5, always block for human approval.
+
+### Flake Detection
+
+Before scoring a retry as +2, Claude Code runs `scripts/detect-flakes.sh [slug]`, which re-executes the verification command 3 times. If it passes ≥ 2 of 3 runs, the failure is classified as a flaky test, not a real quality regression:
+
+- Telemetry records `"flaky": true` on that task's entry
+- The +2 suspicion score is **not** applied
+- A `"actor": "claude-code", "action": "flake_detected"` event is written to telemetry
+
+If the failure is genuine (passes 0 or 1 of 3 runs), the +2 score is applied and the retry proceeds normally.
+
+---
+
+## 13. Observability
+
+All events append to `docs/agent-tasks/telemetry.jsonl`. Claude Code commits this file **after every task completion** (not at phase end) to prevent loss on crash.
+
+**Agent run events (written by Claude Code after each gate):**
+
+```jsonl
+{
+ "ts": "2026-04-24T10:00:00Z",
+ "actor": "agent",
+ "phase": 6,
+ "task": "p6-t3",
+ "model": "kimi-for-coding/k2p6",
+ "attempt": 1,
+ "stage1": "PASS",
+ "stage2_run_a": "FAIL",
+ "duration_sec": 420,
+ "files_changed": 3,
+ "lines_changed": 87,
+ "exit_reason": "completed"
+}
+```
+
+**Claude Code orchestrator events:**
+
+```jsonl
+{"ts":"2026-04-24T10:00:00Z","actor":"claude-code","action":"spawn","phase":6,"task":"p6-t3","model":"kimi-for-coding/k2p6"}
+{"ts":"2026-04-24T10:07:00Z","actor":"claude-code","action":"stage1_result","phase":6,"task":"p6-t3","result":"PASS"}
+{"ts":"2026-04-24T10:07:05Z","actor":"claude-code","action":"merge","phase":6,"task":"p6-t3","target":"phase/6-responder-telemetry"}
+```
+
+`exit_reason` values: `completed`, `timeout`, `terminal_failure`.
+
+Telemetry file is committed after each task and at phase end. It is never deleted — it is the permanent record of all agent work on this project.
+
+---
+
+## 14. Phase State Persistence (Crash Recovery)
+
+Claude Code writes `docs/agent-tasks/phase-state.json` atomically after every state transition (spawn, gate result, merge, cancel):
+
+```json
+{
+ "phase": 6,
+ "staging_branch": "phase/6-responder-telemetry",
+ "base_commit": "abc1234",
+ "tasks": {
+ "p6-t1": { "status": "merged", "worktree": null, "pid": null },
+ "p6-t2": { "status": "in_progress", "worktree": "../bantayog-wt-p6-t2", "pid": 12345 },
+ "p6-t3": { "status": "pending", "worktree": null, "pid": null }
+ }
+}
+```
+
+`status` values (per-task): `pending`, `in_progress`, `stage1_pass`, `stage2_pass`, `merged`, `failed`, `cancelled`, `terminal`.
+
+Phase-level status (top-level field in `phase-state.json`): `active`, `staging_complete`, `run_b_pass`, `pr_opened`, `done`, `terminal`.
+
+- `staging_complete`: all tasks merged to the staging branch; Run B has not yet run. This status is written **before** Run B starts. On restart, if phase status is `staging_complete`, Claude Code re-runs Run B — it does not assume Run B passed.
+- `run_b_pass`: Run B completed successfully; PR not yet opened.
+- `pr_opened`: PR to `main` has been opened; awaiting merge or human `proceed`.
+
+On Claude Code restart, it reads `phase-state.json` before taking any action:
+
+- Phase status `staging_complete` → re-run Stage 2 Run B before proceeding.
+- Phase status `run_b_pass` or `pr_opened` → resume from that point (do not re-run gates).
+- Per-task `in_progress`: check if the PID is still running. If yes, continue monitoring. If no, treat as a Stage 1 fail and begin retry.
+- Per-task `merged`: do not respawn.
+- Per-task `pending`: evaluate whether `blocked_by` set is satisfied before spawning.
+
+`phase-state.json` is committed alongside each `telemetry.jsonl` update.
+
+---
+
+## 15. Phase Workflow Summary
+
+```
+ 1. Claude Code reads PRD phase + progress.md
+ 2. Claude Code writes task briefs (.md) + companion JSONs (.json) for all tasks in the phase
+ 3. Claude Code generates dag.json from companion JSONs
+ 4. Claude Code creates staging branch: git checkout -b phase/N-description
+ 5. Claude Code initializes phase-state.json
+ 6. For each task whose blocked_by set is fully merged (respecting MAX_PARALLEL_AGENTS = 3):
+ a. Pre-spawn checks (path safety, no stale worktree, write base_commit to companion JSON)
+ b. git worktree add ../bantayog-wt-[slug] -b agent/[slug]
+ c. opencode run (background) → worker runs Exxeed 4-phase workflow
+ d. Write telemetry: {actor: "claude-code", action: "spawn", ...}
+ e. Update phase-state.json
+ f. On agent exit (or timeout kill):
+ - Stage 1 gate runs (reruns verification_command, checks file allowlist, etc.)
+ - Stage 1 pass → Stage 2 Run A (serialized if emulator required)
+ - Both pass → merge agent/[slug] into staging branch
+ - If task's `modifies_lockfile` is true: run lockfile-reconcile (pnpm install) immediately before unblocking downstream tasks
+ - Write telemetry, update phase-state.json
+ - Newly unblocked tasks → repeat from step 6
+ g. Stage 1 or 2 fail → circuit breaker (Section 10)
+ h. Terminal failure → Section 11 escalation, cancel all pending tasks, delete staging branch
+ 7. All tasks merged to staging branch
+ 8. Write phase status → `staging_complete` in phase-state.json (crash-safe checkpoint)
+ 9. Stage 2 Run B (combined) runs on staging branch
+10. Write phase status → `run_b_pass` in phase-state.json
+11. Suspicion score computed (Section 12)
+12. Score < 3 → Claude Code merges staging → main (single squash PR); write phase status → `pr_opened`
+ Score ≥ 3 → post summary, wait for explicit "proceed"; then merge and write `pr_opened`
+13. Worktrees deleted (except `*-TERMINAL-*` forensic archives)
+14. telemetry.jsonl committed, phase-state.json archived, progress.md updated; write phase status → `done`
+```
+
+---
+
+## 16. Example: Phase 6 — Responder App Telemetry
+
+**Companion JSON DAG:**
+
+```
+p6-t1 (schemas, k2-thinking, lockfile: false)
+ → p6-t2 (rules, k2-thinking, lockfile: false) ─────────────────────────> p6-t6 (e2e)
+ → p6-t3 (functions, k2p6, lockfile: true) ─┐ ↑
+ → p6-t4 (capacitor, k2p6, lockfile: true) ─┤──> p6-t5 (hooks + race-loss UI) ─────┘
+ (p6-t5 blocked_by: [p6-t3, p6-t4])
+```
+
+**Execution with semaphore (MAX_PARALLEL_AGENTS = 3):**
+
+```
+Spawn: p6-t1 (slot 1 of 3)
+p6-t1 merges →
+ Spawn: p6-t2 (slot 1), p6-t3 (slot 2) [parallel; both ready, neither modifies lockfile conflict yet]
+ p6-t3 completes → lockfile-reconcile immediately → p6-t4 queued (p6-t3 has lockfile: true)
+ p6-t2 completes (no lockfile conflict with p6-t4)
+ p6-t4 spawns (slot 2) → completes → lockfile-reconcile
+ Now p6-t5 unblocked (both p6-t3 and p6-t4 merged)
+ Spawn: p6-t5 (slot 1)
+ p6-t5 merges → p6-t6 unblocked
+ Spawn: p6-t6 (slot 1)
+ p6-t6 merges → Stage 2 Run B → suspicion score → PR to main
+```
diff --git a/functions/src/__tests__/callables/merge-duplicates.test.ts b/functions/src/__tests__/callables/merge-duplicates.test.ts
new file mode 100644
index 00000000..2941eac3
--- /dev/null
+++ b/functions/src/__tests__/callables/merge-duplicates.test.ts
@@ -0,0 +1,302 @@
+import { describe, it, expect, beforeAll, afterAll, beforeEach, vi } from 'vitest'
+import { initializeTestEnvironment, type RulesTestEnvironment } from '@firebase/rules-unit-testing'
+import { setDoc, doc } from 'firebase/firestore'
+import { type Firestore, Timestamp, getFirestore } from 'firebase-admin/firestore'
+import { initializeApp, deleteApp, type App } from 'firebase-admin/app'
+import type { UserRole } from '@bantayog/shared-types'
+
+const onCallMock = vi.hoisted(() => vi.fn())
+vi.mock('firebase-functions/v2/https', () => ({ onCall: onCallMock }))
+vi.mock('firebase-admin/database', () => ({ getDatabase: vi.fn(() => ({})) }))
+let adminApp: App
+let adminDb: Firestore
+vi.mock('../../admin-init.js', () => ({
+ get adminDb() {
+ return adminDb
+ },
+}))
+
+import {
+ mergeDuplicatesCore,
+ type MergeDuplicatesResult,
+} from '../../callables/merge-duplicates.js'
+
+const uuid = (n: number) => `00000000-0000-0000-0000-${String(n).padStart(12, '0')}`
+const ts = 1713350400000
+const CLUSTER_ID = 'cluster-uuid-1'
+let testEnv: RulesTestEnvironment
+
+beforeAll(async () => {
+ process.env.FIRESTORE_EMULATOR_HOST = 'localhost:8081'
+ testEnv = await initializeTestEnvironment({
+ projectId: 'merge-dup-test',
+ firestore: {
+ host: 'localhost',
+ port: 8081,
+ rules:
+ 'rules_version = "2"; service cloud.firestore { match /{d=**} { allow read, write: if true; } }',
+ },
+ })
+ adminApp = initializeApp({ projectId: 'merge-dup-test' }, 'merge-dup-test')
+ adminDb = getFirestore(adminApp)
+})
+
+beforeEach(async () => {
+ await testEnv.clearFirestore()
+})
+afterAll(async () => {
+ await testEnv.cleanup()
+ await deleteApp(adminApp)
+})
+
+async function seedReport(id: string, overrides: Record<string, unknown> = {}) { // seeds matching reports/{id} + report_ops/{id} docs (rules bypassed); overrides merge into BOTH
+  await testEnv.withSecurityRulesDisabled(async (ctx) => {
+    await setDoc(doc(ctx.firestore(), 'reports', id), {
+      municipalityId: 'daet',
+      reportType: 'flood',
+      status: 'new',
+      severity: 'high',
+      barangayId: 'brgy1',
+      mediaRefs: [],
+      createdAt: ts,
+      updatedAt: ts,
+      schemaVersion: 1,
+      ...overrides,
+    })
+    await setDoc(doc(ctx.firestore(), 'report_ops', id), {
+      municipalityId: 'daet',
+      reportType: 'flood',
+      status: 'new',
+      severity: 'high',
+      createdAt: ts,
+      updatedAt: ts,
+      agencyIds: [],
+      activeResponderCount: 0,
+      requiresLocationFollowUp: false,
+      duplicateClusterId: CLUSTER_ID,
+      visibility: { scope: 'municipality', sharedWith: [] },
+      schemaVersion: 1,
+      ...overrides,
+    })
+  })
+}
+
+function expectError(result: MergeDuplicatesResult, code: string) {
+ expect(result.success).toBe(false)
+ if (!result.success) {
+ expect(result.errorCode).toBe(code)
+ }
+}
+
+const muniAdminActor = {
+ uid: 'admin-1',
+ claims: {
+ role: 'municipal_admin' as UserRole,
+ municipalityId: 'daet',
+ active: true,
+ auth_time: Math.floor(ts / 1000),
+ },
+}
+
+describe('mergeDuplicates', () => {
+ it('rejects a non-muni-admin caller', async () => {
+ const result = await mergeDuplicatesCore(
+ adminDb,
+ {
+ primaryReportId: 'r1',
+ duplicateReportIds: ['r2'],
+ idempotencyKey: uuid(1),
+ },
+ {
+ uid: 'citizen-1',
+ claims: { role: 'citizen' as UserRole, active: true, auth_time: Math.floor(ts / 1000) },
+ },
+ )
+ expectError(result, 'permission-denied')
+ })
+
+ it('rejects inactive admin', async () => {
+ await seedReport('r1')
+ await seedReport('r2')
+ const result = await mergeDuplicatesCore(
+ adminDb,
+ {
+ primaryReportId: 'r1',
+ duplicateReportIds: ['r2'],
+ idempotencyKey: uuid(99),
+ },
+ {
+ uid: 'admin-1',
+ claims: {
+ role: 'municipal_admin' as UserRole,
+ municipalityId: 'daet',
+ active: false,
+ auth_time: Math.floor(ts / 1000),
+ },
+ },
+ )
+ expectError(result, 'permission-denied')
+ })
+
+ it('rejects report IDs from different municipalities', async () => {
+ await seedReport('r1')
+ await seedReport('r2', { municipalityId: 'labo' })
+ const result = await mergeDuplicatesCore(
+ adminDb,
+ {
+ primaryReportId: 'r1',
+ duplicateReportIds: ['r2'],
+ idempotencyKey: uuid(2),
+ },
+ muniAdminActor,
+ )
+ expectError(result, 'invalid-argument')
+ })
+
+ it('rejects report IDs that do not share a duplicateClusterId', async () => {
+ await seedReport('r1', { duplicateClusterId: 'cluster-a' })
+ await seedReport('r2', { duplicateClusterId: 'cluster-b' })
+ const result = await mergeDuplicatesCore(
+ adminDb,
+ {
+ primaryReportId: 'r1',
+ duplicateReportIds: ['r2'],
+ idempotencyKey: uuid(3),
+ },
+ muniAdminActor,
+ )
+ expectError(result, 'failed-precondition')
+ })
+
+ it('sets status merged_as_duplicate on all non-primary reports', async () => {
+ await seedReport('r-primary')
+ await seedReport('r-dup1')
+ await seedReport('r-dup2')
+ await mergeDuplicatesCore(
+ adminDb,
+ {
+ primaryReportId: 'r-primary',
+ duplicateReportIds: ['r-dup1', 'r-dup2'],
+ idempotencyKey: uuid(4),
+ },
+ muniAdminActor,
+ )
+ const dup1 = await adminDb.collection('reports').doc('r-dup1').get()
+ const dup2 = await adminDb.collection('reports').doc('r-dup2').get()
+ expect(dup1.data()?.status).toBe('merged_as_duplicate')
+ expect(dup2.data()?.status).toBe('merged_as_duplicate')
+ })
+
+ it('sets mergedInto on all non-primary reports', async () => {
+ await seedReport('r-primary')
+ await seedReport('r-dup1')
+ await mergeDuplicatesCore(
+ adminDb,
+ {
+ primaryReportId: 'r-primary',
+ duplicateReportIds: ['r-dup1'],
+ idempotencyKey: uuid(5),
+ },
+ muniAdminActor,
+ )
+ const dup1 = await adminDb.collection('reports').doc('r-dup1').get()
+ expect(dup1.data()?.mergedInto).toBe('r-primary')
+ })
+
+ it('aggregates unique mediaRefs from duplicates onto the primary', async () => {
+ await seedReport('r-primary', { mediaRefs: ['media-a', 'media-b'] })
+ await seedReport('r-dup1', { mediaRefs: ['media-b', 'media-c'] })
+ await mergeDuplicatesCore(
+ adminDb,
+ {
+ primaryReportId: 'r-primary',
+ duplicateReportIds: ['r-dup1'],
+ idempotencyKey: uuid(6),
+ },
+ muniAdminActor,
+ )
+ const primary = await adminDb.collection('reports').doc('r-primary').get()
+ const refs = primary.data()?.mediaRefs as string[]
+ expect(refs).toContain('media-a')
+ expect(refs).toContain('media-b')
+ expect(refs).toContain('media-c')
+ expect(new Set(refs).size).toBe(refs.length)
+ })
+
+ it('is idempotent', async () => {
+ await seedReport('r-primary')
+ await seedReport('r-dup1')
+ const result1 = await mergeDuplicatesCore(
+ adminDb,
+ {
+ primaryReportId: 'r-primary',
+ duplicateReportIds: ['r-dup1'],
+ idempotencyKey: uuid(7),
+ },
+ muniAdminActor,
+ )
+ const result2 = await mergeDuplicatesCore(
+ adminDb,
+ {
+ primaryReportId: 'r-primary',
+ duplicateReportIds: ['r-dup1'],
+ idempotencyKey: uuid(7),
+ },
+ muniAdminActor,
+ )
+
+ // Assert first call succeeded
+ expect(result1.success).toBe(true)
+ if (result1.success) {
+ expect(result1.mergedCount).toBe(1)
+ }
+
+ // Assert replay returns same result
+ expect(result2.success).toBe(true)
+ if (result2.success) {
+ expect(result2.mergedCount).toBe(1)
+ }
+
+ const dup1 = await adminDb.collection('reports').doc('r-dup1').get()
+ expect(dup1.data()?.status).toBe('merged_as_duplicate')
+
+ const mergeEvents = await adminDb
+ .collection('report_events')
+ .where('reportId', '==', 'r-primary')
+ .get()
+ expect(mergeEvents.size).toBe(1)
+ })
+
+ it('rejects when primary report does not exist', async () => {
+ await seedReport('r-dup1')
+ const result = await mergeDuplicatesCore(
+ adminDb,
+ {
+ primaryReportId: 'r-missing',
+ duplicateReportIds: ['r-dup1'],
+ idempotencyKey: uuid(8),
+ },
+ muniAdminActor,
+ )
+ expectError(result, 'not-found')
+ })
+
+ it('updates report_ops for primary and duplicates', async () => {
+ await seedReport('r-primary')
+ await seedReport('r-dup1')
+ await mergeDuplicatesCore(
+ adminDb,
+ {
+ primaryReportId: 'r-primary',
+ duplicateReportIds: ['r-dup1'],
+ idempotencyKey: uuid(9),
+ },
+ muniAdminActor,
+ )
+ const primaryOps = await adminDb.collection('report_ops').doc('r-primary').get()
+ const dupOps = await adminDb.collection('report_ops').doc('r-dup1').get()
+ expect(dupOps.data()?.status).toBe('merged_as_duplicate')
+ expect((primaryOps.data()?.updatedAt as Timestamp).toMillis()).toBeGreaterThan(ts)
+ expect((dupOps.data()?.updatedAt as Timestamp).toMillis()).toBeGreaterThan(ts)
+ })
+})
diff --git a/functions/src/__tests__/callables/shift-handoff.test.ts b/functions/src/__tests__/callables/shift-handoff.test.ts
new file mode 100644
index 00000000..ffa9774c
--- /dev/null
+++ b/functions/src/__tests__/callables/shift-handoff.test.ts
@@ -0,0 +1,401 @@
+import { describe, it, expect, beforeAll, afterAll, beforeEach, vi } from 'vitest'
+import { initializeTestEnvironment, type RulesTestEnvironment } from '@firebase/rules-unit-testing'
+import { setDoc, doc, Timestamp } from 'firebase/firestore'
+import { type Firestore, getFirestore } from 'firebase-admin/firestore'
+import { initializeApp, deleteApp, type App } from 'firebase-admin/app'
+import { type UserRole } from '@bantayog/shared-types'
+
+const onCallMock = vi.hoisted(() => vi.fn())
+vi.mock('firebase-functions/v2/https', () => ({
+ onCall: onCallMock,
+ HttpsError: class HttpsError extends Error {
+ code: string
+ constructor(code: string, message: string) {
+ super(message)
+ this.code = code
+ }
+ },
+}))
+vi.mock('firebase-admin/database', () => ({ getDatabase: vi.fn(() => ({})) }))
+let adminApp: App
+let adminDb: Firestore
+vi.mock('../../admin-init.js', () => ({
+ get adminDb() {
+ return adminDb
+ },
+}))
+
+import { initiateShiftHandoffCore, acceptShiftHandoffCore } from '../../callables/shift-handoff.js'
+
+const uuid = (n: number) => `00000000-0000-0000-0000-${String(n).padStart(12, '0')}`
+const ts = 1713350400000
+let testEnv: RulesTestEnvironment
+
+beforeAll(async () => {
+ process.env.FIRESTORE_EMULATOR_HOST = 'localhost:8081'
+ testEnv = await initializeTestEnvironment({
+ projectId: 'shift-handoff-test',
+ firestore: {
+ host: 'localhost',
+ port: 8081,
+ rules:
+ 'rules_version = "2"; service cloud.firestore { match /{d=**} { allow read, write: if true; } }',
+ },
+ })
+ adminApp = initializeApp({ projectId: 'shift-handoff-test' }, 'shift-handoff-test')
+ adminDb = getFirestore(adminApp)
+})
+
+beforeEach(async () => {
+ await testEnv.clearFirestore()
+})
+afterAll(async () => {
+ await testEnv.cleanup()
+ await deleteApp(adminApp)
+})
+
+const adminActor = {
+ uid: 'admin-from',
+ claims: {
+ role: 'municipal_admin' as UserRole,
+ municipalityId: 'daet',
+ active: true,
+ auth_time: Math.floor(ts / 1000),
+ },
+}
+
+async function seedReportOp(id: string) {
+ await testEnv.withSecurityRulesDisabled(async (ctx) => {
+ await setDoc(doc(ctx.firestore(), 'report_ops', id), {
+ municipalityId: 'daet',
+ status: 'assigned',
+ severity: 'high',
+ createdAt: ts,
+ updatedAt: ts,
+ agencyIds: [],
+ activeResponderCount: 0,
+ requiresLocationFollowUp: false,
+ visibility: { scope: 'municipality', sharedWith: [] },
+ reportType: 'flood',
+ schemaVersion: 1,
+ })
+ })
+}
+
+describe('initiateShiftHandoff', () => {
+ it('rejects citizens and responders', async () => {
+ const result = await initiateShiftHandoffCore(
+ adminDb,
+ {
+ notes: 'Handover notes',
+ idempotencyKey: uuid(1),
+ },
+ {
+ uid: 'u1',
+ claims: { role: 'citizen' as UserRole, active: true, auth_time: Math.floor(ts / 1000) },
+ },
+ 'corr-1',
+ )
+ expect(result.success).toBe(false)
+ if (!result.success) {
+ expect(result.errorCode).toBe('permission-denied')
+ }
+ })
+
+ it('rejects inactive admin', async () => {
+ const result = await initiateShiftHandoffCore(
+ adminDb,
+ {
+ notes: 'Handover notes',
+ idempotencyKey: uuid(10),
+ },
+ {
+ uid: 'admin-inactive',
+ claims: {
+ role: 'municipal_admin' as UserRole,
+ municipalityId: 'daet',
+ active: false,
+ auth_time: Math.floor(ts / 1000),
+ },
+ },
+ 'corr-inactive',
+ )
+ expect(result.success).toBe(false)
+ if (!result.success) {
+ expect(result.errorCode).toBe('permission-denied')
+ }
+ })
+
+ it('rejects municipal_admin missing municipalityId', async () => {
+ const result = await initiateShiftHandoffCore(
+ adminDb,
+ {
+ notes: 'Handover notes',
+ idempotencyKey: uuid(11),
+ },
+ {
+ uid: 'admin-no-muni',
+ claims: {
+ role: 'municipal_admin' as UserRole,
+ active: true,
+ auth_time: Math.floor(ts / 1000),
+ },
+ },
+ 'corr-no-muni',
+ )
+ expect(result.success).toBe(false)
+ if (!result.success) {
+ expect(result.errorCode).toBe('permission-denied')
+ }
+ })
+
+ it('creates shift_handoffs doc with status pending and no toUid', async () => {
+ const result = await initiateShiftHandoffCore(
+ adminDb,
+ {
+ notes: 'End of shift',
+ idempotencyKey: uuid(2),
+ },
+ adminActor,
+ 'corr-2',
+ )
+ expect(result.success).toBe(true)
+ if (result.success) {
+ expect(result.handoffId).toBeDefined()
+ }
+ const created = await adminDb
+ .collection('shift_handoffs')
+ .doc(result.success ? result.handoffId : '')
+ .get()
+ expect(created.data()?.status).toBe('pending')
+ expect(created.data()?.toUid).toBeUndefined()
+ expect(created.data()?.fromUid).toBe('admin-from')
+ })
+
+ it('builds activeIncidentSnapshot from live Firestore state', async () => {
+ await seedReportOp('r-active-1')
+ await seedReportOp('r-active-2')
+ const result = await initiateShiftHandoffCore(
+ adminDb,
+ {
+ notes: 'Handover',
+ idempotencyKey: uuid(3),
+ },
+ adminActor,
+ 'corr-3',
+ )
+ expect(result.success).toBe(true)
+ const created = await adminDb
+ .collection('shift_handoffs')
+ .doc(result.success ? result.handoffId : '')
+ .get()
+ const snapshot = created.data()?.activeIncidentIds as string[]
+ expect(snapshot).toContain('r-active-1')
+ expect(snapshot).toContain('r-active-2')
+ })
+
+ it('is idempotent', async () => {
+ const result1 = await initiateShiftHandoffCore(
+ adminDb,
+ {
+ notes: '',
+ idempotencyKey: uuid(4),
+ },
+ adminActor,
+ 'corr-4',
+ )
+ expect(result1.success).toBe(true)
+
+ const result2 = await initiateShiftHandoffCore(
+ adminDb,
+ {
+ notes: '',
+ idempotencyKey: uuid(4),
+ },
+ adminActor,
+ 'corr-5',
+ )
+ expect(result2.success).toBe(true)
+
+ if (result1.success && result2.success) {
+ expect(result1.handoffId).toBe(result2.handoffId)
+ }
+ })
+})
+
+describe('acceptShiftHandoff', () => {
+  async function createHandoff(id: string, overrides: Record<string, unknown> = {}) { // seeds a pending, unexpired shift_handoffs/{id} doc with rules bypassed
+    await testEnv.withSecurityRulesDisabled(async (ctx) => {
+      await setDoc(doc(ctx.firestore(), 'shift_handoffs', id), {
+        fromUid: 'admin-from',
+        municipalityId: 'daet',
+        notes: '',
+        activeIncidentIds: [],
+        status: 'pending',
+        createdAt: Timestamp.fromMillis(ts),
+        expiresAt: Timestamp.fromMillis(Date.now() + 1800000),
+        schemaVersion: 1,
+        ...overrides,
+      })
+    })
+  }
+
+ it('rejects inactive admin', async () => {
+ await createHandoff('h-inactive')
+ const result = await acceptShiftHandoffCore(
+ adminDb,
+ { handoffId: 'h-inactive', idempotencyKey: uuid(12) },
+ {
+ uid: 'admin-to',
+ claims: {
+ role: 'municipal_admin' as UserRole,
+ municipalityId: 'daet',
+ active: false,
+ auth_time: Math.floor(ts / 1000),
+ },
+ },
+ 'corr-inactive',
+ )
+ expect(result.success).toBe(false)
+ if (!result.success) {
+ expect(result.errorCode).toBe('permission-denied')
+ }
+ })
+
+ it('rejects non-existent handoff', async () => {
+ const result = await acceptShiftHandoffCore(
+ adminDb,
+ { handoffId: 'h-missing', idempotencyKey: uuid(13) },
+ {
+ uid: 'admin-to',
+ claims: {
+ role: 'municipal_admin' as UserRole,
+ municipalityId: 'daet',
+ active: true,
+ auth_time: Math.floor(ts / 1000),
+ },
+ },
+ 'corr-missing',
+ )
+ expect(result.success).toBe(false)
+ if (!result.success) {
+ expect(result.errorCode).toBe('not-found')
+ }
+ })
+
+ it('rejects expired handoff', async () => {
+ await createHandoff('h-expired', { expiresAt: Timestamp.fromMillis(ts - 1) })
+ const result = await acceptShiftHandoffCore(
+ adminDb,
+ { handoffId: 'h-expired', idempotencyKey: uuid(14) },
+ {
+ uid: 'admin-to',
+ claims: {
+ role: 'municipal_admin' as UserRole,
+ municipalityId: 'daet',
+ active: true,
+ auth_time: Math.floor(ts / 1000),
+ },
+ },
+ 'corr-expired',
+ )
+ expect(result.success).toBe(false)
+ if (!result.success) {
+ expect(result.errorCode).toBe('failed-precondition')
+ }
+ })
+
+ it('rejects self-accept', async () => {
+ await createHandoff('h-self')
+ const result = await acceptShiftHandoffCore(
+ adminDb,
+ { handoffId: 'h-self', idempotencyKey: uuid(15) },
+ {
+ uid: 'admin-from',
+ claims: {
+ role: 'municipal_admin' as UserRole,
+ municipalityId: 'daet',
+ active: true,
+ auth_time: Math.floor(ts / 1000),
+ },
+ },
+ 'corr-self',
+ )
+ expect(result.success).toBe(false)
+ if (!result.success) {
+ expect(result.errorCode).toBe('failed-precondition')
+ }
+ })
+
+ it('rejects a caller from a different municipality', async () => {
+ await createHandoff('h1')
+ const result = await acceptShiftHandoffCore(
+ adminDb,
+ { handoffId: 'h1', idempotencyKey: uuid(5) },
+ {
+ uid: 'other-admin',
+ claims: {
+ role: 'municipal_admin' as UserRole,
+ municipalityId: 'labo',
+ active: true,
+ auth_time: Math.floor(ts / 1000),
+ },
+ },
+ 'corr-6',
+ )
+ expect(result.success).toBe(false)
+ if (!result.success) {
+ expect(result.errorCode).toBe('permission-denied')
+ }
+ })
+
+ it('updates status to accepted and sets toUid', async () => {
+ await createHandoff('h2')
+ const result = await acceptShiftHandoffCore(
+ adminDb,
+ { handoffId: 'h2', idempotencyKey: uuid(6) },
+ {
+ uid: 'admin-to',
+ claims: {
+ role: 'municipal_admin' as UserRole,
+ municipalityId: 'daet',
+ active: true,
+ auth_time: Math.floor(ts / 1000),
+ },
+ },
+ 'corr-7',
+ )
+ expect(result.success).toBe(true)
+ const updated = await adminDb.collection('shift_handoffs').doc('h2').get()
+ expect(updated.data()?.status).toBe('accepted')
+ expect(updated.data()?.toUid).toBe('admin-to')
+ })
+
+ it('is idempotent — double-accept returns success', async () => {
+ await createHandoff('h3')
+ const actor = {
+ uid: 'admin-to',
+ claims: {
+ role: 'municipal_admin' as UserRole,
+ municipalityId: 'daet',
+ active: true,
+ auth_time: Math.floor(ts / 1000),
+ },
+ }
+ const result1 = await acceptShiftHandoffCore(
+ adminDb,
+ { handoffId: 'h3', idempotencyKey: uuid(7) },
+ actor,
+ 'corr-8',
+ )
+ expect(result1.success).toBe(true)
+
+ const result2 = await acceptShiftHandoffCore(
+ adminDb,
+ { handoffId: 'h3', idempotencyKey: uuid(7) },
+ actor,
+ 'corr-9',
+ )
+ expect(result2.success).toBe(true)
+ })
+})
diff --git a/functions/src/__tests__/scheduled/admin-operations-sweep.test.ts b/functions/src/__tests__/scheduled/admin-operations-sweep.test.ts
index 453578fc..6883850f 100644
--- a/functions/src/__tests__/scheduled/admin-operations-sweep.test.ts
+++ b/functions/src/__tests__/scheduled/admin-operations-sweep.test.ts
@@ -52,12 +52,12 @@ describe('adminOperationsSweep — agency assistance escalation', () => {
priority: 'normal',
fulfilledByDispatchIds: [],
expiresAt: ts + 3600000,
- schemaVersion: 1,
+ escalatedAt: null,
})
})
await adminOperationsSweepCore(adminDb, { now: Timestamp.fromMillis(ts) })
const snap = await adminDb.collection('agency_assistance_requests').doc('ar1').get()
- expect(snap.data()?.escalatedAt).toBeUndefined()
+ expect(snap.data()?.escalatedAt).toBeNull()
})
it('sets escalatedAt on requests pending over 30 minutes', async () => {
@@ -74,7 +74,7 @@ describe('adminOperationsSweep — agency assistance escalation', () => {
priority: 'normal',
fulfilledByDispatchIds: [],
expiresAt: ts + 3600000,
- schemaVersion: 1,
+ escalatedAt: null,
})
})
await adminOperationsSweepCore(adminDb, { now: Timestamp.fromMillis(ts) })
@@ -88,7 +88,6 @@ describe('adminOperationsSweep — agency assistance escalation', () => {
await setDoc(doc(ctx.firestore(), 'agency_assistance_requests', 'ar1'), {
status: 'pending',
createdAt: ts - THIRTY_MIN_MS - 1,
- escalatedAt: originalEscalatedAt,
reportId: 'r1',
requestedByMunicipalId: 'daet',
requestedByMunicipality: 'Daet',
@@ -98,7 +97,7 @@ describe('adminOperationsSweep — agency assistance escalation', () => {
priority: 'normal',
fulfilledByDispatchIds: [],
expiresAt: ts + 3600000,
- schemaVersion: 1,
+ escalatedAt: originalEscalatedAt,
})
})
await adminOperationsSweepCore(adminDb, { now: Timestamp.fromMillis(ts) })
@@ -106,3 +105,41 @@ describe('adminOperationsSweep — agency assistance escalation', () => {
expect(snap.data()?.escalatedAt).toBe(originalEscalatedAt) // unchanged
})
})
+
+describe('adminOperationsSweep — shift handoff escalation', () => {
+ it('ignores handoffs pending less than 30 minutes', async () => {
+ await testEnv.withSecurityRulesDisabled(async (ctx) => {
+ await setDoc(doc(ctx.firestore(), 'shift_handoffs', 'h1'), {
+ fromUid: 'admin-1',
+ municipalityId: 'daet',
+ notes: '',
+ activeIncidentSnapshot: [],
+ status: 'pending',
+ createdAt: ts - THIRTY_MIN_MS + 60000,
+ expiresAt: ts + 1800000,
+ escalatedAt: null,
+ })
+ })
+ await adminOperationsSweepCore(adminDb, { now: Timestamp.fromMillis(ts) })
+ const snap = await adminDb.collection('shift_handoffs').doc('h1').get()
+ expect(snap.data()?.escalatedAt).toBeNull()
+ })
+
+ it('sets escalatedAt on handoffs pending over 30 minutes', async () => {
+ await testEnv.withSecurityRulesDisabled(async (ctx) => {
+ await setDoc(doc(ctx.firestore(), 'shift_handoffs', 'h1'), {
+ fromUid: 'admin-1',
+ municipalityId: 'daet',
+ notes: '',
+ activeIncidentSnapshot: [],
+ status: 'pending',
+ createdAt: ts - THIRTY_MIN_MS - 1,
+ expiresAt: ts + 1800000,
+ escalatedAt: null,
+ })
+ })
+ await adminOperationsSweepCore(adminDb, { now: Timestamp.fromMillis(ts) })
+ const snap = await adminDb.collection('shift_handoffs').doc('h1').get()
+ expect(snap.data()?.escalatedAt).toBe(ts)
+ })
+})
diff --git a/functions/src/__tests__/triggers/duplicate-cluster.test.ts b/functions/src/__tests__/triggers/duplicate-cluster.test.ts
new file mode 100644
index 00000000..10636817
--- /dev/null
+++ b/functions/src/__tests__/triggers/duplicate-cluster.test.ts
@@ -0,0 +1,245 @@
+import { describe, it, expect, beforeAll, afterAll, beforeEach, vi } from 'vitest'
+import { initializeTestEnvironment, type RulesTestEnvironment } from '@firebase/rules-unit-testing'
+import { setDoc, doc } from 'firebase/firestore'
+import { type Firestore } from 'firebase-admin/firestore'
+import type { QueryDocumentSnapshot } from 'firebase-admin/firestore'
+
+vi.mock('firebase-admin/database', () => ({ getDatabase: vi.fn(() => ({})) }))
+let adminDb: Firestore
+vi.mock('../../admin-init.js', () => ({
+ get adminDb() {
+ return adminDb
+ },
+}))
+
+import { duplicateClusterTriggerCore } from '../../triggers/duplicate-cluster-trigger.js'
+
+const ts = 1713350400000
+let testEnv: RulesTestEnvironment
+
+beforeAll(async () => {
+ testEnv = await initializeTestEnvironment({
+ projectId: 'dup-cluster-test',
+ firestore: {
+ host: 'localhost',
+ port: 8081,
+ rules:
+ 'rules_version = "2"; service cloud.firestore { match /{d=**} { allow read, write: if true; } }',
+ },
+ })
+ adminDb = testEnv.unauthenticatedContext().firestore() as unknown as Firestore
+})
+
+beforeEach(async () => {
+ await testEnv.clearFirestore()
+})
+afterAll(async () => {
+ await testEnv.cleanup()
+})
+
+const DAET_GEOHASH = 'w7hfm2mb'
+const NEARBY_GEOHASH = 'w7hfm2mc'
+
+async function seedReportOps(id: string, overrides: Record<string, unknown>) {
+ await testEnv.withSecurityRulesDisabled(async (ctx) => {
+ await setDoc(doc(ctx.firestore(), 'report_ops', id), {
+ municipalityId: 'daet',
+ reportType: 'flood',
+ status: 'new',
+ severity: 'high',
+ createdAt: ts,
+ updatedAt: ts,
+ agencyIds: [],
+ activeResponderCount: 0,
+ requiresLocationFollowUp: false,
+ locationGeohash: DAET_GEOHASH,
+ visibility: { scope: 'municipality', sharedWith: [] },
+ schemaVersion: 1,
+ ...overrides,
+ })
+ })
+}
+
+function makeSnap(id: string, data: Record<string, unknown>): QueryDocumentSnapshot {
+ return {
+ id,
+ ref: adminDb.collection('report_ops').doc(id),
+ data: () => data,
+ } as unknown as QueryDocumentSnapshot
+}
+
+describe('duplicateClusterTrigger', () => {
+ it('does not set duplicateClusterId when no nearby reports exist', async () => {
+ const newData = {
+ municipalityId: 'daet',
+ reportType: 'flood',
+ status: 'new',
+ severity: 'high',
+ createdAt: ts,
+ updatedAt: ts,
+ agencyIds: [],
+ activeResponderCount: 0,
+ requiresLocationFollowUp: false,
+ locationGeohash: DAET_GEOHASH,
+ visibility: { scope: 'municipality', sharedWith: [] },
+ schemaVersion: 1,
+ }
+ const snap = makeSnap('r-new', newData)
+ await duplicateClusterTriggerCore(adminDb, snap)
+ const updated = await adminDb.collection('report_ops').doc('r-new').get()
+ expect(updated.data()?.duplicateClusterId).toBeUndefined()
+ })
+
+ it('sets duplicateClusterId on both reports when same type + muni + within geohash proximity + within 2h', async () => {
+ await seedReportOps('r-existing', { locationGeohash: NEARBY_GEOHASH, createdAt: ts - 3600000 })
+ const newData = {
+ municipalityId: 'daet',
+ reportType: 'flood',
+ status: 'new',
+ severity: 'high',
+ createdAt: ts,
+ updatedAt: ts,
+ agencyIds: [],
+ activeResponderCount: 0,
+ requiresLocationFollowUp: false,
+ locationGeohash: DAET_GEOHASH,
+ visibility: { scope: 'municipality', sharedWith: [] },
+ schemaVersion: 1,
+ }
+ await seedReportOps('r-new', { locationGeohash: DAET_GEOHASH })
+ const snap = makeSnap('r-new', newData)
+ await duplicateClusterTriggerCore(adminDb, snap)
+ const newSnap = await adminDb.collection('report_ops').doc('r-new').get()
+ const existingSnap = await adminDb.collection('report_ops').doc('r-existing').get()
+ expect(newSnap.data()?.duplicateClusterId).toBeDefined()
+ expect(newSnap.data()?.duplicateClusterId).toBe(existingSnap.data()?.duplicateClusterId)
+ })
+
+ it('does not cluster reports of different types', async () => {
+ await seedReportOps('r-fire', {
+ reportType: 'fire',
+ locationGeohash: NEARBY_GEOHASH,
+ createdAt: ts - 60000,
+ })
+ const newData = {
+ municipalityId: 'daet',
+ reportType: 'flood',
+ status: 'new',
+ severity: 'high',
+ createdAt: ts,
+ updatedAt: ts,
+ agencyIds: [],
+ activeResponderCount: 0,
+ requiresLocationFollowUp: false,
+ locationGeohash: DAET_GEOHASH,
+ visibility: { scope: 'municipality', sharedWith: [] },
+ schemaVersion: 1,
+ }
+ const snap = makeSnap('r-new', newData)
+ await duplicateClusterTriggerCore(adminDb, snap)
+ const updated = await adminDb.collection('report_ops').doc('r-new').get()
+ expect(updated.data()?.duplicateClusterId).toBeUndefined()
+ })
+
+ it('does not cluster reports older than 2h', async () => {
+ const TWO_H_PLUS_ONE = 2 * 3600000 + 1
+ await seedReportOps('r-old', {
+ locationGeohash: NEARBY_GEOHASH,
+ createdAt: ts - TWO_H_PLUS_ONE,
+ })
+ const newData = {
+ municipalityId: 'daet',
+ reportType: 'flood',
+ status: 'new',
+ severity: 'high',
+ createdAt: ts,
+ updatedAt: ts,
+ agencyIds: [],
+ activeResponderCount: 0,
+ requiresLocationFollowUp: false,
+ locationGeohash: DAET_GEOHASH,
+ visibility: { scope: 'municipality', sharedWith: [] },
+ schemaVersion: 1,
+ }
+ const snap = makeSnap('r-new', newData)
+ await duplicateClusterTriggerCore(adminDb, snap)
+ const updated = await adminDb.collection('report_ops').doc('r-new').get()
+ expect(updated.data()?.duplicateClusterId).toBeUndefined()
+ })
+
+ it('assigns the same existing clusterId when a third report joins a cluster', async () => {
+ const existingClusterId = 'cluster-uuid-existing'
+ await seedReportOps('r-first', {
+ locationGeohash: NEARBY_GEOHASH,
+ createdAt: ts - 3600000,
+ duplicateClusterId: existingClusterId,
+ })
+ const newData = {
+ municipalityId: 'daet',
+ reportType: 'flood',
+ status: 'new',
+ severity: 'high',
+ createdAt: ts,
+ updatedAt: ts,
+ agencyIds: [],
+ activeResponderCount: 0,
+ requiresLocationFollowUp: false,
+ locationGeohash: DAET_GEOHASH,
+ visibility: { scope: 'municipality', sharedWith: [] },
+ schemaVersion: 1,
+ }
+ await seedReportOps('r-third', { locationGeohash: DAET_GEOHASH })
+ const snap = makeSnap('r-third', newData)
+ await duplicateClusterTriggerCore(adminDb, snap)
+ const updated = await adminDb.collection('report_ops').doc('r-third').get()
+ expect(updated.data()?.duplicateClusterId).toBe(existingClusterId)
+ })
+
+ it('skips reports with no locationGeohash', async () => {
+ const newData = {
+ municipalityId: 'daet',
+ reportType: 'flood',
+ status: 'new',
+ severity: 'high',
+ createdAt: ts,
+ updatedAt: ts,
+ agencyIds: [],
+ activeResponderCount: 0,
+ requiresLocationFollowUp: false,
+ visibility: { scope: 'municipality', sharedWith: [] },
+ schemaVersion: 1,
+ }
+ const snap = makeSnap('r-noloc', newData)
+ await duplicateClusterTriggerCore(adminDb, snap)
+ const updated = await adminDb.collection('report_ops').doc('r-noloc').get()
+ expect(updated.data()?.duplicateClusterId).toBeUndefined()
+ })
+
+ it('is safe to run twice (idempotent cluster assignment)', async () => {
+ await seedReportOps('r-existing', { locationGeohash: NEARBY_GEOHASH, createdAt: ts - 3600000 })
+ const newData = {
+ municipalityId: 'daet',
+ reportType: 'flood',
+ status: 'new',
+ severity: 'high',
+ createdAt: ts,
+ updatedAt: ts,
+ agencyIds: [],
+ activeResponderCount: 0,
+ requiresLocationFollowUp: false,
+ locationGeohash: DAET_GEOHASH,
+ visibility: { scope: 'municipality', sharedWith: [] },
+ schemaVersion: 1,
+ }
+ const snap = makeSnap('r-new', newData)
+ await seedReportOps('r-new', { locationGeohash: DAET_GEOHASH })
+ await duplicateClusterTriggerCore(adminDb, snap)
+ const firstRunSnap = await adminDb.collection('report_ops').doc('r-new').get()
+ const firstClusterId = firstRunSnap.data()?.duplicateClusterId
+
+ const snap2 = makeSnap('r-new', { ...newData, duplicateClusterId: firstClusterId })
+ await duplicateClusterTriggerCore(adminDb, snap2)
+ const secondRunSnap = await adminDb.collection('report_ops').doc('r-new').get()
+ expect(secondRunSnap.data()?.duplicateClusterId).toBe(firstClusterId)
+ })
+})
diff --git a/functions/src/callables/merge-duplicates.ts b/functions/src/callables/merge-duplicates.ts
new file mode 100644
index 00000000..64036f3d
--- /dev/null
+++ b/functions/src/callables/merge-duplicates.ts
@@ -0,0 +1,277 @@
+import { onCall, HttpsError, type CallableRequest } from 'firebase-functions/v2/https'
+import { Timestamp } from 'firebase-admin/firestore'
+import { z } from 'zod'
+import { BantayogError, logDimension } from '@bantayog/shared-validators'
+import type { UserRole } from '@bantayog/shared-types'
+import { adminDb } from '../admin-init.js'
+import { bantayogErrorToHttps } from './https-error.js'
+import { withIdempotency, IdempotencyInProgressError } from '../idempotency/guard.js'
+import { checkRateLimit } from '../services/rate-limit.js'
+
+const log = logDimension('mergeDuplicates')
+
+const inputSchema = z
+ .object({
+ primaryReportId: z.string().min(1),
+ duplicateReportIds: z.array(z.string().min(1)).min(1).max(50),
+ idempotencyKey: z.uuid(),
+ })
+ .refine((data) => new Set(data.duplicateReportIds).size === data.duplicateReportIds.length, {
+ message: 'duplicateReportIds must be unique',
+ path: ['duplicateReportIds'],
+ })
+ .refine((data) => !data.duplicateReportIds.includes(data.primaryReportId), {
+ message: 'primaryReportId cannot be in duplicateReportIds',
+ path: ['duplicateReportIds'],
+ })
+
+export interface MergeDuplicatesActor {
+ uid: string
+ claims: { role: UserRole; municipalityId?: string; active: boolean; auth_time: number }
+}
+
+export type MergeDuplicatesResult =
+ | { success: true; mergedCount: number }
+ | { success: false; errorCode: string }
+
+interface OpsRow {
+ id: string
+ municipalityId?: string
+ duplicateClusterId?: string
+}
+
+export async function mergeDuplicatesCore(
+ db: FirebaseFirestore.Firestore,
+ input: z.infer<typeof inputSchema>,
+ actor: MergeDuplicatesActor,
+ correlationId = crypto.randomUUID(),
+): Promise<MergeDuplicatesResult> {
+ if (actor.claims.role !== 'municipal_admin' && actor.claims.role !== 'provincial_superadmin') {
+ log({
+ severity: 'ERROR',
+ code: 'merge.permission_denied',
+ message: 'Caller role not allowed',
+ data: { role: actor.claims.role, correlationId },
+ })
+ return { success: false, errorCode: 'permission-denied' }
+ }
+
+ if (!actor.claims.active) {
+ log({
+ severity: 'ERROR',
+ code: 'merge.permission_denied',
+ message: 'Caller account is not active',
+ data: { correlationId },
+ })
+ return { success: false, errorCode: 'permission-denied' }
+ }
+
+ const { primaryReportId, duplicateReportIds, idempotencyKey } = input
+ const allIds = [primaryReportId, ...duplicateReportIds]
+
+ const { result: cached } = await withIdempotency(
+ db,
+ { key: `mergeDuplicates:${actor.uid}:${idempotencyKey}`, payload: input },
+ async () => {
+ return db.runTransaction(async (tx) => {
+ // Read report_ops inside transaction
+ const opsSnaps = await Promise.all(
+ allIds.map((id) => tx.get(db.collection('report_ops').doc(id))),
+ )
+
+ // Fail fast if any missing
+ for (const snap of opsSnaps) {
+ if (!snap.exists) {
+ return { success: false, errorCode: 'not-found' } as MergeDuplicatesResult
+ }
+ }
+
+ const opsData: OpsRow[] = opsSnaps.map((s) => {
+ const d = s.data()
+ return {
+ id: s.id,
+ municipalityId: d?.municipalityId,
+ duplicateClusterId: d?.duplicateClusterId,
+ }
+ })
+
+ // Validate all reports have municipalityId
+ const missingMunicipality = opsData.some((d) => !d.municipalityId)
+ if (missingMunicipality) {
+ return { success: false, errorCode: 'failed-precondition' } as MergeDuplicatesResult
+ }
+
+ // Municipality check
+ const municipalities = new Set(opsData.map((d) => d.municipalityId))
+ if (municipalities.size > 1) {
+ return { success: false, errorCode: 'invalid-argument' } as MergeDuplicatesResult
+ }
+
+ // Cluster check — all reports must share exactly one cluster ID
+ const clusterIds = opsData
+ .map((d) => d.duplicateClusterId)
+ .filter((id): id is string => typeof id === 'string' && id.length > 0)
+ if (clusterIds.length !== opsData.length) {
+ return { success: false, errorCode: 'failed-precondition' } as MergeDuplicatesResult
+ }
+ if (new Set(clusterIds).size > 1) {
+ return { success: false, errorCode: 'failed-precondition' } as MergeDuplicatesResult
+ }
+
+ // Municipality authorization
+ const municipalityId = opsData[0]?.municipalityId
+ if (
+ actor.claims.role === 'municipal_admin' &&
+ actor.claims.municipalityId !== municipalityId
+ ) {
+ return { success: false, errorCode: 'permission-denied' } as MergeDuplicatesResult
+ }
+
+ const reportSnaps = await Promise.all(
+ allIds.map((id) => tx.get(db.collection('reports').doc(id))),
+ )
+
+ for (const snap of reportSnaps) {
+ if (!snap.exists) {
+ return { success: false, errorCode: 'not-found' } as MergeDuplicatesResult
+ }
+ }
+
+ const primarySnap = reportSnaps.find((s) => s.id === primaryReportId)
+ if (!primarySnap) {
+ return { success: false, errorCode: 'not-found' } as MergeDuplicatesResult
+ }
+ const primaryReportData = primarySnap.data()
+ if (!primaryReportData) {
+ return { success: false, errorCode: 'not-found' } as MergeDuplicatesResult
+ }
+
+ const primaryMediaRefs = primaryReportData.mediaRefs
+ const safePrimaryMediaRefs = Array.isArray(primaryMediaRefs)
+ ? primaryMediaRefs.filter((r): r is string => typeof r === 'string')
+ : []
+ const allMediaRefs = new Set(safePrimaryMediaRefs)
+
+ for (const s of reportSnaps) {
+ if (s.id === primaryReportId) continue
+ const dupMediaRefs = s.data()?.mediaRefs
+ if (Array.isArray(dupMediaRefs)) {
+ for (const ref of dupMediaRefs) {
+ if (typeof ref === 'string') {
+ allMediaRefs.add(ref)
+ }
+ }
+ }
+ }
+
+ tx.update(db.collection('reports').doc(primaryReportId), {
+ mediaRefs: Array.from(allMediaRefs),
+ updatedAt: Timestamp.now(),
+ })
+ tx.update(db.collection('report_ops').doc(primaryReportId), {
+ updatedAt: Timestamp.now(),
+ })
+
+ const eventRef = db.collection('report_events').doc()
+ tx.set(eventRef, {
+ eventId: eventRef.id,
+ reportId: primaryReportId,
+ actor: actor.uid,
+ actorRole: actor.claims.role,
+ at: Timestamp.now(),
+ correlationId,
+ schemaVersion: 1,
+ mergedCount: duplicateReportIds.length,
+ mergedDuplicateIds: duplicateReportIds,
+ })
+
+ for (const dupId of duplicateReportIds) {
+ tx.update(db.collection('reports').doc(dupId), {
+ status: 'merged_as_duplicate',
+ mergedInto: primaryReportId,
+ updatedAt: Timestamp.now(),
+ })
+ tx.update(db.collection('report_ops').doc(dupId), {
+ status: 'merged_as_duplicate',
+ updatedAt: Timestamp.now(),
+ })
+ }
+
+ log({
+ severity: 'INFO',
+ code: 'merge.complete',
+ message: `Merged ${String(duplicateReportIds.length)} duplicates into ${primaryReportId}`,
+ data: { correlationId },
+ })
+
+ return { success: true, mergedCount: duplicateReportIds.length } as MergeDuplicatesResult
+ })
+ },
+ ).catch((err: unknown): { result: MergeDuplicatesResult; fromCache: boolean } => {
+ if (err instanceof IdempotencyInProgressError) {
+ return { result: { success: false, errorCode: 'resource-exhausted' }, fromCache: false }
+ }
+ throw err
+ })
+
+ return cached
+}
+
+export const mergeDuplicates = onCall(
+ { region: 'asia-southeast1', enforceAppCheck: true, maxInstances: 100 },
+ async (req: CallableRequest) => {
+ if (!req.auth) throw new HttpsError('unauthenticated', 'sign-in required')
+ const claims = req.auth.token as Record<string, unknown> | null
+ if (!claims) throw new HttpsError('unauthenticated', 'token required')
+ if (claims.role !== 'municipal_admin' && claims.role !== 'provincial_superadmin') {
+ throw new HttpsError('permission-denied', 'municipal_admin or provincial_superadmin required')
+ }
+ if (claims.active !== true) {
+ throw new HttpsError('permission-denied', 'account is not active')
+ }
+ if (claims.role === 'municipal_admin' && claims.municipalityId === undefined) {
+ throw new HttpsError('permission-denied', 'municipalityId missing from token claims')
+ }
+
+ const parsed = inputSchema.safeParse(req.data)
+ if (!parsed.success) throw new HttpsError('invalid-argument', 'malformed payload')
+
+ const rl = await checkRateLimit(adminDb, {
+ key: `mergeDuplicates:${req.auth.uid}`,
+ limit: 60,
+ windowSeconds: 60,
+ now: Timestamp.now(),
+ })
+ if (!rl.allowed) {
+ throw new HttpsError('resource-exhausted', 'rate limit', {
+ retryAfterSeconds: rl.retryAfterSeconds,
+ })
+ }
+
+ try {
+ const correlationId = crypto.randomUUID()
+ const actorClaims: MergeDuplicatesActor['claims'] = {
+ role: claims.role as UserRole,
+ active: claims.active as boolean,
+ auth_time: claims.auth_time as number,
+ }
+ if (typeof claims.municipalityId === 'string') {
+ actorClaims.municipalityId = claims.municipalityId
+ }
+ return await mergeDuplicatesCore(
+ adminDb,
+ parsed.data,
+ {
+ uid: req.auth.uid,
+ claims: actorClaims,
+ },
+ correlationId,
+ )
+ } catch (err: unknown) {
+ if (err instanceof BantayogError) {
+ throw bantayogErrorToHttps(err)
+ }
+ throw err
+ }
+ },
+)
diff --git a/functions/src/callables/shift-handoff.ts b/functions/src/callables/shift-handoff.ts
new file mode 100644
index 00000000..f07c3346
--- /dev/null
+++ b/functions/src/callables/shift-handoff.ts
@@ -0,0 +1,319 @@
+import { createHash, randomUUID } from 'node:crypto'
+import { Timestamp } from 'firebase-admin/firestore'
+import {
+ onCall,
+ type CallableRequest,
+ HttpsError,
+ type FunctionsErrorCode,
+} from 'firebase-functions/v2/https'
+import { z } from 'zod'
+import { adminDb } from '../admin-init.js'
+import { bantayogErrorToHttps } from './https-error.js'
+import { withIdempotency } from '../idempotency/guard.js'
+import { checkRateLimit } from '../services/rate-limit.js'
+import { BantayogError, logDimension, type ReportStatus } from '@bantayog/shared-validators'
+import { type UserRole } from '@bantayog/shared-types'
+
+interface ShiftHandoff {
+ fromUid: string
+ municipalityId: string
+ notes: string
+ activeIncidentIds: string[]
+ status: 'pending' | 'accepted'
+ createdAt: Timestamp
+ expiresAt: Timestamp
+ schemaVersion: number
+}
+
+const log = logDimension('shiftHandoff')
+
+const initiateSchema = z.object({
+ notes: z.string().max(2000),
+ idempotencyKey: z.uuid(),
+})
+
+const acceptSchema = z.object({
+ handoffId: z.string().min(1),
+ idempotencyKey: z.uuid(),
+})
+
+const ADMIN_ROLES: UserRole[] = ['municipal_admin', 'agency_admin', 'provincial_superadmin']
+const ACTIVE_REPORT_STATUSES: ReportStatus[] = ['assigned', 'acknowledged', 'en_route', 'on_scene']
+const ACTIVE_DISPATCH_STATUSES = ['accepted', 'acknowledged', 'en_route', 'on_scene']
+
+export interface HandoffActor {
+ uid: string
+ claims: { role: UserRole; municipalityId?: string; active: boolean; auth_time: number }
+}
+
+export type InitiateResult =
+ | { success: true; handoffId: string }
+ | { success: false; errorCode: string }
+
+export type AcceptResult = { success: true } | { success: false; errorCode: string }
+
+export async function initiateShiftHandoffCore(
+ db: FirebaseFirestore.Firestore,
+ input: z.infer<typeof initiateSchema>,
+ actor: HandoffActor,
+ correlationId: string,
+): Promise<InitiateResult> {
+ if (!actor.claims.active) {
+ log({
+ severity: 'ERROR',
+ code: 'handoff.initiate.inactive',
+ message: 'Caller account is not active',
+ data: { uid: actor.uid, correlationId },
+ })
+ return { success: false, errorCode: 'permission-denied' }
+ }
+
+ const municipalityId = actor.claims.municipalityId
+ if (!municipalityId) {
+ log({
+ severity: 'ERROR',
+ code: 'handoff.initiate.missing_municipality',
+ message: 'municipalityId missing',
+ data: { uid: actor.uid, correlationId },
+ })
+ return { success: false, errorCode: 'permission-denied' }
+ }
+
+ const handoffId = createHash('sha256')
+ .update(`${actor.uid}:${input.idempotencyKey}`)
+ .digest('hex')
+ .slice(0, 20)
+
+ const result = await db.runTransaction(async (tx) => {
+ const existingRef = db.collection('shift_handoffs').doc(handoffId)
+ const existing = await tx.get(existingRef)
+ if (existing.exists) {
+ return { success: true as const, handoffId }
+ }
+
+ const [opsSnap, dispatchSnap] = await Promise.all([
+ db
+ .collection('report_ops')
+ .where('municipalityId', '==', municipalityId)
+ .where('status', 'in', ACTIVE_REPORT_STATUSES)
+ .get(),
+ db
+ .collection('dispatches')
+ .where('municipalityId', '==', municipalityId)
+ .where('status', 'in', ACTIVE_DISPATCH_STATUSES)
+ .get(),
+ ])
+
+ const activeIncidentIds = [
+ ...opsSnap.docs.map((d) => d.id),
+ ...dispatchSnap.docs.map((d) => d.id),
+ ]
+
+ const now = Timestamp.now()
+
+ tx.set(existingRef, {
+ fromUid: actor.uid,
+ municipalityId,
+ notes: input.notes,
+ activeIncidentIds,
+ status: 'pending',
+ createdAt: now,
+ expiresAt: Timestamp.fromMillis(now.toMillis() + 30 * 60 * 1000),
+ schemaVersion: 1,
+ })
+
+ log({
+ severity: 'INFO',
+ code: 'handoff.initiated',
+ message: `Shift handoff ${handoffId} created by ${actor.uid}`,
+ data: { handoffId, uid: actor.uid, correlationId },
+ })
+ return { success: true as const, handoffId }
+ })
+
+ return result
+}
+
+export async function acceptShiftHandoffCore(
+ db: FirebaseFirestore.Firestore,
+ input: z.infer<typeof acceptSchema>,
+ actor: HandoffActor,
+ correlationId: string,
+): Promise<AcceptResult> {
+ if (!actor.claims.active) {
+ log({
+ severity: 'ERROR',
+ code: 'handoff.accept.inactive',
+ message: 'Caller account is not active',
+ data: { uid: actor.uid, correlationId },
+ })
+ return { success: false, errorCode: 'permission-denied' }
+ }
+
+ const { result: cached } = await withIdempotency<z.infer<typeof acceptSchema>, AcceptResult>(
+ db,
+ { key: `acceptShiftHandoff:${actor.uid}:${input.idempotencyKey}`, payload: input },
+ async () => {
+ return db.runTransaction(async (tx) => {
+ const snap = await tx.get(db.collection('shift_handoffs').doc(input.handoffId))
+ if (!snap.exists) return { success: false, errorCode: 'not-found' }
+
+ const handoff = snap.data() as ShiftHandoff | undefined
+ if (handoff === undefined) return { success: false, errorCode: 'not-found' }
+
+ if (
+ actor.claims.role === 'municipal_admin' &&
+ handoff.municipalityId !== actor.claims.municipalityId
+ ) {
+ log({
+ severity: 'ERROR',
+ code: 'handoff.accept.wrong_municipality',
+ message: `Municipality mismatch: ${handoff.municipalityId} vs ${actor.claims.municipalityId ?? 'undefined'}`,
+ data: { handoffId: input.handoffId, uid: actor.uid, correlationId },
+ })
+ return { success: false, errorCode: 'permission-denied' }
+ }
+
+ if (handoff.fromUid === actor.uid) {
+ return { success: false, errorCode: 'failed-precondition' }
+ }
+
+ if (handoff.expiresAt.toMillis() < Date.now()) {
+ return { success: false, errorCode: 'failed-precondition' }
+ }
+
+ if (handoff.status === 'accepted') return { success: true as const }
+
+ tx.update(snap.ref, {
+ status: 'accepted',
+ toUid: actor.uid,
+ acceptedAt: Timestamp.now(),
+ })
+
+ log({
+ severity: 'INFO',
+ code: 'handoff.accepted',
+ message: `Handoff ${input.handoffId} accepted by ${actor.uid}`,
+ data: { handoffId: input.handoffId, uid: actor.uid, correlationId },
+ })
+ return { success: true as const }
+ })
+ },
+ )
+
+ return cached
+}
+
+export const initiateShiftHandoff = onCall(
+ { region: 'asia-southeast1', enforceAppCheck: true, maxInstances: 100 },
+ async (req: CallableRequest) => {
+ if (!req.auth) throw new HttpsError('unauthenticated', 'sign-in required')
+ const claims = req.auth.token as Record<string, unknown> | null
+ if (!claims) throw new HttpsError('unauthenticated', 'token required')
+ if (!ADMIN_ROLES.includes(claims.role as UserRole)) {
+ throw new HttpsError('permission-denied', 'admin role required')
+ }
+ if (claims.active !== true) {
+ throw new HttpsError('permission-denied', 'account is not active')
+ }
+ if (claims.role === 'municipal_admin' && claims.municipalityId === undefined) {
+ throw new HttpsError('permission-denied', 'municipalityId missing from token claims')
+ }
+
+ const parsed = initiateSchema.safeParse(req.data)
+ if (!parsed.success) throw new HttpsError('invalid-argument', parsed.error.message)
+
+ const rl = await checkRateLimit(adminDb, {
+ key: `initiateShiftHandoff:${req.auth.uid}`,
+ limit: 60,
+ windowSeconds: 60,
+ now: Timestamp.now(),
+ })
+ if (!rl.allowed) {
+ throw new HttpsError('resource-exhausted', 'rate limit', {
+ retryAfterSeconds: rl.retryAfterSeconds,
+ })
+ }
+
+ const correlationId = randomUUID()
+ const actor: HandoffActor = {
+ uid: req.auth.uid,
+ claims: {
+ role: claims.role as UserRole,
+ ...(claims.municipalityId !== undefined
+ ? { municipalityId: claims.municipalityId as string }
+ : {}),
+ active: claims.active as boolean,
+ auth_time: claims.auth_time as number,
+ },
+ }
+
+ try {
+ const result = await initiateShiftHandoffCore(adminDb, parsed.data, actor, correlationId)
+ if (!result.success)
+ throw new HttpsError(result.errorCode as FunctionsErrorCode, 'initiate failed')
+ return result
+ } catch (err: unknown) {
+ if (err instanceof HttpsError) throw err
+ if (err instanceof BantayogError) throw bantayogErrorToHttps(err)
+ throw err
+ }
+ },
+)
+
+export const acceptShiftHandoff = onCall(
+ { region: 'asia-southeast1', enforceAppCheck: true, maxInstances: 100 },
+ async (req: CallableRequest) => {
+ if (!req.auth) throw new HttpsError('unauthenticated', 'sign-in required')
+ const claims = req.auth.token as Record<string, unknown> | null
+ if (!claims) throw new HttpsError('unauthenticated', 'token required')
+ if (!ADMIN_ROLES.includes(claims.role as UserRole)) {
+ throw new HttpsError('permission-denied', 'admin role required')
+ }
+ if (claims.active !== true) {
+ throw new HttpsError('permission-denied', 'account is not active')
+ }
+ if (claims.role === 'municipal_admin' && claims.municipalityId === undefined) {
+ throw new HttpsError('permission-denied', 'municipalityId missing from token claims')
+ }
+
+ const parsed = acceptSchema.safeParse(req.data)
+ if (!parsed.success) throw new HttpsError('invalid-argument', parsed.error.message)
+
+ const rl = await checkRateLimit(adminDb, {
+ key: `acceptShiftHandoff:${req.auth.uid}`,
+ limit: 60,
+ windowSeconds: 60,
+ now: Timestamp.now(),
+ })
+ if (!rl.allowed) {
+ throw new HttpsError('resource-exhausted', 'rate limit', {
+ retryAfterSeconds: rl.retryAfterSeconds,
+ })
+ }
+
+ const correlationId = randomUUID()
+ const actor: HandoffActor = {
+ uid: req.auth.uid,
+ claims: {
+ role: claims.role as UserRole,
+ ...(claims.municipalityId !== undefined
+ ? { municipalityId: claims.municipalityId as string }
+ : {}),
+ active: claims.active as boolean,
+ auth_time: claims.auth_time as number,
+ },
+ }
+
+ try {
+ const result = await acceptShiftHandoffCore(adminDb, parsed.data, actor, correlationId)
+ if (!result.success)
+ throw new HttpsError(result.errorCode as FunctionsErrorCode, 'accept failed')
+ return result
+ } catch (err: unknown) {
+ if (err instanceof HttpsError) throw err
+ if (err instanceof BantayogError) throw bantayogErrorToHttps(err)
+ throw err
+ }
+ },
+)
diff --git a/functions/src/idempotency/guard.ts b/functions/src/idempotency/guard.ts
index d9f4b062..d962884c 100644
--- a/functions/src/idempotency/guard.ts
+++ b/functions/src/idempotency/guard.ts
@@ -13,6 +13,13 @@ export class IdempotencyMismatchError extends Error {
}
}
+export class IdempotencyInProgressError extends Error {
+ constructor(public readonly key: string) {
+ super(`IN_PROGRESS: idempotency key "${key}" is currently being processed by a concurrent call`)
+ this.name = 'IdempotencyInProgressError'
+ }
+}
+
interface WithIdempotencyOptions<TPayload> {
key: string
payload: TPayload
@@ -35,6 +42,7 @@ export async function withIdempotency<TPayload, TResult>(
key: opts.key,
payloadHash: hash,
firstSeenAt: now(),
+ processing: true,
})
return null
}
@@ -42,10 +50,14 @@ export async function withIdempotency(
payloadHash: string
firstSeenAt: number
resultPayload?: TResult
+ processing?: boolean
}
if (data.payloadHash !== hash) {
throw new IdempotencyMismatchError(opts.key, data.firstSeenAt)
}
+ if (data.processing && !('resultPayload' in data)) {
+ throw new IdempotencyInProgressError(opts.key)
+ }
return (data.resultPayload ?? null) as TResult | null
})
@@ -54,6 +66,6 @@ export async function withIdempotency<TPayload, TResult>(
}
const result = await op()
- await keyRef.update({ resultPayload: result, completedAt: now() })
+ await keyRef.update({ resultPayload: result, processing: false, completedAt: now() })
return { result, fromCache: false }
}
diff --git a/functions/src/index.ts b/functions/src/index.ts
index 46fcb5a1..17896da9 100644
--- a/functions/src/index.ts
+++ b/functions/src/index.ts
@@ -1,6 +1,10 @@
// Cloud Functions v2 entry point.
export { setStaffClaims, suspendStaffAccount } from './auth/account-lifecycle.js'
-export { withIdempotency, IdempotencyMismatchError } from './idempotency/guard.js'
+export {
+ withIdempotency,
+ IdempotencyMismatchError,
+ IdempotencyInProgressError,
+} from './idempotency/guard.js'
export { requestUploadUrl } from './callables/request-upload-url.js'
export { verifyReport } from './callables/verify-report.js'
export { requestLookup } from './callables/request-lookup.js'
@@ -20,6 +24,9 @@ export { enterFieldMode, exitFieldMode } from './callables/enter-field-mode.js'
export { shareReport } from './callables/share-report.js'
export { addCommandChannelMessage } from './callables/add-command-channel-message.js'
export { borderAutoShareTrigger } from './triggers/border-auto-share.js'
+export { duplicateClusterTrigger } from './triggers/duplicate-cluster-trigger.js'
+export { mergeDuplicates } from './callables/merge-duplicates.js'
+export { initiateShiftHandoff, acceptShiftHandoff } from './callables/shift-handoff.js'
// onMediaFinalize is lazily instantiated to avoid triggering Firebase Functions v2
// storage import-time env checks (FIREBASE_CONFIG) during unit testing.
diff --git a/functions/src/scheduled/admin-operations-sweep.ts b/functions/src/scheduled/admin-operations-sweep.ts
index 9dbd87ea..35a979ef 100644
--- a/functions/src/scheduled/admin-operations-sweep.ts
+++ b/functions/src/scheduled/admin-operations-sweep.ts
@@ -29,22 +29,104 @@ export async function adminOperationsSweepCore(
const BATCH_SIZE = 50
for (let i = 0; i < toEscalate.length; i += BATCH_SIZE) {
const batch = toEscalate.slice(i, i + BATCH_SIZE)
- await Promise.all(
+ const results = await Promise.allSettled(
batch.map(async (d) => {
- await d.ref.update({ escalatedAt: deps.now.toMillis() })
+ await db.runTransaction(async (tx) => {
+ const latest = await tx.get(d.ref)
+ const latestData = latest.data()
+ if (latestData?.status === 'pending' && latestData.escalatedAt == null) {
+ tx.update(d.ref, { escalatedAt: deps.now.toMillis() })
+ log({
+ severity: 'INFO',
+ code: 'sweep.agency.escalated',
+ message: `Escalated agency request ${d.id}`,
+ })
+ } else {
+ log({
+ severity: 'INFO',
+ code: 'sweep.agency.skipped',
+ message: `Skipped agency request ${d.id}: status=${String(latestData?.status)}, escalatedAt=${String(latestData?.escalatedAt)}`,
+ })
+ }
+ })
+ }),
+ )
+ results.forEach((result, idx) => {
+ if (result.status === 'rejected') {
+ const doc = batch[idx]
+ if (!doc) return
log({
- severity: 'INFO',
- code: 'sweep.agency.escalated',
- message: `Escalated agency request ${d.id}`,
+ severity: 'ERROR',
+ code: 'sweep.agency.escalate_failed',
+ message: `Failed to escalate agency request ${doc.id}: ${String(result.reason)}`,
+ data: { docId: doc.id, error: String(result.reason) },
+ })
+ }
+ })
+ }
+
+ // Shift handoff escalation: pending > 30min with no escalatedAt
+ const pendingHandoffs = await db
+ .collection('shift_handoffs')
+ .where('status', '==', 'pending')
+ .where('createdAt', '<', cutoff)
+ .where('escalatedAt', '==', null)
+ .get()
+
+ const toEscalateHandoffs = pendingHandoffs.docs
+ for (let i = 0; i < toEscalateHandoffs.length; i += BATCH_SIZE) {
+ const batch = toEscalateHandoffs.slice(i, i + BATCH_SIZE)
+ const results = await Promise.allSettled(
+ batch.map(async (d) => {
+ await db.runTransaction(async (tx) => {
+ const latest = await tx.get(d.ref)
+ const latestData = latest.data()
+ if (latestData?.status === 'pending' && latestData.escalatedAt == null) {
+ tx.update(d.ref, { escalatedAt: deps.now.toMillis() })
+ log({
+ severity: 'INFO',
+ code: 'sweep.handoff.escalated',
+ message: `Escalated handoff ${d.id}`,
+ })
+ } else {
+ log({
+ severity: 'INFO',
+ code: 'sweep.handoff.skipped',
+ message: `Skipped handoff ${d.id}: status=${String(latestData?.status)}, escalatedAt=${String(latestData?.escalatedAt)}`,
+ })
+ }
})
}),
)
+ results.forEach((result, idx) => {
+ if (result.status === 'rejected') {
+ const doc = batch[idx]
+ if (!doc) return
+ log({
+ severity: 'ERROR',
+ code: 'sweep.handoff.escalate_failed',
+ message: `Failed to escalate handoff ${doc.id}: ${String(result.reason)}`,
+ data: { docId: doc.id, error: String(result.reason) },
+ })
+ }
+ })
}
}
export const adminOperationsSweep = onSchedule(
{ schedule: 'every 10 minutes', region: 'asia-southeast1', timeoutSeconds: 120 },
async () => {
- await adminOperationsSweepCore(adminDb, { now: Timestamp.now() })
+ try {
+ await adminOperationsSweepCore(adminDb, { now: Timestamp.now() })
+ } catch (err: unknown) {
+ const message = err instanceof Error ? err.message : String(err)
+ log({
+ severity: 'ERROR',
+ code: 'sweep.failed',
+ message: `Admin operations sweep failed: ${message}`,
+ data: { error: message },
+ })
+ throw err
+ }
},
)
diff --git a/functions/src/triggers/duplicate-cluster-trigger.ts b/functions/src/triggers/duplicate-cluster-trigger.ts
new file mode 100644
index 00000000..58af6d06
--- /dev/null
+++ b/functions/src/triggers/duplicate-cluster-trigger.ts
@@ -0,0 +1,148 @@
+import { onDocumentCreated } from 'firebase-functions/v2/firestore'
+import * as ngeohash from 'ngeohash'
+import * as turf from '@turf/turf'
+import type { QueryDocumentSnapshot } from 'firebase-admin/firestore'
+import { adminDb } from '../admin-init.js'
+import { logDimension } from '@bantayog/shared-validators'
+
+const log = logDimension('duplicateClusterTrigger')
+
+const NON_TERMINAL_STATUSES = [
+ 'new',
+ 'awaiting_verify',
+ 'verified',
+ 'assigned',
+ 'acknowledged',
+ 'en_route',
+ 'on_scene',
+ 'reopened',
+]
+const TWO_H_MS = 2 * 60 * 60 * 1000
+const PROXIMITY_METERS = 200
+const BATCH_CAP = 250
+
+export async function duplicateClusterTriggerCore(
+ db: FirebaseFirestore.Firestore,
+ snap: QueryDocumentSnapshot,
+): Promise {
+ const data = snap.data()
+ const {
+ locationGeohash,
+ municipalityId,
+ reportType,
+ createdAt,
+ duplicateClusterId: existingCluster,
+ } = data
+
+ if (typeof locationGeohash !== 'string' || locationGeohash.length < 6) return
+ if (typeof municipalityId !== 'string' || municipalityId.length === 0) return
+ if (typeof reportType !== 'string' || reportType.length === 0) return
+
+ if (typeof createdAt !== 'number' || !Number.isFinite(createdAt)) return
+ const nowMs = createdAt
+ const cutoff = nowMs - TWO_H_MS
+
+ const candidates = await db
+ .collection('report_ops')
+ .where('municipalityId', '==', municipalityId)
+ .where('reportType', '==', reportType)
+ .where('status', 'in', NON_TERMINAL_STATUSES)
+ .where('createdAt', '>', cutoff)
+ .orderBy('createdAt', 'desc')
+ .limit(300)
+ .get()
+
+ const prefix = locationGeohash.slice(0, 6)
+ const neighborPrefixes = new Set([prefix, ...ngeohash.neighbors(prefix)])
+ let triggerPoint: { latitude: number; longitude: number }
+ try {
+ triggerPoint = ngeohash.decode(locationGeohash)
+ } catch {
+ return
+ }
+ const triggerCoord = turf.point([triggerPoint.longitude, triggerPoint.latitude])
+
+ const nearby = candidates.docs.filter((d) => {
+ if (d.id === snap.id) return false
+ const gh = d.data().locationGeohash
+ if (typeof gh !== 'string' || gh.length < 6) return false
+ if (!neighborPrefixes.has(gh.slice(0, 6))) return false
+ try {
+ const pt = ngeohash.decode(gh)
+ const dist = turf.distance(turf.point([pt.longitude, pt.latitude]), triggerCoord, {
+ units: 'meters',
+ })
+ return dist <= PROXIMITY_METERS
+ } catch {
+ return false
+ }
+ })
+
+ if (nearby.length === 0) return
+
+ const normalizeClusterId = (value: unknown): string | undefined => {
+ if (typeof value !== 'string') return undefined
+ const trimmed = value.trim()
+ return trimmed.length > 0 ? trimmed : undefined
+ }
+
+ const normalizedExistingCluster = normalizeClusterId(existingCluster)
+
+ const existingClusterFromNearby = nearby
+ .map((d): unknown => d.data().duplicateClusterId)
+ .map(normalizeClusterId)
+ .find((value): value is string => value !== undefined)
+
+ const clusterId = normalizedExistingCluster ?? existingClusterFromNearby ?? crypto.randomUUID()
+
+ const needsUpdate = nearby.filter((d) => d.data().duplicateClusterId !== clusterId)
+ const maxNearbyUpdates = existingCluster !== clusterId ? BATCH_CAP - 1 : BATCH_CAP
+ const toUpdate = needsUpdate.slice(0, maxNearbyUpdates)
+
+ if (needsUpdate.length > maxNearbyUpdates) {
+ log({
+ severity: 'WARNING',
+ code: 'dup.cluster.truncated',
+ message: `Truncated duplicate cluster from ${String(needsUpdate.length)} to ${String(maxNearbyUpdates)} docs`,
+ data: { reportId: snap.id, nearbyCount: needsUpdate.length, batchCap: maxNearbyUpdates },
+ })
+ }
+
+ if (toUpdate.length === 0 && existingCluster === clusterId) return
+
+ const batch = db.batch()
+ if (existingCluster !== clusterId) {
+ batch.update(snap.ref, { duplicateClusterId: clusterId })
+ }
+ for (const d of toUpdate) {
+ batch.update(d.ref, { duplicateClusterId: clusterId })
+ }
+ await batch.commit()
+
+ const assignedCount = toUpdate.length + (existingCluster !== clusterId ? 1 : 0)
+ log({
+ severity: 'INFO',
+ code: 'dup.cluster.assigned',
+ message: `Assigned ${String(assignedCount)} docs to cluster ${clusterId}`,
+ })
+}
+
/**
 * Firestore onCreate trigger for `report_ops/{reportId}`.
 *
 * Delegates clustering to duplicateClusterTriggerCore, then logs and rethrows
 * any failure so the error is recorded with structured context (the platform
 * may retry only if retries are enabled for this trigger — confirm config).
 */
export const duplicateClusterTrigger = onDocumentCreated(
  { document: 'report_ops/{reportId}', region: 'asia-southeast1' },
  async (event) => {
    // Guard: the event payload's snapshot can be absent; nothing to do then.
    const snap = event.data
    if (!snap) return
    try {
      await duplicateClusterTriggerCore(adminDb, snap)
    } catch (err: unknown) {
      const message = err instanceof Error ? err.message : String(err)
      // Log before rethrowing so the failure is searchable by reportId.
      log({
        severity: 'ERROR',
        code: 'dup.cluster.trigger_failed',
        message: `Duplicate cluster trigger failed for ${event.params.reportId}: ${message}`,
        data: { reportId: event.params.reportId, error: message },
      })
      throw err
    }
  },
)
diff --git a/infra/firebase/firestore.indexes.json b/infra/firebase/firestore.indexes.json
index 11fafb86..e7aa4760 100644
--- a/infra/firebase/firestore.indexes.json
+++ b/infra/firebase/firestore.indexes.json
@@ -243,6 +243,15 @@
{ "fieldPath": "escalatedAt", "order": "ASCENDING" }
]
},
+ {
+ "collectionGroup": "shift_handoffs",
+ "queryScope": "COLLECTION",
+ "fields": [
+ { "fieldPath": "status", "order": "ASCENDING" },
+ { "fieldPath": "escalatedAt", "order": "ASCENDING" },
+ { "fieldPath": "createdAt", "order": "ASCENDING" }
+ ]
+ },
{
"collectionGroup": "shift_handoffs",
"queryScope": "COLLECTION",
@@ -287,6 +296,35 @@
{ "fieldPath": "zoneType", "order": "ASCENDING" },
{ "fieldPath": "deletedAt", "order": "ASCENDING" }
]
+ },
+ {
+ "collectionGroup": "report_ops",
+ "queryScope": "COLLECTION",
+ "fields": [
+ { "fieldPath": "municipalityId", "order": "ASCENDING" },
+ { "fieldPath": "status", "order": "ASCENDING" },
+ { "fieldPath": "createdAt", "order": "DESCENDING" }
+ ]
+ },
+ {
+ "collectionGroup": "report_ops",
+ "queryScope": "COLLECTION",
+ "fields": [
+ { "fieldPath": "municipalityId", "order": "ASCENDING" },
+ { "fieldPath": "reportType", "order": "ASCENDING" },
+ { "fieldPath": "status", "order": "ASCENDING" },
+ { "fieldPath": "createdAt", "order": "DESCENDING" }
+ ]
+ },
+ {
+ "collectionGroup": "report_ops",
+ "queryScope": "COLLECTION",
+ "fields": [
+ { "fieldPath": "municipalityId", "order": "ASCENDING" },
+ { "fieldPath": "reportType", "order": "ASCENDING" },
+ { "fieldPath": "status", "order": "ASCENDING" },
+ { "fieldPath": "createdAt", "order": "ASCENDING" }
+ ]
}
],
"fieldOverrides": []
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 93b3e1fc..20fa05f3 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -103,6 +103,9 @@ importers:
'@testing-library/react':
specifier: ^16.0.0
version: 16.3.2(@testing-library/dom@10.4.1)(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.5(react@19.2.5))(react@19.2.5)
+ '@testing-library/user-event':
+ specifier: ^14.5.2
+ version: 14.6.1(@testing-library/dom@10.4.1)
'@types/react':
specifier: ^19.2.14
version: 19.2.14
diff --git a/scripts/agent-gate-stage1.sh b/scripts/agent-gate-stage1.sh
new file mode 100755
index 00000000..4e75bd00
--- /dev/null
+++ b/scripts/agent-gate-stage1.sh
@@ -0,0 +1,72 @@
#!/usr/bin/env bash
# Usage: scripts/agent-gate-stage1.sh <slug> <worktree>
# Exit 0 = PASS
# Exit 1 = FAIL (hard fail — retry or escalate)
# Exit 2 = FAIL-OPEN (discovered_required_files populated — Claude Code decides)
set -uo pipefail
SLUG="${1:?Usage: agent-gate-stage1.sh <slug> <worktree>}"
WORKTREE="${2:?Usage: agent-gate-stage1.sh <slug> <worktree>}"
PLANS_DIR="$WORKTREE/.claude/plans"
TASKS_DIR="docs/agent-tasks"
fail() { echo "STAGE1 FAIL: $*" >&2; exit 1; }
fail_open() { echo "STAGE1 FAIL-OPEN: $*" >&2; exit 2; }
pass() { echo "STAGE1 PASS: $SLUG"; exit 0; }
# 1. Companion JSON must exist and parse.
#    sort before head -1 so the pick is deterministic when several match.
COMPANION_JSON=$(find "$TASKS_DIR" -maxdepth 1 -name "*-${SLUG}.json" 2>/dev/null | sort | head -1)
[[ -z "$COMPANION_JSON" ]] && fail "companion JSON not found for slug: $SLUG"
jq empty "$COMPANION_JSON" 2>/dev/null || fail "companion JSON is invalid"
# 2. Result JSON must exist and parse.
RESULT_JSON="$PLANS_DIR/exxeed-${SLUG}-result.json"
[[ ! -f "$RESULT_JSON" ]] && fail "result JSON not found: $RESULT_JSON"
jq empty "$RESULT_JSON" 2>/dev/null || fail "result JSON is invalid"
# 3. Re-run verification command — never eval an empty/null command.
VERIFICATION_CMD=$(jq -r '.verification_command' "$COMPANION_JSON")
[[ -z "$VERIFICATION_CMD" || "$VERIFICATION_CMD" == "null" ]] && \
  fail "verification_command not set in companion JSON"
BASE_COMMIT=$(jq -r '.base_commit' "$COMPANION_JSON")
[[ -z "$BASE_COMMIT" || "$BASE_COMMIT" == "null" ]] && \
  fail "base_commit not set in companion JSON"
echo "Stage 1: Re-running verification: $VERIFICATION_CMD"
if ! (cd "$WORKTREE" && eval "$VERIFICATION_CMD" 2>&1); then
  CLAIMED=$(jq -r '.verification_exit_code' "$RESULT_JSON")
  fail "verification command failed (agent claimed exit_code=$CLAIMED)"
fi
# 4. File allowlist check against pinned base_commit.
ALLOWED=$(jq -r \
  '(.allowed_files.create // [])[], (.allowed_files.modify // [])[], (.allowed_files.delete // [])[]' \
  "$COMPANION_JSON" 2>/dev/null | sort -u)
ACTUAL=$(git -C "$WORKTREE" diff --name-only "$BASE_COMMIT" 2>/dev/null | sort)
DISALLOWED=$(comm -23 <(echo "$ACTUAL") <(echo "$ALLOWED") | grep -v '^$' || true)
if [[ -n "$DISALLOWED" ]]; then
  fail "files changed outside allowed_files:
$DISALLOWED"
fi
# 5. discovered_required_files — fail-open so the orchestrator can respawn.
DISCOVERED=$(jq '.discovered_required_files | length' "$RESULT_JSON" 2>/dev/null || echo "0")
if [[ "$DISCOVERED" -gt 0 ]]; then
  FILES=$(jq -r '.discovered_required_files[]' "$RESULT_JSON")
  fail_open "agent found required files outside allowed list — update companion JSON and respawn:
$FILES"
fi
# 6. No unresolved open items.
FAILED_ITEMS=$(jq -r '.open_items[] | select(contains("❌"))' "$RESULT_JSON" 2>/dev/null || true)
[[ -n "$FAILED_ITEMS" ]] && fail "open_items contains unresolved ❌ items:
$FAILED_ITEMS"
# 7. modifies_lockfile consistency with dag.json (skipped when no dag.json).
DAG_JSON=$(find "$TASKS_DIR" -maxdepth 1 -name '*dag.json' 2>/dev/null | sort | head -1)
if [[ -n "$DAG_JSON" && -f "$DAG_JSON" ]]; then
  COMPANION_LF=$(jq -r '.modifies_lockfile' "$COMPANION_JSON")
  DAG_LF=$(jq -r --arg s "$SLUG" '.[$s].modifies_lockfile // "null"' "$DAG_JSON")
  [[ "$COMPANION_LF" != "$DAG_LF" ]] && \
    fail "modifies_lockfile mismatch: companion=$COMPANION_LF dag=$DAG_LF"
fi
# 8. blocks/blocked_by symmetry: every edge we declare must be mirrored.
while IFS= read -r blocked_slug; do
  [[ -z "$blocked_slug" ]] && continue
  BLOCKED_JSON=$(find "$TASKS_DIR" -maxdepth 1 -name "*-${blocked_slug}.json" 2>/dev/null | sort | head -1)
  [[ -z "$BLOCKED_JSON" ]] && continue
  if ! jq -e --arg s "$SLUG" '.blocked_by[] | select(. == $s)' "$BLOCKED_JSON" &>/dev/null; then
    fail "asymmetric edge: $SLUG.blocks has $blocked_slug, but $blocked_slug.blocked_by missing $SLUG"
  fi
done < <(jq -r '.blocks[]? // empty' "$COMPANION_JSON" 2>/dev/null)
pass
diff --git a/scripts/agent-gate-stage2-combined.sh b/scripts/agent-gate-stage2-combined.sh
new file mode 100755
index 00000000..6fa2a00d
--- /dev/null
+++ b/scripts/agent-gate-stage2-combined.sh
@@ -0,0 +1,25 @@
#!/usr/bin/env bash
# Stage 2 Run B — full combined staging branch quality gate.
# Checks out staging-branch, runs turbo affected, all rules tests, secrets, lockfile.
# Usage: agent-gate-stage2-combined.sh <staging-branch> [repo-path]
set -uo pipefail
STAGING_BRANCH="${1:?Usage: agent-gate-stage2-combined.sh <staging-branch> [repo-path]}"
REPO="${2:-.}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
fail() { echo "STAGE2 FAIL (Run B): $*" >&2; exit 1; }
cd "$REPO" || fail "cannot cd into $REPO"
git checkout "$STAGING_BRANCH" || fail "cannot checkout $STAGING_BRANCH"
echo "Stage 2 Run B: $STAGING_BRANCH"
# Turbo affected — covers all changed packages
echo "→ turbo lint typecheck test --affected"
npx turbo run lint typecheck test --affected || fail "turbo affected"
# All Firestore/RTDB/Storage rules in emulator
echo "→ test:rules:firestore (combined)"
firebase emulators:exec --only firestore,database,storage \
  "pnpm --filter @bantayog/functions run test:rules:firestore" || fail "test:rules:firestore"
# Secrets scan across entire staging branch diff
echo "→ check-secrets (all)"
"$SCRIPT_DIR/check-secrets.sh" all "$REPO" || fail "check-secrets"
# Lockfile integrity
echo "→ check-lockfile-integrity"
"$SCRIPT_DIR/check-lockfile-integrity.sh" "$REPO" || fail "check-lockfile-integrity"
echo "STAGE2 PASS (Run B): $STAGING_BRANCH"
diff --git a/scripts/agent-gate-stage2.sh b/scripts/agent-gate-stage2.sh
new file mode 100755
index 00000000..fd5f141c
--- /dev/null
+++ b/scripts/agent-gate-stage2.sh
@@ -0,0 +1,40 @@
#!/usr/bin/env bash
# Usage: scripts/agent-gate-stage2.sh <package-filter> <worktree>
# Stage 2 Run A — per-task quality gate.
# Emulator tests only run for @bantayog/functions tasks.
set -uo pipefail
FILTER="${1:?Usage: agent-gate-stage2.sh <package-filter> <worktree>}"
WORKTREE="${2:?Usage: agent-gate-stage2.sh <package-filter> <worktree>}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
BASELINES="$REPO_ROOT/.lint-baselines.json"
fail() { echo "STAGE2 FAIL (Run A): $*" >&2; exit 1; }
[[ ! -f "$BASELINES" ]] && fail ".lint-baselines.json not found — run scripts/generate-lint-baselines.sh"
MAX_WARNINGS=$(jq -r --arg f "$FILTER" '.[$f] // 0' "$BASELINES" 2>/dev/null || echo "0")
echo "Stage 2 Run A: filter=$FILTER worktree=$WORKTREE max_warnings=$MAX_WARNINGS"
cd "$WORKTREE" || fail "cannot cd into $WORKTREE"
# Lint — warning count must not regress past the recorded baseline.
echo "→ lint"
pnpm --filter "$FILTER" lint -- --max-warnings="$MAX_WARNINGS" || fail "lint"
# Typecheck
echo "→ typecheck"
pnpm --filter "$FILTER" typecheck || fail "typecheck"
# Unit tests with coverage
echo "→ test"
pnpm --filter "$FILTER" test -- --coverage || fail "test"
# Rules tests — only for functions (serialized by caller to avoid emulator port collisions)
if [[ "$FILTER" == *"functions"* ]]; then
  echo "→ test:rules:firestore (emulator)"
  firebase emulators:exec --only firestore,database,storage \
    "pnpm --filter $FILTER run test:rules:firestore" || fail "test:rules:firestore"
fi
# Check scripts
echo "→ check-no-any"
"$SCRIPT_DIR/check-no-any.sh" "$FILTER" "$WORKTREE" || fail "check-no-any"
echo "→ check-no-empty-catch"
"$SCRIPT_DIR/check-no-empty-catch.sh" "$FILTER" "$WORKTREE" || fail "check-no-empty-catch"
echo "→ check-secrets"
"$SCRIPT_DIR/check-secrets.sh" "$FILTER" "$WORKTREE" || fail "check-secrets"
echo "→ check-lockfile-integrity"
"$SCRIPT_DIR/check-lockfile-integrity.sh" "$WORKTREE" || fail "check-lockfile-integrity"
echo "STAGE2 PASS (Run A): $FILTER"
diff --git a/scripts/check-lockfile-integrity.sh b/scripts/check-lockfile-integrity.sh
new file mode 100755
index 00000000..f7203702
--- /dev/null
+++ b/scripts/check-lockfile-integrity.sh
@@ -0,0 +1,11 @@
#!/usr/bin/env bash
# Usage: check-lockfile-integrity.sh [worktree-path]
# Fails if pnpm-lock.yaml is not in sync with package.json files.
set -uo pipefail
WORKTREE="${1:-.}"
cd "$WORKTREE" || { echo "FAIL: cannot cd into $WORKTREE"; exit 1; }
# --lockfile-only avoids touching node_modules; --frozen-lockfile makes pnpm
# error out instead of rewriting the lockfile when it is stale. Capture the
# output so the failure reason is shown instead of being discarded.
if ! OUTPUT=$(pnpm install --frozen-lockfile --lockfile-only 2>&1); then
  echo "FAIL: pnpm-lock.yaml is out of sync with package.json"
  echo "$OUTPUT"
  exit 1
fi
echo "PASS: pnpm-lock.yaml integrity check"
diff --git a/scripts/check-no-any.sh b/scripts/check-no-any.sh
new file mode 100755
index 00000000..e706de44
--- /dev/null
+++ b/scripts/check-no-any.sh
@@ -0,0 +1,30 @@
#!/usr/bin/env bash
# Usage: check-no-any.sh <package-filter|all> <worktree>
# Fails if TypeScript source contains `: any` or `as any` patterns.
set -uo pipefail
FILTER="${1:?Usage: check-no-any.sh <package-filter|all> <worktree>}"
WORKTREE="${2:?Usage: check-no-any.sh <package-filter|all> <worktree>}"
SRC_DIR=""
if [[ "$FILTER" == "all" ]]; then
  # "all" scans the whole worktree; no package lookup needed.
  SRC_DIR="$WORKTREE"
else
  # Resolve the package directory whose package.json "name" matches FILTER.
  while IFS= read -r pkg_json; do
    pkg_name=$(jq -r '.name' "$pkg_json" 2>/dev/null)
    if [[ "$pkg_name" == "$FILTER" ]]; then
      SRC_DIR="$(dirname "$pkg_json")/src"
      break
    fi
  done < <(find "$WORKTREE" -name "package.json" -not -path "*/node_modules/*" -not -path "*/.git/*")
  if [[ -z "$SRC_DIR" || ! -d "$SRC_DIR" ]]; then
    echo "PASS: no src dir for $FILTER (skipping)"
    exit 0
  fi
fi
MATCHES=$(grep -rnE ": any\b|as any\b" --include="*.ts" --include="*.tsx" "$SRC_DIR" \
  | grep -v "// eslint-disable\|catch.*: unknown\|catch {" || true)
if [[ -n "$MATCHES" ]]; then
  echo "FAIL: 'any' types found in $FILTER:"
  echo "$MATCHES"
  exit 1
fi
echo "PASS: no 'any' types in $FILTER"
diff --git a/scripts/check-no-empty-catch.sh b/scripts/check-no-empty-catch.sh
new file mode 100755
index 00000000..aca496dc
--- /dev/null
+++ b/scripts/check-no-empty-catch.sh
@@ -0,0 +1,32 @@
#!/usr/bin/env bash
# Usage: check-no-empty-catch.sh <package-filter|all> <worktree>
# Fails if TypeScript source contains empty catch blocks: catch { } or catch (e) { }
set -uo pipefail
FILTER="${1:?Usage: check-no-empty-catch.sh <package-filter|all> <worktree>}"
WORKTREE="${2:?Usage: check-no-empty-catch.sh <package-filter|all> <worktree>}"
SRC_DIR=""
while IFS= read -r pkg_json; do
  pkg_name=$(jq -r '.name' "$pkg_json" 2>/dev/null)
  if [[ "$pkg_name" == "$FILTER" ]]; then
    SRC_DIR="$(dirname "$pkg_json")/src"
    break
  fi
done < <(find "$WORKTREE" -name "package.json" -not -path "*/node_modules/*" -not -path "*/.git/*")
if [[ -z "$SRC_DIR" || ! -d "$SRC_DIR" ]]; then
  if [[ "$FILTER" == "all" ]]; then
    SRC_DIR="$WORKTREE"
  else
    echo "PASS: no src dir for $FILTER (skipping)"
    exit 0
  fi
fi
# Match single-line empty catch bodies, WITH or WITHOUT an error binding:
#   catch {}   catch { }   catch (e) {}   catch (err) { }
# The paren group is optional — the previous pattern consumed the opening
# brace as the "paren" and therefore never matched binding-less catches.
# NOTE(review): multi-line empty catches (brace on next line) are not detected.
# Lines with comments explaining the intentional empty catch are excluded.
MATCHES=$(grep -rnE "catch[[:space:]]*(\([^)]*\))?[[:space:]]*\{[[:space:]]*\}" \
  --include="*.ts" --include="*.tsx" "$SRC_DIR" \
  | grep -v "// intentional\|// transaction contention\|// fire-and-forget" || true)
if [[ -n "$MATCHES" ]]; then
  echo "FAIL: empty catch blocks found in $FILTER:"
  echo "$MATCHES"
  exit 1
fi
echo "PASS: no empty catch blocks in $FILTER"
diff --git a/scripts/check-secrets.sh b/scripts/check-secrets.sh
new file mode 100755
index 00000000..cc3e7e5b
--- /dev/null
+++ b/scripts/check-secrets.sh
@@ -0,0 +1,44 @@
#!/usr/bin/env bash
# Usage: check-secrets.sh <package-filter|all> <worktree>
# Scans for common secret patterns (API keys, private keys, etc.)
set -uo pipefail
FILTER="${1:?Usage: check-secrets.sh <package-filter|all> <worktree>}"
WORKTREE="${2:?Usage: check-secrets.sh <package-filter|all> <worktree>}"
PATTERNS=(
  "AIza[0-9A-Za-z\\-_]{35}"            # Google API Key
  # Real PEM headers are e.g. "BEGIN RSA PRIVATE KEY", "BEGIN EC PRIVATE KEY",
  # "BEGIN OPENSSH PRIVATE KEY", "BEGIN PRIVATE KEY" — match them all.
  # (The old "(RSA|EC|PRIVATE) KEY" form matched none of the multi-word ones.)
  "-----BEGIN [A-Z ]*PRIVATE KEY-----"
  "\"[a-zA-Z0-9]{32,}\""               # Generic long hex/base64 strings
  "xox[bp]-[0-9]{12}"                  # Slack tokens
  "sqp_[a-f0-9]{40}"                   # SonarQube tokens
)
SCAN_DIR=""
if [[ "$FILTER" == "all" ]]; then
  SCAN_DIR="$WORKTREE"
else
  while IFS= read -r pkg_json; do
    pkg_name=$(jq -r '.name' "$pkg_json" 2>/dev/null)
    if [[ "$pkg_name" == "$FILTER" ]]; then
      SCAN_DIR="$(dirname "$pkg_json")"
      break
    fi
  done < <(find "$WORKTREE" -name "package.json" -not -path "*/node_modules/*" -not -path "*/.git/*")
fi
if [[ -z "$SCAN_DIR" ]]; then
  echo "PASS: directory not found for $FILTER (skipping)"
  exit 0
fi
FOUND=0
for pattern in "${PATTERNS[@]}"; do
  # Lockfiles and build output are full of long base64 hashes that trip the
  # generic pattern — exclude them alongside the usual noise dirs.
  MATCHES=$(grep -rnE "$pattern" --include="*.ts" --include="*.tsx" --include="*.js" \
    --include="*.json" --exclude-dir=node_modules --exclude-dir=.git \
    --exclude-dir=dist --exclude-dir=coverage --exclude="*lock*" "$SCAN_DIR" \
    | grep -v "\.env\.example\|test\|spec\|mock\|fixture\|// " || true)
  if [[ -n "$MATCHES" ]]; then
    echo "FAIL: potential secret found (pattern: $pattern):"
    echo "$MATCHES"
    FOUND=1
  fi
done
if [[ "$FOUND" -eq 1 ]]; then
  exit 1
fi
echo "PASS: no secrets patterns found in $FILTER"
diff --git a/scripts/detect-flakes.sh b/scripts/detect-flakes.sh
new file mode 100755
index 00000000..817d76ab
--- /dev/null
+++ b/scripts/detect-flakes.sh
@@ -0,0 +1,26 @@
#!/usr/bin/env bash
# Usage: detect-flakes.sh <verification-cmd> <worktree>
# Runs the verification command 3 times (invoked after a failure was observed).
# Exits 0 with "flaky" if it passes >= 2/3 runs.
# Exits 1 with "genuine_failure" if it passes <= 1/3 runs.
set -uo pipefail
CMD="${1:?Usage: detect-flakes.sh <verification-cmd> <worktree>}"
WORKTREE="${2:?Usage: detect-flakes.sh <verification-cmd> <worktree>}"
PASSES=0
for i in 1 2 3; do
  echo "Flake detection: run $i/3"
  # Output is suppressed — only the pass/fail verdict matters here.
  if (cd "$WORKTREE" && eval "$CMD" &>/dev/null); then
    PASSES=$((PASSES + 1))
    echo "  run $i: PASS"
  else
    echo "  run $i: FAIL"
  fi
done
echo "Result: $PASSES/3 passed"
if [[ "$PASSES" -ge 2 ]]; then
  echo "flaky"
  exit 0
else
  echo "genuine_failure"
  exit 1
fi
diff --git a/scripts/generate-lint-baselines.sh b/scripts/generate-lint-baselines.sh
new file mode 100755
index 00000000..2cf3fbf7
--- /dev/null
+++ b/scripts/generate-lint-baselines.sh
@@ -0,0 +1,32 @@
#!/usr/bin/env bash
# Generates .lint-baselines.json on stdout: current ESLint warning count per
# package. Run from the repo root and redirect into .lint-baselines.json.
declare -A PACKAGES=(
  ["@bantayog/functions"]="functions"
  ["@bantayog/shared-validators"]="packages/shared-validators"
  ["@bantayog/shared-ui"]="packages/shared-ui"
  ["@bantayog/shared-types"]="packages/shared-types"
  ["@bantayog/shared-sms-parser"]="packages/shared-sms-parser"
  ["@bantayog/shared-data"]="packages/shared-data"
  ["@bantayog/citizen-pwa"]="apps/citizen-pwa"
  ["@bantayog/admin-desktop"]="apps/admin-desktop"
  ["@bantayog/responder-app"]="apps/responder-app"
  ["@bantayog/e2e-tests"]="e2e-tests"
)
echo "{"
first=true
# Bash associative-array iteration order is unspecified; sort the keys so the
# generated file is deterministic and does not churn in version control.
for pkg in $(printf '%s\n' "${!PACKAGES[@]}" | sort); do
  dir="${PACKAGES[$pkg]}"
  if [[ ! -f "$dir/package.json" ]]; then
    continue
  fi
  # Run lint and count lines that look like warnings.
  # Use (grep -c ... || true) to safely handle zero matches.
  count=$(pnpm --filter "$pkg" lint -- --format unix 2>&1 | (grep -c ": warning" || true))
  count=$(echo "$count" | tr -d '[:space:]')
  # Guard against a crashed lint run leaving a non-numeric count.
  [[ "$count" =~ ^[0-9]+$ ]] || count=0
  if [[ "$first" == "true" ]]; then
    first=false
  else
    printf ",\n"
  fi
  printf '  "%s": %s' "$pkg" "$count"
done
printf "\n}\n"