116 lines
3.5 KiB
JavaScript
116 lines
3.5 KiB
JavaScript
|
|
import { db, getLastSync, setLastSync } from './db.js'
|
||
|
|
import { api } from './api.js'
|
||
|
|
|
||
|
|
// True while a syncPull() is in flight — guards against overlapping pulls
// (the interval timer, the 'online' event, and callers can all trigger one).
let syncing = false

// Live EventSource connection for the server push stream, or null when closed.
let sseSource = null
|
||
|
|
|
||
|
|
/**
 * Pull incremental changes from the server since the last sync cursor and
 * apply them to the local Dexie database in a single 'rw' transaction.
 *
 * @returns {Promise<boolean|undefined>} true on success, false on failure,
 *   undefined when a pull was already in flight (re-entrancy guard).
 */
export async function syncPull() {
  // Re-entrancy guard: pulls can be triggered concurrently by the sync loop,
  // the 'online' event, and explicit callers.
  if (syncing) return
  syncing = true
  try {
    const since = await getLastSync()
    const data = await api.sync.pull(since)

    // Tables whose rows carry soft-delete tombstones (`deleted_at`) from the
    // server. Live rows are upserted; tombstoned rows are purged from Dexie.
    // NOTE(review): volunteer_shifts previously skipped the purge and kept
    // tombstoned rows locally — now treated like the other tables; confirm
    // nothing relied on retained tombstones.
    const tables = ['attendees', 'departments', 'volunteers', 'shifts', 'volunteer_shifts']

    await db.transaction('rw',
      [db.event, db.attendees, db.departments, db.volunteers, db.shifts, db.volunteer_shifts],
      async () => {
        if (data.event) {
          await db.event.put(data.event)
        }
        for (const table of tables) {
          const rows = data[table]
          if (!rows?.length) continue
          // Partition up front: don't write a record we are about to purge
          // in the same transaction (the old code bulkPut everything, then
          // bulkDelete'd the tombstoned subset).
          const live = rows.filter(r => !r.deleted_at)
          const deletedIds = rows.filter(r => r.deleted_at).map(r => r.id)
          if (live.length) await db[table].bulkPut(live)
          if (deletedIds.length) await db[table].bulkDelete(deletedIds)
        }
      }
    )

    // Advance the cursor only after the transaction commits, so a failed
    // pull is retried from the previous server_time.
    await setLastSync(data.server_time)
    return true
  } catch (err) {
    // Offline or server error — report failure; the sync loop retries later.
    console.warn('Sync pull failed:', err.message)
    return false
  } finally {
    syncing = false
  }
}
|
||
|
|
|
||
|
|
/**
 * Open the server-sent-events stream and apply incoming check-in events to
 * the local Dexie database. Reconnects automatically 5s after an error.
 * Idempotent: does nothing if a stream is already open.
 *
 * @param {(payload: object) => void} [onEvent] - called with each parsed
 *   'checkin' payload after the local DB write has been kicked off.
 */
export function startSSE(onEvent) {
  if (sseSource) return

  const connect = () => {
    // EventSource doesn't support custom headers, so the auth token is passed
    // as a query param (acceptable since it's same-origin HTTPS). The token
    // is read from the Dexie session table.
    db.session.get(1).then(session => {
      if (!session?.token) return
      // A stale reconnect timer may fire after a fresh startSSE() already
      // opened a stream — never stack a second connection.
      if (sseSource) return

      sseSource = new EventSource(`/api/sync/stream?token=${encodeURIComponent(session.token)}`)

      sseSource.onmessage = (e) => {
        try {
          const payload = JSON.parse(e.data)
          if (payload.event === 'checkin') {
            // Apply the check-in to local Dexie immediately. Fire-and-forget,
            // but swallow rejections explicitly so they don't surface as
            // unhandled promise rejections (the next sync pull reconciles).
            if (payload.data?.type === 'attendee' && payload.data?.attendee) {
              db.attendees.put(payload.data.attendee).catch(() => {})
            }
            if (payload.data?.type === 'volunteer' && payload.data?.volunteer) {
              db.volunteers.put(payload.data.volunteer).catch(() => {})
            }
            onEvent?.(payload)
          }
        } catch {
          // Malformed SSE payload — ignore; pull-based sync will reconcile.
        }
      }

      sseSource.onerror = () => {
        // We close the source ourselves, so the browser's built-in retry
        // won't run — reconnect manually after 5s.
        // NOTE(review): a pending reconnect timer survives stopSSE() and may
        // reopen the stream afterwards — confirm whether that matters.
        sseSource?.close()
        sseSource = null
        setTimeout(connect, 5000)
      }
    }).catch(() => {
      // Session read failed (the old code silently gave up forever here);
      // retry the whole connect in 5s.
      setTimeout(connect, 5000)
    })
  }

  connect()
}
|
||
|
|
|
||
|
|
/** Tear down the SSE stream, if one is open. Safe to call when already stopped. */
export function stopSSE() {
  if (sseSource !== null) {
    sseSource.close()
    sseSource = null
  }
}
|
||
|
|
|
||
|
|
// Handle for the fixed-interval sync poll (runs only while navigator.onLine).
// NOTE(review): the original comment promised "exponential backoff on
// failure", but no backoff is implemented anywhere in this file — syncPull()
// simply returns false and the next tick retries at the same interval.
let syncInterval = null
|
||
|
|
|
||
|
|
/**
 * Start polling syncPull() on a fixed interval while the browser is online,
 * and trigger an immediate pull whenever connectivity returns.
 * Idempotent: does nothing if the loop is already running.
 *
 * @param {number} [intervalMs=30000] - poll interval in milliseconds.
 */
export function startSyncLoop(intervalMs = 30000) {
  if (syncInterval) return

  syncInterval = setInterval(() => {
    // Skip the round-trip entirely while the browser reports offline.
    if (navigator.onLine) syncPull()
  }, intervalMs)

  // Register the 'online' listener exactly once. The original code added a
  // fresh listener on every start/stop cycle and never removed it, leaking
  // duplicate handlers that each fired a syncPull().
  if (!startSyncLoop._onlineListenerAdded) {
    startSyncLoop._onlineListenerAdded = true
    window.addEventListener('online', () => syncPull())
  }
}
|
||
|
|
|
||
|
|
/** Stop the periodic sync poll. Safe to call when the loop is not running. */
export function stopSyncLoop() {
  if (syncInterval !== null) {
    clearInterval(syncInterval)
  }
  syncInterval = null
}
|