feat(pwa): SW Pre-Cache-Orchestrator mit Fortschritt + Delta-Sync
All checks were successful
Build & Publish Docker Image / build-and-push (push) Successful in 1m19s
All checks were successful
Build & Publish Docker Image / build-and-push (push) Successful in 1m19s
Message-Handler für sync-start (initial: alle Rezepte cachen) und sync-check (delta: nur neue nachladen, gelöschte räumen). Vor dem Sync ein Storage-Quota-Check (<100 MB frei → abbrechen mit Fehler-Broadcast). Concurrency-Pool mit 4 parallelen Downloads pro Rezept (HTML, API-JSON, Bild). Fortschritt per postMessage an alle Clients, die über den sync-status-Store den SyncIndicator füllen. Das Cache-Manifest wird als JSON-Response unter /__cache-manifest__ im kochwas-meta Cache persistiert. Client triggert beim App-Start entweder sync-check (bereits kontrollierter SW) oder sync-start (erstmaliger SW-Install). Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -18,4 +18,16 @@ export async function registerServiceWorker(): Promise<void> {
|
||||
syncStatus.handle(data);
|
||||
}
|
||||
});
|
||||
|
||||
// Beim App-Start: wenn wir einen aktiven SW haben, frage ihn, ob er
|
||||
// neu synct (initial oder Delta).
|
||||
if (navigator.serviceWorker.controller) {
|
||||
navigator.serviceWorker.controller.postMessage({ type: 'sync-check' });
|
||||
} else {
|
||||
// Erste Session: SW kommt erst mit dem nächsten Reload zum Einsatz.
|
||||
// Beim nächsten Start triggert sync-check dann den Initial-Sync.
|
||||
navigator.serviceWorker.ready.then((reg) => {
|
||||
reg.active?.postMessage({ type: 'sync-start' });
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
/// <reference lib="webworker" />
|
||||
import { build, files, version } from '$service-worker';
|
||||
import { resolveStrategy } from '$lib/sw/cache-strategy';
|
||||
import { diffManifest } from '$lib/sw/diff-manifest';
|
||||
|
||||
declare const self: ServiceWorkerGlobalScope;
|
||||
|
||||
@@ -76,4 +77,148 @@ async function staleWhileRevalidate(req: Request, cacheName: string): Promise<Re
|
||||
return hit ?? fetchPromise;
|
||||
}
|
||||
|
||||
const META_CACHE = 'kochwas-meta';
|
||||
const MANIFEST_KEY = '/__cache-manifest__';
|
||||
const PAGE_SIZE = 50; // /api/recipes/all limitiert auf 50
|
||||
const CONCURRENCY = 4;
|
||||
|
||||
type RecipeSummary = { id: number; image_path: string | null };
|
||||
|
||||
self.addEventListener('message', (event) => {
|
||||
const data = event.data as { type?: string } | undefined;
|
||||
if (!data) return;
|
||||
if (data.type === 'sync-start') {
|
||||
event.waitUntil(runSync(false));
|
||||
} else if (data.type === 'sync-check') {
|
||||
event.waitUntil(runSync(true));
|
||||
}
|
||||
});
|
||||
|
||||
async function runSync(isUpdate: boolean): Promise<void> {
|
||||
try {
|
||||
// Storage-Quota-Check vor dem Pre-Cache
|
||||
if (navigator.storage?.estimate) {
|
||||
const est = await navigator.storage.estimate();
|
||||
const freeBytes = (est.quota ?? 0) - (est.usage ?? 0);
|
||||
if (freeBytes < 100 * 1024 * 1024) {
|
||||
await broadcast({
|
||||
type: 'sync-error',
|
||||
message: `Nicht genug Speicher für Offline-Modus (${Math.round(freeBytes / 1024 / 1024)} MB frei)`
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const summaries = await fetchAllSummaries();
|
||||
const currentIds = summaries.map((s) => s.id);
|
||||
const cachedIds = await loadCachedIds();
|
||||
const { toAdd, toRemove } = diffManifest(currentIds, cachedIds);
|
||||
const worklist = isUpdate ? toAdd : currentIds; // initial: alles laden
|
||||
|
||||
await broadcast({ type: 'sync-start', total: worklist.length });
|
||||
|
||||
let done = 0;
|
||||
const tasks = worklist.map((id) => async () => {
|
||||
const summary = summaries.find((s) => s.id === id);
|
||||
await cacheRecipe(id, summary?.image_path ?? null);
|
||||
done += 1;
|
||||
await broadcast({ type: 'sync-progress', current: done, total: worklist.length });
|
||||
});
|
||||
await runPool(tasks, CONCURRENCY);
|
||||
|
||||
if (isUpdate && toRemove.length > 0) {
|
||||
await removeRecipes(toRemove);
|
||||
}
|
||||
|
||||
await saveCachedIds(currentIds);
|
||||
await broadcast({ type: 'sync-done', lastSynced: Date.now() });
|
||||
} catch (e) {
|
||||
await broadcast({
|
||||
type: 'sync-error',
|
||||
message: (e as Error).message ?? 'Unbekannter Sync-Fehler'
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async function fetchAllSummaries(): Promise<RecipeSummary[]> {
|
||||
const result: RecipeSummary[] = [];
|
||||
let offset = 0;
|
||||
for (;;) {
|
||||
const res = await fetch(`/api/recipes/all?sort=name&limit=${PAGE_SIZE}&offset=${offset}`);
|
||||
if (!res.ok) throw new Error(`/api/recipes/all HTTP ${res.status}`);
|
||||
const body = (await res.json()) as { hits: { id: number; image_path: string | null }[] };
|
||||
result.push(...body.hits.map((h) => ({ id: h.id, image_path: h.image_path })));
|
||||
if (body.hits.length < PAGE_SIZE) break;
|
||||
offset += PAGE_SIZE;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
async function cacheRecipe(id: number, imagePath: string | null): Promise<void> {
|
||||
const data = await caches.open(DATA_CACHE);
|
||||
const images = await caches.open(IMAGES_CACHE);
|
||||
await Promise.all([
|
||||
addToCache(data, `/recipes/${id}`),
|
||||
addToCache(data, `/api/recipes/${id}`),
|
||||
imagePath && !/^https?:\/\//i.test(imagePath)
|
||||
? addToCache(images, `/images/${imagePath}`)
|
||||
: Promise.resolve()
|
||||
]);
|
||||
}
|
||||
|
||||
async function addToCache(cache: Cache, url: string): Promise<void> {
|
||||
try {
|
||||
const res = await fetch(url);
|
||||
if (res.ok) await cache.put(url, res);
|
||||
} catch {
|
||||
// Einzelne Fehler ignorieren — nächster Sync holt's nach.
|
||||
}
|
||||
}
|
||||
|
||||
async function removeRecipes(ids: number[]): Promise<void> {
|
||||
const data = await caches.open(DATA_CACHE);
|
||||
for (const id of ids) {
|
||||
await data.delete(`/recipes/${id}`);
|
||||
await data.delete(`/api/recipes/${id}`);
|
||||
}
|
||||
// Orphan-Bilder: wir räumen nicht aktiv — neuer Hash = neuer Entry,
|
||||
// alte Einträge stören nicht.
|
||||
}
|
||||
|
||||
async function loadCachedIds(): Promise<number[]> {
|
||||
const meta = await caches.open(META_CACHE);
|
||||
const res = await meta.match(MANIFEST_KEY);
|
||||
if (!res) return [];
|
||||
try {
|
||||
return (await res.json()) as number[];
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
async function saveCachedIds(ids: number[]): Promise<void> {
|
||||
const meta = await caches.open(META_CACHE);
|
||||
await meta.put(
|
||||
MANIFEST_KEY,
|
||||
new Response(JSON.stringify(ids), { headers: { 'content-type': 'application/json' } })
|
||||
);
|
||||
}
|
||||
|
||||
async function runPool<T>(tasks: (() => Promise<T>)[], limit: number): Promise<void> {
|
||||
const executing: Promise<void>[] = [];
|
||||
for (const task of tasks) {
|
||||
const p: Promise<void> = task().then(() => {
|
||||
executing.splice(executing.indexOf(p), 1);
|
||||
});
|
||||
executing.push(p);
|
||||
if (executing.length >= limit) await Promise.race(executing);
|
||||
}
|
||||
await Promise.all(executing);
|
||||
}
|
||||
|
||||
async function broadcast(msg: unknown): Promise<void> {
|
||||
const clients = await self.clients.matchAll();
|
||||
for (const client of clients) client.postMessage(msg);
|
||||
}
|
||||
|
||||
export {};
|
||||
|
||||
Reference in New Issue
Block a user