2026-04-17 15:41:20 +02:00
|
|
|
/// <reference types="@sveltejs/kit" />
|
|
|
|
|
/// <reference no-default-lib="true"/>
|
|
|
|
|
/// <reference lib="esnext" />
|
|
|
|
|
/// <reference lib="webworker" />
|
|
|
|
|
import { build, files, version } from '$service-worker';
|
2026-04-18 16:38:09 +02:00
|
|
|
import { resolveStrategy } from '$lib/sw/cache-strategy';
|
2026-04-18 16:44:48 +02:00
|
|
|
import { diffManifest } from '$lib/sw/diff-manifest';
|
2026-04-18 16:38:09 +02:00
|
|
|
|
|
|
|
|
// Re-type the worker global: this file runs in a ServiceWorkerGlobalScope,
// giving us typed access to skipWaiting(), clients, registration, etc.
declare const self: ServiceWorkerGlobalScope;
|
2026-04-17 15:41:20 +02:00
|
|
|
|
2026-04-18 16:38:09 +02:00
|
|
|
// Versioned app-shell cache: every build gets a fresh cache (keyed by the
// SvelteKit build `version`); stale ones are deleted in the activate handler.
const SHELL_CACHE = `kochwas-shell-${version}`;

// Recipe pages and API payloads pre-cached by the sync flow below.
const DATA_CACHE = 'kochwas-data-v1';

// Recipe images, kept separate so image failures don't affect data entries.
const IMAGES_CACHE = 'kochwas-images-v1';

// App-shell assets (build output + static files known to SvelteKit)
const SHELL_ASSETS = [...build, ...files];
|
2026-04-17 15:41:20 +02:00
|
|
|
|
2026-04-18 16:38:09 +02:00
|
|
|
self.addEventListener('install', (event) => {
|
2026-04-17 15:41:20 +02:00
|
|
|
event.waitUntil(
|
2026-04-18 16:38:09 +02:00
|
|
|
(async () => {
|
|
|
|
|
const cache = await caches.open(SHELL_CACHE);
|
|
|
|
|
await cache.addAll(SHELL_ASSETS);
|
2026-04-18 17:27:04 +02:00
|
|
|
// Kein self.skipWaiting() hier — der Client (pwaStore) fragt den
|
|
|
|
|
// User via UpdateToast, ob der neue SW sofort übernehmen soll, und
|
|
|
|
|
// schickt dann eine SKIP_WAITING-Message. Ohne diese Trennung
|
|
|
|
|
// würde pwaStore beim Install-Event fälschlich "Neue Version"
|
|
|
|
|
// zeigen (weil statechange='installed' + controller=alter SW), und
|
|
|
|
|
// der neue SW würde einen Tick später ungefragt übernehmen.
|
2026-04-18 16:38:09 +02:00
|
|
|
})()
|
2026-04-17 15:41:20 +02:00
|
|
|
);
|
|
|
|
|
});
|
|
|
|
|
|
2026-04-18 16:38:09 +02:00
|
|
|
self.addEventListener('activate', (event) => {
|
2026-04-17 15:41:20 +02:00
|
|
|
event.waitUntil(
|
|
|
|
|
(async () => {
|
2026-04-18 16:38:09 +02:00
|
|
|
// Alte Shell-Caches (vorherige Versionen) räumen
|
2026-04-17 15:41:20 +02:00
|
|
|
const keys = await caches.keys();
|
|
|
|
|
await Promise.all(
|
|
|
|
|
keys
|
2026-04-18 16:38:09 +02:00
|
|
|
.filter((k) => k.startsWith('kochwas-shell-') && k !== SHELL_CACHE)
|
2026-04-17 15:41:20 +02:00
|
|
|
.map((k) => caches.delete(k))
|
|
|
|
|
);
|
2026-04-18 16:38:09 +02:00
|
|
|
await self.clients.claim();
|
2026-04-17 15:41:20 +02:00
|
|
|
})()
|
|
|
|
|
);
|
|
|
|
|
});
|
|
|
|
|
|
2026-04-18 16:38:09 +02:00
|
|
|
self.addEventListener('fetch', (event) => {
|
2026-04-17 15:41:20 +02:00
|
|
|
const req = event.request;
|
2026-04-18 16:38:09 +02:00
|
|
|
if (new URL(req.url).origin !== self.location.origin) return; // Cross-Origin unangetastet
|
2026-04-17 15:41:20 +02:00
|
|
|
|
2026-04-18 16:38:09 +02:00
|
|
|
const strategy = resolveStrategy({ url: req.url, method: req.method });
|
|
|
|
|
if (strategy === 'network-only') return;
|
2026-04-17 15:41:20 +02:00
|
|
|
|
2026-04-18 16:38:09 +02:00
|
|
|
if (strategy === 'shell') {
|
|
|
|
|
event.respondWith(cacheFirst(req, SHELL_CACHE));
|
|
|
|
|
} else if (strategy === 'images') {
|
|
|
|
|
event.respondWith(cacheFirst(req, IMAGES_CACHE));
|
2026-04-20 08:29:00 +02:00
|
|
|
} else if (strategy === 'network-first') {
|
|
|
|
|
event.respondWith(networkFirstWithTimeout(req, DATA_CACHE, NETWORK_TIMEOUT_MS));
|
2026-04-17 15:41:20 +02:00
|
|
|
}
|
2026-04-18 16:38:09 +02:00
|
|
|
});
|
2026-04-17 15:41:20 +02:00
|
|
|
|
2026-04-20 08:29:00 +02:00
|
|
|
// How long networkFirstWithTimeout waits for the network before falling
// back to the cache.
const NETWORK_TIMEOUT_MS = 3000;
|
|
|
|
|
|
2026-04-18 16:38:09 +02:00
|
|
|
async function cacheFirst(req: Request, cacheName: string): Promise<Response> {
|
|
|
|
|
const cache = await caches.open(cacheName);
|
|
|
|
|
const hit = await cache.match(req);
|
|
|
|
|
if (hit) return hit;
|
|
|
|
|
const fresh = await fetch(req);
|
|
|
|
|
if (fresh.ok) cache.put(req, fresh.clone()).catch(() => {});
|
|
|
|
|
return fresh;
|
|
|
|
|
}
|
2026-04-17 15:41:20 +02:00
|
|
|
|
2026-04-20 08:29:00 +02:00
|
|
|
// Network-first mit Timeout-Fallback: frische Daten gewinnen, wenn das Netz
|
|
|
|
|
// innerhalb von NETWORK_TIMEOUT_MS antwortet. Sonst wird der Cache geliefert
|
|
|
|
|
// (falls vorhanden), während der Netz-Fetch noch im Hintergrund weiterläuft
|
|
|
|
|
// und den Cache für den nächsten Request aktualisiert. Ohne Cache wartet der
|
|
|
|
|
// Client trotzdem aufs Netz, weil ein Error-Response hier nichts nützt.
|
|
|
|
|
async function networkFirstWithTimeout(
|
|
|
|
|
req: Request,
|
|
|
|
|
cacheName: string,
|
|
|
|
|
timeoutMs: number
|
|
|
|
|
): Promise<Response> {
|
2026-04-18 16:38:09 +02:00
|
|
|
const cache = await caches.open(cacheName);
|
2026-04-20 08:29:00 +02:00
|
|
|
const networkPromise: Promise<Response | null> = fetch(req)
|
2026-04-18 16:38:09 +02:00
|
|
|
.then((res) => {
|
|
|
|
|
if (res.ok) cache.put(req, res.clone()).catch(() => {});
|
|
|
|
|
return res;
|
|
|
|
|
})
|
2026-04-20 08:29:00 +02:00
|
|
|
.catch(() => null);
|
|
|
|
|
|
|
|
|
|
const timeoutPromise = new Promise<'timeout'>((resolve) =>
|
|
|
|
|
setTimeout(() => resolve('timeout'), timeoutMs)
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
const winner = await Promise.race([networkPromise, timeoutPromise]);
|
|
|
|
|
if (winner instanceof Response) return winner;
|
|
|
|
|
|
|
|
|
|
// Timeout oder Netzwerk-Fehler: Cache bevorzugen, sonst auf Netz warten.
|
|
|
|
|
const hit = await cache.match(req);
|
|
|
|
|
if (hit) return hit;
|
|
|
|
|
const late = await networkPromise;
|
|
|
|
|
return late ?? Response.error();
|
2026-04-18 16:38:09 +02:00
|
|
|
}
|
|
|
|
|
|
2026-04-18 16:44:48 +02:00
|
|
|
// Cache holding sync bookkeeping (the manifest of recipe ids cached so far).
const META_CACHE = 'kochwas-meta';

// Synthetic request key under which the manifest JSON is stored in META_CACHE.
const MANIFEST_KEY = '/__cache-manifest__';

const PAGE_SIZE = 50; // /api/recipes/all caps page size at 50

// Max number of recipes downloaded in parallel during a sync run.
const CONCURRENCY = 4;

// Minimal per-recipe shape needed for pre-caching (id + optional image path).
type RecipeSummary = { id: number; image_path: string | null };
|
|
|
|
|
|
|
|
|
|
self.addEventListener('message', (event) => {
|
|
|
|
|
const data = event.data as { type?: string } | undefined;
|
|
|
|
|
if (!data) return;
|
|
|
|
|
if (data.type === 'sync-start') {
|
|
|
|
|
event.waitUntil(runSync(false));
|
|
|
|
|
} else if (data.type === 'sync-check') {
|
|
|
|
|
event.waitUntil(runSync(true));
|
2026-04-18 17:27:04 +02:00
|
|
|
} else if (data.type === 'SKIP_WAITING') {
|
|
|
|
|
// Wird vom pwaStore nach User-Klick auf "Neu laden" geschickt.
|
|
|
|
|
void self.skipWaiting();
|
2026-04-18 18:06:36 +02:00
|
|
|
} else if (data.type === 'GET_VERSION') {
|
|
|
|
|
// Zombie-Schutz: Chromium hält nach einem SKIP_WAITING-Zyklus
|
|
|
|
|
// mitunter einen bit-identischen waiting-SW im Registration-Slot
|
|
|
|
|
// (Race zwischen SW-Update-Check während activate). Ohne diesen
|
|
|
|
|
// Version-Handshake zeigt init() den „Neue Version"-Toast bei jedem
|
|
|
|
|
// Reload erneut, obwohl es nichts zu aktualisieren gibt.
|
|
|
|
|
const port = event.ports[0] as MessagePort | undefined;
|
|
|
|
|
port?.postMessage({ version });
|
2026-04-18 16:44:48 +02:00
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
async function runSync(isUpdate: boolean): Promise<void> {
|
|
|
|
|
try {
|
|
|
|
|
// Storage-Quota-Check vor dem Pre-Cache
|
|
|
|
|
if (navigator.storage?.estimate) {
|
|
|
|
|
const est = await navigator.storage.estimate();
|
|
|
|
|
const freeBytes = (est.quota ?? 0) - (est.usage ?? 0);
|
|
|
|
|
if (freeBytes < 100 * 1024 * 1024) {
|
|
|
|
|
await broadcast({
|
|
|
|
|
type: 'sync-error',
|
|
|
|
|
message: `Nicht genug Speicher für Offline-Modus (${Math.round(freeBytes / 1024 / 1024)} MB frei)`
|
|
|
|
|
});
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
const summaries = await fetchAllSummaries();
|
|
|
|
|
const currentIds = summaries.map((s) => s.id);
|
|
|
|
|
const cachedIds = await loadCachedIds();
|
|
|
|
|
const { toAdd, toRemove } = diffManifest(currentIds, cachedIds);
|
|
|
|
|
const worklist = isUpdate ? toAdd : currentIds; // initial: alles laden
|
|
|
|
|
|
|
|
|
|
await broadcast({ type: 'sync-start', total: worklist.length });
|
|
|
|
|
|
2026-04-18 16:50:26 +02:00
|
|
|
const successful = new Set<number>();
|
2026-04-18 16:44:48 +02:00
|
|
|
let done = 0;
|
|
|
|
|
const tasks = worklist.map((id) => async () => {
|
|
|
|
|
const summary = summaries.find((s) => s.id === id);
|
2026-04-18 16:50:26 +02:00
|
|
|
const ok = await cacheRecipe(id, summary?.image_path ?? null);
|
|
|
|
|
if (ok) successful.add(id);
|
2026-04-18 16:44:48 +02:00
|
|
|
done += 1;
|
|
|
|
|
await broadcast({ type: 'sync-progress', current: done, total: worklist.length });
|
|
|
|
|
});
|
|
|
|
|
await runPool(tasks, CONCURRENCY);
|
|
|
|
|
|
|
|
|
|
if (isUpdate && toRemove.length > 0) {
|
|
|
|
|
await removeRecipes(toRemove);
|
|
|
|
|
}
|
|
|
|
|
|
2026-04-18 16:50:26 +02:00
|
|
|
// Manifest: für Update = (cached - toRemove) + neue successes
|
|
|
|
|
// Für Initial = nur die diesmal erfolgreich gecachten
|
|
|
|
|
const finalManifest = isUpdate
|
|
|
|
|
? Array.from(
|
|
|
|
|
new Set([...cachedIds.filter((id) => !toRemove.includes(id)), ...successful])
|
|
|
|
|
)
|
|
|
|
|
: Array.from(successful);
|
|
|
|
|
|
|
|
|
|
await saveCachedIds(finalManifest);
|
2026-04-18 16:44:48 +02:00
|
|
|
await broadcast({ type: 'sync-done', lastSynced: Date.now() });
|
|
|
|
|
} catch (e) {
|
|
|
|
|
await broadcast({
|
|
|
|
|
type: 'sync-error',
|
|
|
|
|
message: (e as Error).message ?? 'Unbekannter Sync-Fehler'
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async function fetchAllSummaries(): Promise<RecipeSummary[]> {
|
|
|
|
|
const result: RecipeSummary[] = [];
|
|
|
|
|
let offset = 0;
|
|
|
|
|
for (;;) {
|
|
|
|
|
const res = await fetch(`/api/recipes/all?sort=name&limit=${PAGE_SIZE}&offset=${offset}`);
|
|
|
|
|
if (!res.ok) throw new Error(`/api/recipes/all HTTP ${res.status}`);
|
|
|
|
|
const body = (await res.json()) as { hits: { id: number; image_path: string | null }[] };
|
|
|
|
|
result.push(...body.hits.map((h) => ({ id: h.id, image_path: h.image_path })));
|
|
|
|
|
if (body.hits.length < PAGE_SIZE) break;
|
|
|
|
|
offset += PAGE_SIZE;
|
|
|
|
|
}
|
|
|
|
|
return result;
|
|
|
|
|
}
|
|
|
|
|
|
2026-04-18 16:50:26 +02:00
|
|
|
async function cacheRecipe(id: number, imagePath: string | null): Promise<boolean> {
|
2026-04-18 16:44:48 +02:00
|
|
|
const data = await caches.open(DATA_CACHE);
|
|
|
|
|
const images = await caches.open(IMAGES_CACHE);
|
2026-04-18 16:50:26 +02:00
|
|
|
const [htmlOk, apiOk] = await Promise.all([
|
2026-04-18 16:44:48 +02:00
|
|
|
addToCache(data, `/recipes/${id}`),
|
2026-04-18 16:50:26 +02:00
|
|
|
addToCache(data, `/api/recipes/${id}`)
|
2026-04-18 16:44:48 +02:00
|
|
|
]);
|
2026-04-18 16:50:26 +02:00
|
|
|
if (imagePath && !/^https?:\/\//i.test(imagePath)) {
|
|
|
|
|
// Image-Fehler soll den Recipe-Eintrag nicht invalidieren (bei
|
|
|
|
|
// manchen Rezepten gibt es schlicht kein Bild)
|
|
|
|
|
await addToCache(images, `/images/${imagePath}`);
|
|
|
|
|
}
|
|
|
|
|
return htmlOk && apiOk;
|
2026-04-18 16:44:48 +02:00
|
|
|
}
|
|
|
|
|
|
2026-04-18 16:50:26 +02:00
|
|
|
async function addToCache(cache: Cache, url: string): Promise<boolean> {
|
2026-04-18 16:44:48 +02:00
|
|
|
try {
|
|
|
|
|
const res = await fetch(url);
|
2026-04-18 16:50:26 +02:00
|
|
|
if (!res.ok) {
|
|
|
|
|
console.warn(`[sw] cache miss ${url}: HTTP ${res.status}`);
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
await cache.put(url, res);
|
|
|
|
|
return true;
|
|
|
|
|
} catch (e) {
|
|
|
|
|
console.warn(`[sw] cache error ${url}:`, e);
|
|
|
|
|
return false;
|
2026-04-18 16:44:48 +02:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async function removeRecipes(ids: number[]): Promise<void> {
|
|
|
|
|
const data = await caches.open(DATA_CACHE);
|
|
|
|
|
for (const id of ids) {
|
|
|
|
|
await data.delete(`/recipes/${id}`);
|
|
|
|
|
await data.delete(`/api/recipes/${id}`);
|
|
|
|
|
}
|
|
|
|
|
// Orphan-Bilder: wir räumen nicht aktiv — neuer Hash = neuer Entry,
|
|
|
|
|
// alte Einträge stören nicht.
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async function loadCachedIds(): Promise<number[]> {
|
|
|
|
|
const meta = await caches.open(META_CACHE);
|
|
|
|
|
const res = await meta.match(MANIFEST_KEY);
|
|
|
|
|
if (!res) return [];
|
|
|
|
|
try {
|
|
|
|
|
return (await res.json()) as number[];
|
|
|
|
|
} catch {
|
|
|
|
|
return [];
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async function saveCachedIds(ids: number[]): Promise<void> {
|
|
|
|
|
const meta = await caches.open(META_CACHE);
|
|
|
|
|
await meta.put(
|
|
|
|
|
MANIFEST_KEY,
|
|
|
|
|
new Response(JSON.stringify(ids), { headers: { 'content-type': 'application/json' } })
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async function runPool<T>(tasks: (() => Promise<T>)[], limit: number): Promise<void> {
|
|
|
|
|
const executing: Promise<void>[] = [];
|
|
|
|
|
for (const task of tasks) {
|
|
|
|
|
const p: Promise<void> = task().then(() => {
|
|
|
|
|
executing.splice(executing.indexOf(p), 1);
|
|
|
|
|
});
|
|
|
|
|
executing.push(p);
|
|
|
|
|
if (executing.length >= limit) await Promise.race(executing);
|
|
|
|
|
}
|
|
|
|
|
await Promise.all(executing);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
async function broadcast(msg: unknown): Promise<void> {
|
|
|
|
|
const clients = await self.clients.matchAll();
|
|
|
|
|
for (const client of clients) client.postMessage(msg);
|
|
|
|
|
}
|
|
|
|
|
|
2026-04-18 16:38:09 +02:00
|
|
|
// Ensure this file is treated as a module so `declare const self` stays
// file-scoped.
export {};
|