fix(search): more reliable filtering via allowTruncate
All checks were successful
Build & Publish Docker Image / build-and-push (push) Successful in 1m16s
Previously fetchText threw as soon as a page exceeded 512 KB; with modern recipe sites (inlined bundles, base64 images) that limit is hit practically every time. The catch block then left hasRecipe at NULL, and the hit passed through the filter unchecked. With the new FetchOptions.allowTruncate: true we get the first 512 KB (enough for the <head> with og:image and JSON-LD) instead of a throw. Timeout raised to 8s because the Pi is sometimes slower. Migration 008 clears old NULL has_recipe entries from the cache so they get classified fresh on the next search instead of staying wrongly cached for another 30 days.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
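The call site that opts into truncation is not part of the diff below. A minimal sketch of the intended usage, where the import path, classifyHit, and the Recipe heuristic are assumptions for illustration, not code from this commit:

// Hypothetical call site; './fetch' and the regex heuristic are
// placeholders for illustration only.
import { fetchText } from './fetch';

// null = could not fetch at all; this is what ends up cached as
// NULL has_recipe, and what migration 008 clears out.
async function classifyHit(url: string): Promise<boolean | null> {
  try {
    const html = await fetchText(url, {
      maxBytes: 512 * 1024, // head with og:image and JSON-LD fits well below this
      timeoutMs: 8_000,     // the Pi is sometimes slower
      allowTruncate: true,  // oversized pages no longer throw
    });
    return /"@type"\s*:\s*"Recipe"/.test(html);
  } catch {
    // Only genuine failures (timeout, DNS, blocked URL) land here now.
    return null;
  }
}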
@@ -3,9 +3,16 @@ export type FetchOptions = {
   timeoutMs?: number;
   userAgent?: string;
   extraHeaders?: Record<string, string>;
+  /**
+   * When true, return the data read up to `maxBytes` instead of throwing.
+   * Useful when we only care about the page head (og:image, JSON-LD) — most
+   * recipe sites are >1 MB today because of inlined bundles, but the head is
+   * usually well under 512 KB.
+   */
+  allowTruncate?: boolean;
 };
 
-const DEFAULTS: Required<Omit<FetchOptions, 'extraHeaders'>> = {
+const DEFAULTS: Required<Omit<FetchOptions, 'extraHeaders' | 'allowTruncate'>> = {
   maxBytes: 10 * 1024 * 1024,
   timeoutMs: 10_000,
   userAgent: 'Kochwas/0.1'
@@ -25,16 +32,23 @@ function assertSafeUrl(url: string): void {
 
 async function readBody(
   response: Response,
-  maxBytes: number
-): Promise<{ data: Uint8Array; total: number }> {
+  maxBytes: number,
+  allowTruncate: boolean
+): Promise<{ data: Uint8Array; total: number; truncated: boolean }> {
   const reader = response.body?.getReader();
   if (!reader) {
     const buf = new Uint8Array(await response.arrayBuffer());
-    if (buf.byteLength > maxBytes) throw new Error(`Response exceeds ${maxBytes} bytes`);
-    return { data: buf, total: buf.byteLength };
+    if (buf.byteLength > maxBytes) {
+      if (allowTruncate) {
+        return { data: buf.slice(0, maxBytes), total: maxBytes, truncated: true };
+      }
+      throw new Error(`Response exceeds ${maxBytes} bytes`);
+    }
+    return { data: buf, total: buf.byteLength, truncated: false };
   }
   const chunks: Uint8Array[] = [];
   let total = 0;
+  let truncated = false;
   for (;;) {
     const { value, done } = await reader.read();
     if (done) break;
@@ -42,6 +56,14 @@ async function readBody(
     total += value.byteLength;
     if (total > maxBytes) {
       await reader.cancel();
+      if (allowTruncate) {
+        // keep what we have up to the chunk boundary; good enough for HTML head
+        const keep = value.byteLength - (total - maxBytes);
+        if (keep > 0) chunks.push(value.slice(0, keep));
+        total = maxBytes;
+        truncated = true;
+        break;
+      }
       throw new Error(`Response exceeds ${maxBytes} bytes`);
     }
     chunks.push(value);
@@ -53,7 +75,7 @@ async function readBody(
     merged.set(c, offset);
     offset += c.byteLength;
   }
-  return { data: merged, total };
+  return { data: merged, total, truncated };
 }
 
 async function doFetch(url: string, opts: FetchOptions): Promise<Response> {
@@ -82,7 +104,7 @@ async function doFetch(url: string, opts: FetchOptions): Promise<Response> {
 export async function fetchText(url: string, opts: FetchOptions = {}): Promise<string> {
   const maxBytes = opts.maxBytes ?? DEFAULTS.maxBytes;
   const res = await doFetch(url, opts);
-  const { data } = await readBody(res, maxBytes);
+  const { data } = await readBody(res, maxBytes, opts.allowTruncate ?? false);
   return new TextDecoder('utf-8').decode(data);
 }
 
@@ -92,6 +114,6 @@ export async function fetchBuffer(
 ): Promise<{ data: Uint8Array; contentType: string | null }> {
   const maxBytes = opts.maxBytes ?? DEFAULTS.maxBytes;
   const res = await doFetch(url, opts);
-  const { data } = await readBody(res, maxBytes);
+  const { data } = await readBody(res, maxBytes, opts.allowTruncate ?? false);
   return { data, contentType: res.headers.get('content-type') };
 }
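Migration 008 itself is not shown in this diff. A plausible sketch of what it does, assuming the cache lives in a search_cache table with a has_recipe column (both names guessed from the commit message, as is the migration-runner shape):

// migrations/008_clear_stale_has_recipe.ts (hypothetical file name and
// schema; table and column names are inferred from the commit message)
export const up = `
  DELETE FROM search_cache
  WHERE has_recipe IS NULL;
`;

Deleting rather than re-fetching inside the migration keeps it cheap; the next search repopulates those rows lazily with a real true/false classification.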