feat(import): manuelle URL-Importe von allen Domains zulassen
All checks were successful
Build & Publish Docker Image / build-and-push (push) Successful in 1m14s
Der User pastet bewusst eine URL und erwartet, dass der Import klappt — die Whitelist-Prüfung (DOMAIN_BLOCKED) im previewRecipe war da nur Reibung. Die Whitelist bleibt für die Web-Suche relevant (dort muss das Crawl-Feld eingeschränkt werden), für Imports nicht mehr. Dropped: isDomainAllowed + whitelist.ts, DOMAIN_BLOCKED-Code in ImporterError, die zugehörige Branch in mapImporterError. Tests entsprechend angepasst: statt "DOMAIN_BLOCKED wenn nicht whitelisted" prüft der Preview-Test jetzt "klappt auch ohne Whitelist-Eintrag". Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -1,16 +0,0 @@
|
||||
import type Database from 'better-sqlite3';
|
||||
import { normalizeDomain } from './repository';
|
||||
|
||||
export function isDomainAllowed(db: Database.Database, urlString: string): boolean {
|
||||
let host: string;
|
||||
try {
|
||||
host = new URL(urlString).hostname;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
const normalized = normalizeDomain(host);
|
||||
const row = db
|
||||
.prepare('SELECT 1 AS ok FROM allowed_domain WHERE domain = ? LIMIT 1')
|
||||
.get(normalized);
|
||||
return row !== undefined;
|
||||
}
|
||||
@@ -4,10 +4,8 @@ import { ImporterError } from './recipes/importer';
|
||||
export function mapImporterError(e: unknown): never {
|
||||
if (e instanceof ImporterError) {
|
||||
const status =
|
||||
e.code === 'INVALID_URL' || e.code === 'DOMAIN_BLOCKED'
|
||||
? e.code === 'DOMAIN_BLOCKED'
|
||||
? 403
|
||||
: 400
|
||||
e.code === 'INVALID_URL'
|
||||
? 400
|
||||
: e.code === 'NO_RECIPE_FOUND'
|
||||
? 422
|
||||
: 502; // FETCH_FAILED
|
||||
|
||||
@@ -2,7 +2,6 @@ import type Database from 'better-sqlite3';
|
||||
import type { Recipe } from '$lib/types';
|
||||
import { fetchText } from '../http';
|
||||
import { extractRecipeFromHtml } from '../parsers/json-ld-recipe';
|
||||
import { isDomainAllowed } from '../domains/whitelist';
|
||||
import { downloadImage } from '../images/image-downloader';
|
||||
import {
|
||||
getRecipeById,
|
||||
@@ -14,7 +13,6 @@ export class ImporterError extends Error {
|
||||
constructor(
|
||||
public readonly code:
|
||||
| 'INVALID_URL'
|
||||
| 'DOMAIN_BLOCKED'
|
||||
| 'FETCH_FAILED'
|
||||
| 'NO_RECIPE_FOUND',
|
||||
message: string
|
||||
@@ -32,11 +30,12 @@ function hostnameOrThrow(url: string): string {
|
||||
}
|
||||
}
|
||||
|
||||
export async function previewRecipe(db: Database.Database, url: string): Promise<Recipe> {
|
||||
// Manuelle URL-Importe sind absichtlich NICHT mehr auf die allowed_domain-
|
||||
// Whitelist beschränkt — der User pastet bewusst eine URL und erwartet,
|
||||
// dass der Import klappt. Die Whitelist bleibt für die Web-Suche (searxng)
|
||||
// relevant, weil dort ein breites Crawl-Feld eingeschränkt werden soll.
|
||||
export async function previewRecipe(_db: Database.Database, url: string): Promise<Recipe> {
|
||||
const host = hostnameOrThrow(url);
|
||||
if (!isDomainAllowed(db, url)) {
|
||||
throw new ImporterError('DOMAIN_BLOCKED', `Domain not allowed: ${host}`);
|
||||
}
|
||||
let html: string;
|
||||
try {
|
||||
html = await fetchText(url);
|
||||
|
||||
Reference in New Issue
Block a user