fix: make search usable again

Author: Max Richter
Date: 2025-11-05 00:42:53 +01:00
parent 7664abe089
commit 581f1c1926
9 changed files with 120 additions and 62 deletions

@@ -19,6 +19,32 @@ import { fileExtension } from "https://deno.land/x/file_extension@v2.1.0/mod.ts"
 const log = createLogger("api/article");
+async function fetchAndStoreCover(
+  imageUrl: string | undefined,
+  title: string,
+  streamResponse?: ReturnType<typeof createStreamResponse>,
+): Promise<string | undefined> {
+  if (!imageUrl) return;
+  const imagePath = `articles/images/${safeFileName(title)}_cover.${
+    fileExtension(imageUrl)
+  }`;
+  try {
+    streamResponse?.enqueue("downloading image");
+    const res = await fetch(imageUrl);
+    streamResponse?.enqueue("saving image");
+    if (!res.ok) {
+      console.log(`Failed to download remote image: ${imageUrl}`, res.status);
+      return;
+    }
+    const buffer = await res.arrayBuffer();
+    await createResource(imagePath, buffer);
+    return `resources/${imagePath}`;
+  } catch (err) {
+    console.log(`Failed to save image: ${imageUrl}`, err);
+    return;
+  }
+}
 async function processCreateArticle(
   { fetchUrl, streamResponse }: {
     fetchUrl: string;
@@ -49,23 +75,11 @@ async function processCreateArticle(
   const title = result?.title || aiMeta?.headline || "";
-  let finalPath = result.image;
-  if (result?.image) {
-    const extension = fileExtension(result?.image);
-    const imagePath = `articles/images/${
-      safeFileName(title)
-    }_cover.${extension}`;
-    try {
-      streamResponse.enqueue("downloading image");
-      const res = await fetch(result.image);
-      streamResponse.enqueue("saving image");
-      const buffer = await res.arrayBuffer();
-      await createResource(imagePath, buffer);
-      finalPath = imagePath;
-    } catch (err) {
-      console.log(`Failed to save image: ${result.image}`, err);
-    }
-  }
+  const coverImagePath = await fetchAndStoreCover(
+    result.image,
+    title,
+    streamResponse,
+  );
   const newArticle: ArticleResource["content"] = {
     _type: "Article",
@@ -75,7 +89,7 @@ async function processCreateArticle(
     datePublished: formatDate(
       result?.published || aiMeta?.datePublished || undefined,
     ),
-    image: finalPath,
+    image: coverImagePath,
     author: {
       _type: "Person",
       name: (result.schemaOrgData?.author?.name || aiMeta?.author || "")
@@ -115,16 +129,23 @@ async function processCreateYoutubeVideo(
   const video = await getYoutubeVideoDetails(youtubeId);
   streamResponse.enqueue("shortening title with openai");
-  const newId = await openai.shortenTitle(video.snippet.title);
+  const videoTitle = await openai.shortenTitle(video.snippet.title) ||
+    video.snippet.title;
-  const id = newId || youtubeId;
+  const thumbnail = video?.snippet?.thumbnails?.maxres;
+  const coverImagePath = await fetchAndStoreCover(
+    thumbnail.url,
+    videoTitle || video.snippet.title,
+    streamResponse,
+  );
   const newArticle: ArticleResource["content"] = {
     _type: "Article",
     headline: video.snippet.title,
     articleBody: video.snippet.description,
+    image: coverImagePath,
     url: fetchUrl,
-    datePublished: new Date(video.snippet.publishedAt).toISOString(),
+    datePublished: formatDate(video.snippet.publishedAt),
     author: {
       _type: "Person",
       name: video.snippet.channelTitle,
@@ -133,11 +154,14 @@ async function processCreateYoutubeVideo(
   streamResponse.enqueue("creating article");
-  await createResource(`articles/${id}.md`, newArticle);
+  await createResource(
+    `articles/${toUrlSafeString(videoTitle)}.md`,
+    newArticle,
+  );
   streamResponse.enqueue("finished");
-  streamResponse.enqueue("id: " + id);
+  streamResponse.enqueue("id: " + toUrlSafeString(videoTitle));
 }
 export const handler: Handlers = {

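For reference, a minimal usage sketch of the fetchAndStoreCover helper added above. The helper is the one from this diff; the wrapper function, URL and title below are made up for illustration. The point is that a missing image, a failed download, or a failed save all resolve to undefined, so callers can assign the result straight to the article's image field.

// Sketch only: fetchAndStoreCover is the helper added in this commit,
// everything else here is illustrative.
async function exampleUsage() {
  const coverImagePath = await fetchAndStoreCover(
    "https://example.com/cover.jpg", // may also be undefined
    "Some Article Title",
    undefined, // streamResponse is optional
  );

  // coverImagePath is undefined when the download or save failed,
  // so the article is still created, just without an image.
  return {
    _type: "Article",
    headline: "Some Article Title",
    image: coverImagePath,
  };
}
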
@@ -1,7 +1,7 @@
 import { Handlers } from "$fresh/server.ts";
 import { json } from "@lib/helpers.ts";
-import { AccessDeniedError } from "@lib/errors.ts";
-import { searchResource } from "@lib/search.ts";
+import { AccessDeniedError, BadRequestError } from "@lib/errors.ts";
+import { parseResourceUrl, searchResource } from "@lib/search.ts";
 export const handler: Handlers = {
   async GET(req, ctx) {
@@ -10,18 +10,13 @@ export const handler: Handlers = {
       throw new AccessDeniedError();
     }
-    const url = new URL(req.url);
+    const s = parseResourceUrl(req.url);
+    if (!s) {
+      throw new BadRequestError();
+    }
-    const types = url.searchParams.get("types")?.split(",");
-    const tags = url.searchParams?.get("tags")?.split(",");
-    const authors = url.searchParams?.get("authors")?.split(",");
-    const resources = await searchResource({
-      q: url.searchParams.get("q") || "",
-      types,
-      tags,
-      authors,
-    });
+    console.log(s);
+    const resources = await searchResource(s);
     return json(resources);
   },
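
The parseResourceUrl implementation itself lives in @lib/search.ts and is not part of this diff. Judging from the inline parsing it replaces, it presumably pulls q, types, tags and authors out of the query string and returns undefined when the request is not a valid search, which the handler turns into a BadRequestError. A rough sketch under those assumptions, with a hypothetical name so it is not mistaken for the real helper:

// Hypothetical reconstruction, not the code from @lib/search.ts.
type ResourceSearchParams = {
  q: string;
  types?: string[];
  tags?: string[];
  authors?: string[];
};

function parseSearchUrl(rawUrl: string): ResourceSearchParams | undefined {
  const url = new URL(rawUrl);
  const q = url.searchParams.get("q");
  // Guess: treat a request without a q parameter as invalid.
  if (q === null) return undefined;
  return {
    q,
    types: url.searchParams.get("types")?.split(","),
    tags: url.searchParams.get("tags")?.split(","),
    authors: url.searchParams.get("authors")?.split(","),
  };
}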