// memorium/lib/openai.ts

import { OpenAI } from "https://deno.land/x/openai@1.4.2/mod.ts";
import { OPENAI_API_KEY } from "@lib/env.ts";
import { cacheFunction } from "@lib/cache/cache.ts";
import { hashString } from "@lib/helpers.ts";
const openAI = OPENAI_API_KEY && new OpenAI(OPENAI_API_KEY);

// Split a comma- or newline-separated model response into clean list items.
function extractListFromResponse(response?: string): string[] {
  if (!response) return [];
  return response
    .split(/[\n,]/)
    .map((line) => line.trim())
    .filter((line) => !line.endsWith(":"))
    // drop the leading token of each item (list markers such as "1." or "-")
    .map((line) => line.replace(/^[^\s]*/, "").trim())
    .filter((line) => line.length > 2);
}
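
/**
 * Summarize an article (truncated to 2000 characters) into a single short
 * sentence. Returns undefined when no OpenAI API key is configured.
 */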
export async function summarize(content: string) {
  if (!openAI) return;
  const chatCompletion = await openAI.createChatCompletion({
    model: "gpt-3.5-turbo",
    messages: [
      { "role": "system", "content": "You are a helpful assistant." },
      { "role": "user", "content": content.slice(0, 2000) },
      {
        "role": "user",
        "content":
          "Please summarize the article in one sentence as short as possible",
      },
    ],
  });
  return chatCompletion.choices[0].message.content;
}
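
/**
 * Shorten a website title by stripping extra information such as the site
 * name. Returns undefined when no OpenAI API key is configured.
 */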
export async function shortenTitle(content: string) {
  if (!openAI) return;
  const chatCompletion = await openAI.createChatCompletion({
    model: "gpt-3.5-turbo",
    messages: [
      { "role": "system", "content": "You are a helpful assistant." },
      { "role": "user", "content": content.slice(0, 2000) },
      {
        "role": "user",
        "content":
          "Please shorten the provided website title as much as possible. Don't rewrite it, just remove unnecessary information, for example any mention of the name of the website.",
      },
    ],
  });
  return chatCompletion.choices[0].message.content;
}
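
/**
 * Try to extract the author's name from the given text. Returns an empty
 * string when the model reports that no author was found.
 */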
export async function extractAuthorName(content: string) {
  if (!openAI) return;
  const chatCompletion = await openAI.createChatCompletion({
    model: "gpt-3.5-turbo",
    messages: [
      { "role": "system", "content": "You are a helpful assistant." },
      { "role": "user", "content": content.slice(0, 2000) },
      {
        "role": "user",
        "content":
          "If you are able to extract the name of the author from the text, please respond with the name, otherwise respond with 'not found'",
      },
    ],
  });
  const author = chatCompletion.choices[0].message.content;
  // Normalize any "not found"-style answer to an empty string.
  if (
    author?.toLowerCase().includes("not") &&
    author?.toLowerCase().includes("found")
  ) return "";
  return author;
}
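
/**
 * Generate roughly 20 keywords for a recommendation system from a
 * description (and optionally a title), returned as lowercase, dash-joined
 * slugs.
 */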
export async function createGenres(
  type: string,
  description: string,
  title = "unknown",
) {
  if (!openAI) return;
  const chatCompletion = await openAI.createChatCompletion({
    model: "gpt-3.5-turbo",
    messages: [
      {
        "role": "system",
        "content":
          `you create some keywords that can be used in a recommendation system. The keywords are based on a ${type} description or title. If you do not know the title, take the description into account as well. Create a range of keywords, from very specific ones to ones that describe the general vibe. ${
            title !== "unknown" ? `The name of the ${type} is ${title}` : ""
          }`,
      },
      {
        "role": "user",
        "content": `description:
${description.slice(0, 2000)}`,
      },
      {
        "role": "user",
        "content": "return a list of around 20 keywords separated by commas",
      },
    ],
  });
  const res = chatCompletion.choices[0].message.content?.toLowerCase();
  return extractListFromResponse(res)
    .map((v) => v.replaceAll(" ", "-"));
}
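
/**
 * Like createGenres, but passes title and description inside a single system
 * prompt. Returns lowercase, dash-joined keyword slugs.
 */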
export async function createKeywords(
  type: string,
  description: string,
  title = "unknown",
) {
  if (!openAI) return;
  const chatCompletion = await openAI.createChatCompletion({
    model: "gpt-3.5-turbo",
    messages: [
      {
        "role": "system",
        "content":
          `you create some keywords that can be used in a recommendation system. The keywords are based on a ${type} description or title. If you do not know the title, take the description into account as well. Create a range of keywords, from very specific ones to ones that describe the general vibe.
title: ${title}
description: ${description.slice(0, 2000).replaceAll("\n", " ")}
`,
      },
      {
        "role": "user",
        "content": "return a list of around 20 keywords separated by commas",
      },
    ],
  });
  const res = chatCompletion.choices[0].message.content?.toLowerCase();
  return extractListFromResponse(res)
    .map((v) => v.replaceAll(" ", "-"));
}
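
/**
 * Ask for 10 movie recommendations matching the given keywords while
 * excluding the listed titles. Results are parsed into { year, title } pairs
 * and cached per keyword/exclude combination.
 */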
export const getMovieRecommendations = (keywords: string, exclude: string[]) =>
  cacheFunction({
    fn: async () => {
      if (!openAI) return;
      const chatCompletion = await openAI.createChatCompletion({
        model: "gpt-3.5-turbo",
        messages: [
          {
            "role": "system",
            "content":
              `Could you recommend me 10 movies based on the following attributes:
${keywords}
The movies should be similar to but not include ${
                exclude.join(", ")
              } or remakes of those.
Respond with a plain unordered list, each item starting with the year the movie was released and then the title of the movie, separated by a -`,
          },
        ],
      });
      const res = chatCompletion.choices[0].message.content?.toLowerCase();
      if (!res) return;
      // Each line is expected to look like "<year> - <title>".
      return res.split("\n").map((entry) => {
        const [year, ...title] = entry.split("-");
        return {
          year: parseInt(year.trim()),
          title: title.join(" ").replaceAll('"', "").trim(),
        };
      }).filter((y) => !Number.isNaN(y.year));
    },
    id: `openai:movierecs:${hashString(`${keywords}:${exclude.join()}`)}`,
  });
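
/**
 * Derive up to five genre tags for an article, returned as lowercase,
 * dash-joined slugs.
 */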
export async function createTags(content: string) {
  if (!openAI) return;
  const chatCompletion = await openAI.createChatCompletion({
    model: "gpt-3.5-turbo",
    messages: [
      { "role": "system", "content": "You are a helpful assistant." },
      { "role": "user", "content": content.slice(0, 2000) },
      {
        "role": "user",
        "content":
          "Please respond with a list of genres the article falls into. Don't include any other information, just a comma-separated list of the top 5 categories. Only respond with tags that make sense, even if there are fewer than five.",
      },
    ],
  });
  const res = chatCompletion.choices[0].message.content?.toLowerCase();
  return extractListFromResponse(res).map((v) => v.replaceAll(" ", "-"));
}
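
// Example usage (a hypothetical caller; `articleText` is an assumed input,
// not part of this module):
//
//   const summary = await summarize(articleText);
//   const tags = await createTags(articleText);
//   const recs = await getMovieRecommendations("noir, heist, 1970s", [
//     "The Sting",
//   ]);
//
// All helpers resolve to undefined when OPENAI_API_KEY is not set, so callers
// should handle the missing-result case.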