diff --git a/main.ts b/main.ts
index 98009a6..7823ce3 100644
--- a/main.ts
+++ b/main.ts
@@ -15,7 +15,11 @@ const system = `\
 全て鳥取弁で語尾が「にゃん」。`;
 
 const n = 10;
-const model = "llama-3.1-70b-versatile";
+const textModel = "llama-3.2-90b-text-preview";
+const visionModel = "llama-3.2-90b-vision-preview";
+const urlPattern = new URLPattern({ pathname: "*.*" });
+const separatorRegex = /[<>]|[^\p{L}\p{N}\p{P}\p{S}]+/u;
+const imageExtensionRegex = /[.](?:gif|png|jpe?g|webp)$/i;
 
 const usage = `\
 https://dash.deno.com/playground/darazllm
@@ -25,7 +29,10 @@ https://dash.deno.com/playground/darazllm
 @darazllm /help このテキストを表示
 
 使用する会話: 最新${n}件
-モデル: ${model}
+テキスト用モデル: ${textModel}
+画像用モデル: ${visionModel}
+区切り文字: ${separatorRegex}
+画像拡張子: ${imageExtensionRegex}
 
 システムプロンプト:
 ${system}`;
@@ -43,13 +50,36 @@ const kv = await Deno.openKv();
 
 async function chat(messages: Messages): Promise<string | null> {
   const res = await groq.chat.completions.create({
-    model,
+    model: textModel,
     messages: [{ role: "system", content: system }, ...messages],
  });
 
   return res.choices[0].message.content;
 }
 
+function isImageUrl(message: string): boolean {
+  return urlPattern.test(message) && imageExtensionRegex.test(message);
+}
+
+/** llama-3.2-90b-vision-preview does not support system prompts and accepts at most one image */
+async function visionChat(
+  content: Array<Groq.Chat.Completions.ChatCompletionContentPart>,
+): Promise<string | null> {
+  const res = await groq.chat.completions.create({
+    model: visionModel,
+    messages: [
+      {
+        role: "user",
+        content: content.map((c) =>
+          c.type === "text" ? { ...c, text: `${system}\n\n${c.text}` } : c
+        ),
+      },
+    ],
+  });
+
+  return res.choices[0].message.content;
+}
+
 const app = new bolt.App({
   token: SLACK_BOT_TOKEN,
   signingSecret: SLACK_SIGNING_SECRET,
@@ -80,8 +110,23 @@ app.message(async (c) => {
   if (prompt) messages.push({ role: "user", content: prompt });
 
   if (isMention || Math.floor(Math.random() * n) === 0) {
+    const content: Array<Groq.Chat.Completions.ChatCompletionContentPart> = prompt
+      .split(separatorRegex)
+      .filter(Boolean)
+      .map((text) => {
+        if (isImageUrl(text)) {
+          return { type: "image_url", image_url: { url: text } };
+        } else {
+          return { type: "text", text };
+        }
+      });
+
+    const visionMode = content.some((c) => c.type === "image_url");
+
     try {
-      const res = await chat(messages.slice(-n));
+      const res = visionMode
+        ? await visionChat(content)
+        : await chat(messages.slice(-n));
 
       if (!res) throw new Error("Empty response");