way better responsiveness, better system prompt

2025-08-03 14:37:23 +00:00
parent 2f3d16dbc5
commit b6ad54f40a
2 changed files with 8 additions and 9 deletions


@@ -7,7 +7,7 @@ import {
 OllamaRequest,
 OllamaResponse,
 } from "../types.js";
-import striptags from "striptags";
+// import striptags from "striptags";
 import { PrismaClient } from "../generated/prisma/client.js";
 import {
 getInstanceEmojis,
@@ -62,7 +62,7 @@ const generateOllamaRequest = async (
 envConfig;
 try {
 if (
-striptags(notification.status.content).includes("!prompt") &&
+// striptags(notification.status.content).includes("!prompt") &&
 !notification.status.account.bot && // sanity check, sort of
 notification.type === "mention" // &&
 // notification.status.visibility !== "private" // for safety, let's only respond to public messages
@@ -76,7 +76,7 @@ const generateOllamaRequest = async (
 }
 await recordPendingResponse(notification);
 await storeUserData(notification);
-console.log(trimInputData(notification.status.content));
+// console.log(trimInputData(notification.status.content));
 const ollamaRequestBody: OllamaRequest = {
 model: ollamaModel,
 // prompt: trimInputData(notification.status.content),
@@ -150,15 +150,14 @@ const createTimelinePost = async () => {
 const {
 bearerToken,
 ollamaModel,
-// ollamaSystemPrompt,
+ollamaSystemPrompt,
 ollamaUrl,
 pleromaInstanceUrl,
 } = envConfig;
 const ollamaRequestBody: OllamaRequest = {
 model: ollamaModel,
 prompt: "Say something random.",
-system:
-"You are a friendly AI assistant who loves to educate people on random topics, provide words of encouragement. You like to be as detailed as possible.",
+system: ollamaSystemPrompt,
 stream: false,
 // options: ollamaConfig,