way better responsiveness, better system prompt

2025-08-03 14:37:23 +00:00
parent 2f3d16dbc5
commit b6ad54f40a
2 changed files with 8 additions and 9 deletions


@@ -7,7 +7,7 @@ import {
   OllamaRequest,
   OllamaResponse,
 } from "../types.js";
-import striptags from "striptags";
+// import striptags from "striptags";
 import { PrismaClient } from "../generated/prisma/client.js";
 import {
   getInstanceEmojis,
@@ -62,7 +62,7 @@ const generateOllamaRequest = async (
   envConfig;
   try {
     if (
-      striptags(notification.status.content).includes("!prompt") &&
+      // striptags(notification.status.content).includes("!prompt") &&
       !notification.status.account.bot && // sanity check, sort of
       notification.type === "mention" // &&
       // notification.status.visibility !== "private" // for safety, let's only respond to public messages
@@ -76,7 +76,7 @@ }
     }
     await recordPendingResponse(notification);
     await storeUserData(notification);
-    console.log(trimInputData(notification.status.content));
+    // console.log(trimInputData(notification.status.content));
     const ollamaRequestBody: OllamaRequest = {
       model: ollamaModel,
       // prompt: trimInputData(notification.status.content),
@@ -150,15 +150,14 @@ const createTimelinePost = async () => {
   const {
     bearerToken,
     ollamaModel,
-    // ollamaSystemPrompt,
+    ollamaSystemPrompt,
     ollamaUrl,
     pleromaInstanceUrl,
   } = envConfig;
   const ollamaRequestBody: OllamaRequest = {
     model: ollamaModel,
     prompt: "Say something random.",
-    system:
-      "You are a friendly AI assistant who loves to educate people on random topics, provide words of encouragement. You like to be as detailed as possible.",
+    system: ollamaSystemPrompt,
     stream: false,
     // options: ollamaConfig,
   };


@@ -4,10 +4,10 @@ import { envConfig } from "./main.js";
 import { Notification } from "../types.js";
 const trimInputData = (input: string): string => {
-  const strippedInput = striptags(input, [], "\n");
+  const strippedInput = striptags(input);
   const split = strippedInput.split(" ");
-  const promptStringIndex = split.indexOf("!prompt");
+  // const promptStringIndex = split.indexOf("!prompt");
   const botFqnIndex = split.indexOf("@nice-ai");
   const botFqnIndexFull = split.indexOf("@nice-ai@nicecrew.digital");
   if (botFqnIndex !== -1) {
     split[botFqnIndex] = "Lexi";
   }
@@ -16,7 +16,7 @@ const trimInputData = (input: string): string => {
   if (botFqnIndexFull !== -1) {
     split[botFqnIndexFull] = "Lexi";
   }
-  split.splice(promptStringIndex, 1);
+  // split.splice(promptStringIndex, 1);
   return split.join(" "); // returns everything after the !prompt
 };
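For reference, this is roughly what trimInputData does after the change: strip HTML, replace either form of the bot's handle with "Lexi", and pass the rest of the mention through untouched now that no "!prompt" token is spliced out. Note that the bare striptags(input) call also drops the "\n" tag replacement the old call used, so tags like <br> no longer become newlines. A sketch under those assumptions:

import striptags from "striptags";

// Strip HTML tags, then address the bot as "Lexi" wherever it was
// mentioned; no "!prompt" handling remains.
const trimInputData = (input: string): string => {
  const split = striptags(input).split(" ");
  for (const handle of ["@nice-ai", "@nice-ai@nicecrew.digital"]) {
    const idx = split.indexOf(handle); // first occurrence only
    if (idx !== -1) {
      split[idx] = "Lexi";
    }
  }
  return split.join(" ");
};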