seems to work a little better now

2025-10-28 17:22:45 +00:00
parent 051a66ff26
commit 2a53b0a827

@@ -4,7 +4,7 @@ import {
   OllamaConfigOptions,
   OllamaChatRequest,
   OllamaChatResponse,
-  PostAncestorsForModel,
+  // PostAncestorsForModel,
 } from "../types.js";
 import { PrismaClient } from "../generated/prisma/client.js";
 import {
@@ -65,42 +65,75 @@ const generateOllamaRequest = async (
     ollamaUrl,
     replyWithContext,
   } = envConfig;
+  let shouldDeleteNotification = false;
   try {
-    if (shouldContinue(notification)) {
+    if (!shouldContinue(notification)) {
+      shouldDeleteNotification = true;
+      return;
+    }
     if (whitelistOnly && !isFromWhitelistedDomain(notification)) {
-      await deleteNotification(notification);
+      shouldDeleteNotification = true;
       return;
     }
     if (await alreadyRespondedTo(notification)) {
+      shouldDeleteNotification = true;
       return;
     }
     await recordPendingResponse(notification);
     await storeUserData(notification);
-    let conversationHistory: PostAncestorsForModel[] = [];
+    let conversationContext = "";
     if (replyWithContext) {
       const contextPosts = await getStatusContext(notification.status.id);
       if (!contextPosts?.ancestors) {
         throw new Error(`Unable to obtain post context ancestors.`);
       }
-      conversationHistory = contextPosts.ancestors.map((ancestor) => ({
-        account_fqn: ancestor.account.fqn,
-        mentions: ancestor.mentions.map((mention) => mention.acct),
-        plaintext_content: ancestor.pleroma.content["text/plain"],
-      }));
+      // Build a human-readable conversation thread
+      const allPosts = [...contextPosts.ancestors];
+      // Include descendants (follow-up posts) if available
+      if (contextPosts.descendents && contextPosts.descendents.length > 0) {
+        allPosts.push(...contextPosts.descendents);
+      }
+      if (allPosts.length > 0) {
+        const conversationLines = allPosts.map((post) => {
+          const author = post.account.fqn;
+          const content = post.pleroma.content["text/plain"];
+          const replyingTo = post.in_reply_to_account_id
+            ? ` (replying to another message)`
+            : "";
+          return `[@${author}${replyingTo}]: ${content}`;
+        });
+        conversationContext = `
+Previous conversation thread:
+${conversationLines.join("\n\n")}
+---
+`;
+      }
     }
     const userMessage = notification.status.pleroma.content["text/plain"];
+    const originalAuthor = notification.account.fqn;
     let systemContent = ollamaSystemPrompt;
-    if (replyWithContext) {
+    if (replyWithContext && conversationContext) {
       systemContent = `${ollamaSystemPrompt}
-Previous conversation (JSON format):
-${JSON.stringify(conversationHistory, null, 2)}
+${conversationContext}
+Current message from @${originalAuthor}:
+"${userMessage}"
 Instructions:
-- Each entry shows: account_fqn (who posted), mentions (tagged users), and plaintext_content (message)
-- The first mention is the direct recipient
-- Address users with @ before their names
+- You are replying to @${originalAuthor}
+- Address them directly if appropriate
 - Use markdown formatting and emojis sparingly`;
     }
@@ -113,22 +146,47 @@ Instructions:
       stream: false,
       options: {
         ...ollamaConfig,
-        stop: ["<|im_end|>", "\n\n"],
+        stop: ["</s>", "[INST]"], // Mistral 0.3 stop tokens
       },
     };
+    console.log(
+      `Generating response for notification ${notification.id} from @${originalAuthor}`
+    );
     // Change endpoint to /api/chat
     const response = await fetch(`${ollamaUrl}/api/chat`, {
       method: "POST",
       body: JSON.stringify(ollamaRequestBody),
     });
+    if (!response.ok) {
+      throw new Error(`Ollama API request failed: ${response.statusText}`);
+    }
     const ollamaResponse: OllamaChatResponse = await response.json();
     await storePromptData(notification, ollamaResponse);
     return ollamaResponse;
-    }
   } catch (error: any) {
-    throw new Error(error.message);
+    console.error(
+      `Error in generateOllamaRequest for notification ${notification.id}:`,
+      error.message
+    );
+    // Delete notification on error to prevent retry loops
+    shouldDeleteNotification = true;
+    throw error;
+  } finally {
+    if (shouldDeleteNotification) {
+      try {
+        await deleteNotification(notification);
+      } catch (deleteError: any) {
+        console.error(
+          `Failed to delete notification ${notification.id}:`,
+          deleteError.message
+        );
+      }
+    }
   }
 };
@@ -139,21 +197,26 @@ const postReplyToStatus = async (
   const { pleromaInstanceUrl, bearerToken } = envConfig;
   try {
-    let mentions: string[];
+    // Only mention the original author who triggered the bot
+    const originalAuthor = notification.account.acct;
+    console.log(
+      `Replying to: @${originalAuthor} (status ID: ${notification.status.id})`
+    );
+    // Sanitize LLM output - remove any stray Mistral special tokens
+    let sanitizedContent = ollamaResponseBody.message.content
+      .replace(/<\/s>/g, "") // Remove EOS token if it appears
+      .replace(/\[INST\]/g, "") // Remove instruction start token
+      .replace(/\[\/INST\]/g, "") // Remove instruction end token
+      .replace(/<s>/g, "") // Remove BOS token if it appears
+      .trim();
     const statusBody: NewStatusBody = {
       content_type: "text/markdown",
-      status: ollamaResponseBody.message.content,
+      status: sanitizedContent,
       in_reply_to_id: notification.status.id,
+      to: [originalAuthor], // Only send to the person who mentioned the bot
     };
-    if (
-      notification.status.mentions &&
-      notification.status.mentions.length > 0
-    ) {
-      mentions = notification.status.mentions.map((mention) => {
-        return mention.acct;
-      });
-      statusBody.to = mentions;
-    }
     const response = await fetch(`${pleromaInstanceUrl}/api/v1/statuses`, {
       method: "POST",
@@ -168,9 +231,23 @@ const postReplyToStatus = async (
       throw new Error(`New status request failed: ${response.statusText}`);
     }
-    await deleteNotification(notification);
+    console.log(`Successfully posted reply to @${originalAuthor}`);
   } catch (error: any) {
-    throw new Error(error.message);
+    console.error(
+      `Error posting reply for notification ${notification.id}:`,
+      error.message
+    );
+    throw error;
+  } finally {
+    // Always try to delete the notification, even if posting failed
+    try {
+      await deleteNotification(notification);
+    } catch (deleteError: any) {
+      console.error(
+        `Failed to delete notification ${notification.id}:`,
+        deleteError.message
+      );
+    }
   }
 };
@@ -195,7 +272,7 @@ const createTimelinePost = async () => {
     stream: false,
     options: {
       ...ollamaConfig,
-      stop: ["<|start_header_id|>", "<|end_header_id|>", "<|eot_id|>"],
+      stop: ["</s>", "[INST]"], // Mistral 0.3 stop tokens
     },
   };
   try {
@@ -239,18 +316,21 @@ const beginFetchCycle = async () => {
   setInterval(async () => {
     notifications = await getNotifications();
     if (notifications.length > 0) {
-      await Promise.all(
-        notifications.map(async (notification) => {
+      // Process notifications sequentially to avoid race conditions
+      for (const notification of notifications) {
         try {
           const ollamaResponse = await generateOllamaRequest(notification);
           if (ollamaResponse) {
-            postReplyToStatus(notification, ollamaResponse);
+            await postReplyToStatus(notification, ollamaResponse);
           }
         } catch (error: any) {
-          throw new Error(error.message);
+          console.error(
+            `Error processing notification ${notification.id}:`,
+            error.message
+          );
+          // Continue processing other notifications even if one fails
         }
-        })
-      );
+      }
     }
   }, envConfig.fetchInterval); // lower intervals may cause the bot to respond multiple times to the same message, but we try to mitigate this with the deleteNotification function
 };
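
For reference, a minimal standalone sketch of the token-stripping approach used in postReplyToStatus above; the helper name sanitizeModelOutput is hypothetical and not part of this codebase:

// Hypothetical helper mirroring the sanitization applied before posting:
// strip stray Mistral control tokens (</s>, <s>, [INST], [/INST]) and trim.
const sanitizeModelOutput = (raw: string): string =>
  raw
    .replace(/<\/s>/g, "")
    .replace(/\[INST\]/g, "")
    .replace(/\[\/INST\]/g, "")
    .replace(/<s>/g, "")
    .trim();

// Example: a reply that leaked an EOS token posts as clean text.
console.log(sanitizeModelOutput("Hello @alice, nice post!</s>"));
// -> "Hello @alice, nice post!"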