3 Commits
tyler ... main

Author SHA1 Message Date
e696343a73 a crumb of changes 2025-08-12 19:07:22 +00:00
88a0710c55 update system prompt to hopefully make it easier for Lexi to understand 2025-08-04 21:08:47 +00:00
75fa4cea8b jorkin my preanits 2025-08-04 11:56:06 +00:00
2 changed files with 15 additions and 15 deletions

.gitignore (vendored)

@@ -1,6 +1,6 @@
 node_modules
 # Keep environment variables out of version control
-.env
+.env*
 *.log
 *.db
 /dist

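Note on the pattern change above: widening .env to .env* means that variant env files such as .env.local or .env.production (if any exist in this repo, which is an assumption) are kept out of version control along with .env itself.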

@@ -9,7 +9,7 @@ import {
 // import striptags from "striptags";
 import { PrismaClient } from "../generated/prisma/client.js";
 import {
-  getInstanceEmojis,
+  // getInstanceEmojis,
   deleteNotification,
   getNotifications,
   getStatusContext,
@@ -20,7 +20,7 @@ import {
   alreadyRespondedTo,
   recordPendingResponse,
   // trimInputData,
-  selectRandomEmoji,
+  // selectRandomEmoji,
   shouldContinue,
 } from "./util.js";
@@ -48,10 +48,10 @@ export const envConfig = {
 };
 const ollamaConfig: OllamaConfigOptions = {
-  temperature: 0.6,
+  temperature: 0.9,
   top_p: 0.85,
-  top_k: 40,
-  num_ctx: 8192,
+  top_k: 60,
+  num_ctx: 16384, // maximum context window for Llama 3.1
   repeat_penalty: 1.1,
 };
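For reference, the sampling configuration after this commit reads as sketched below; this is assembled only from the hunk above, and any fields outside the hunk are omitted. OllamaConfigOptions is the project's own type.

const ollamaConfig: OllamaConfigOptions = {
  temperature: 0.9, // raised from 0.6 for more varied replies
  top_p: 0.85,
  top_k: 60, // widened from 40
  num_ctx: 16384, // maximum context window for Llama 3.1 (was 8192)
  repeat_penalty: 1.1,
};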
@@ -97,7 +97,7 @@ const generateOllamaRequest = async (
   }
   // Simplified user message (remove [/INST] as it's not needed for Llama 3)
-  const userMessage = `${notification.status.account.fqn} says: ${notification.status.pleroma.content["text/plain"]}`;
+  const userMessage = `${notification.status.account.fqn} says to you: \"${notification.status.pleroma.content["text/plain"]}\".`;
   let systemContent = ollamaSystemPrompt;
   if (replyWithContext) {
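With the new template, a mention is rendered to the model roughly as in the invented example below; the account fqn and the text/plain body come from the Pleroma notification, and the backslash-escaped quotes in the source become literal quotes in the rendered string.

matty@nicecrew.digital says to you: "how is the weather today?".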
@@ -105,13 +105,13 @@ const generateOllamaRequest = async (
     systemContent = `${ollamaSystemPrompt}\n\nPrevious conversation context:\n${conversationHistory
       .map(
         (post) =>
-          `${post.account_fqn} (to ${post.mentions.join(", ")}): ${
+          `${post.account_fqn} (said to ${post.mentions.join(", ")}): ${
             post.plaintext_content
           }`
       )
       .join(
         "\n"
-      )}\nReply as if you are a party to the conversation. If '@nice-ai' is mentioned, respond directly. Prefix usernames with '@' when addressing them.`;
+      )}\nReply to the user who addressed you (you are Lexi, also known as nice-ai or nice-ai@nicecrew.digital). Examine the context of the entire conversation and make references to topics or information where appropriate. Prefix usernames with '@' when addressing them. Assume if there is no domain in the username, the domain is @nicecrew.digital (for example @matty would be @matty@nicecrew.digital)`;
   }
   // Switch to chat request format (messages array auto-handles Llama 3 template)
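Two things change in this hunk: each prior post in the context block is now labelled "(said to ...)" instead of "(to ...)", and the trailing instruction is rewritten so the model is told explicitly who it is (Lexi, also known as nice-ai@nicecrew.digital), is asked to draw on the whole thread, and is given a default-domain rule for bare usernames (for example @matty is read as @matty@nicecrew.digital, the example given in the prompt itself).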
@@ -145,16 +145,16 @@ const postReplyToStatus = async (
   ollamaResponseBody: OllamaChatResponse
 ) => {
   const { pleromaInstanceUrl, bearerToken } = envConfig;
-  const emojiList = await getInstanceEmojis();
-  let randomEmoji;
-  if (emojiList) {
-    randomEmoji = selectRandomEmoji(emojiList);
-  }
+  // const emojiList = await getInstanceEmojis();
+  // let randomEmoji;
+  // if (emojiList) {
+  //   randomEmoji = selectRandomEmoji(emojiList);
+  // }
   try {
     let mentions: string[];
     const statusBody: NewStatusBody = {
       content_type: "text/markdown",
-      status: `${ollamaResponseBody.message.content} :${randomEmoji}:`,
+      status: `${ollamaResponseBody.message.content}`,
       in_reply_to_id: notification.status.id,
     };
     if (
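Net effect of this last hunk: the instance-emoji lookup is commented out rather than deleted, and the reply no longer carries a random :emoji: suffix, so the status posted back to Pleroma amounts to the following sketch (fields outside the hunk are unchanged):

const statusBody: NewStatusBody = {
  content_type: "text/markdown",
  status: `${ollamaResponseBody.message.content}`, // model reply only, no emoji suffix
  in_reply_to_id: notification.status.id,
};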