Compare commits: 2111a47411...main

11 Commits

| SHA1 |
| --- |
| e696343a73 |
| 88a0710c55 |
| 75fa4cea8b |
| 733a41a35c |
| ed3467b213 |
| 0f178fcfa9 |
| 0bfff52fd0 |
| 8e90e8b71e |
| 566d6ae518 |
| 2ec367f203 |
| a04cb9a6ad |
Environment example file:

@@ -2,6 +2,7 @@ DATABASE_URL="file:../dev.db" # SQLite database relative to the ./prisma path
 PLEROMA_INSTANCE_URL="https://instance.tld" # Pleroma instance full URL including scheme
 PLEROMA_INSTANCE_DOMAIN="instance.tld" # used if you only want to respond to people from a particular instance
 PLEROMA_ACCOUNT_ID="" # obtained from /api/v1/accounts/{nickname} - used so we don't spam mentions when not directly addressed
+REPLY_WITH_CONTEXT="" # set to "true" or "false" depending on whether you want the bot to fetch conversation context
 ONLY_WHITELIST="true" # change to "false" if you want to accept prompts from any and all domains - *** USE WITH CAUTION ***
 WHITELISTED_DOMAINS="" # comma separated list of domains you want to allow the bot to accept prompts from (i.e. poa.st,nicecrew.digital,detroitriotcity.com,decayable.ink)
 OLLAMA_URL="http://localhost:11434" # OLLAMA connection URL
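The new REPLY_WITH_CONTEXT flag is consumed in src/main.ts (later in this compare) via a strict string comparison (`process.env.REPLY_WITH_CONTEXT === "true"`), so any value other than the exact string "true" leaves context fetching off. A minimal sketch of a slightly more forgiving reader; `parseBooleanEnv` is illustrative and not part of the repository:

```typescript
// Sketch only: a defensive reader for boolean-ish env flags.
// The repo itself simply checks `process.env.REPLY_WITH_CONTEXT === "true"`.
const parseBooleanEnv = (name: string, fallback = false): boolean => {
  const raw = process.env[name]?.trim().toLowerCase();
  if (raw === undefined || raw === "") return fallback;
  if (raw === "true" || raw === "1") return true;
  if (raw === "false" || raw === "0") return false;
  console.warn(`Unrecognized value for ${name}: "${raw}"; using ${fallback}`);
  return fallback;
};

const replyWithContext = parseBooleanEnv("REPLY_WITH_CONTEXT");
console.log(`replyWithContext = ${replyWithContext}`);
```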
.gitignore (vendored): 2 changes

@@ -1,6 +1,6 @@
 node_modules
 # Keep environment variables out of version control
-.env
+.env*
 *.log
 *.db
 /dist
package.json:

@@ -1,6 +1,6 @@
 {
   "name": "pleroma-ollama-bot",
-  "version": "1.0.7",
+  "version": "1.1.0",
   "main": "index.js",
   "scripts": {
     "start": "tsc && node -r dotenv/config dist/main.js",
src/api.ts: 35 changes

@@ -1,5 +1,5 @@
 import { envConfig, prisma } from "./main.js";
-import { PleromaEmoji, Notification } from "../types.js";
+import { PleromaEmoji, Notification, ContextResponse } from "../types.js";
 
 const getNotifications = async () => {
   const { bearerToken, pleromaInstanceUrl } = envConfig;
@@ -22,6 +22,32 @@ const getNotifications = async () => {
   }
 };
 
+const getStatusContext = async (statusId: string) => {
+  const { bearerToken, pleromaInstanceUrl } = envConfig;
+  try {
+    const response = await fetch(
+      `${pleromaInstanceUrl}/api/v1/statuses/${statusId}/context`,
+      {
+        method: "GET",
+        headers: {
+          Authorization: `Bearer ${bearerToken}`,
+        },
+      }
+    );
+    if (!response.ok) {
+      throw new Error(
+        `Could not get conversation context: ${response.status} - ${response.statusText}`
+      );
+    }
+    const data: ContextResponse = await response.json();
+    return data;
+  } catch (error: unknown) {
+    if (error instanceof Error) {
+      throw new Error(error.message);
+    }
+  }
+};
+
 const getInstanceEmojis = async () => {
   const { bearerToken, pleromaInstanceUrl } = envConfig;
   try {
@@ -72,4 +98,9 @@ const deleteNotification = async (notification: Notification) => {
   }
 };
 
-export { deleteNotification, getInstanceEmojis, getNotifications };
+export {
+  deleteNotification,
+  getInstanceEmojis,
+  getNotifications,
+  getStatusContext,
+};
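getStatusContext fetches the Mastodon-compatible context of a status (the thread's ancestors and descendants). A short usage sketch that flattens the ancestors into the plain-text shape handed to the model, mirroring what generateOllamaRequest in src/main.ts does below; the function name and placement are illustrative only:

```typescript
import { getStatusContext } from "./api.js";
import { PostAncestorsForModel } from "../types.js";

// Sketch: flatten a thread's ancestors into the structure the model prompt is built from.
const buildConversationHistory = async (
  statusId: string
): Promise<PostAncestorsForModel[]> => {
  const context = await getStatusContext(statusId);
  if (!context?.ancestors) {
    throw new Error("Unable to obtain post context ancestors.");
  }
  return context.ancestors.map((ancestor) => ({
    account_fqn: ancestor.account.fqn,
    mentions: ancestor.mentions.map((mention) => mention.acct),
    plaintext_content: ancestor.pleroma.content["text/plain"],
  }));
};
```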
src/main.ts: 123 changes
@@ -2,25 +2,26 @@ import {
   NewStatusBody,
   Notification,
   OllamaConfigOptions,
-  // OllamaChatRequest,
-  // OllamaChatResponse,
-  OllamaRequest,
-  OllamaResponse,
+  OllamaChatRequest,
+  OllamaChatResponse,
+  PostAncestorsForModel,
 } from "../types.js";
+// import striptags from "striptags";
 import { PrismaClient } from "../generated/prisma/client.js";
 import {
-  getInstanceEmojis,
+  // getInstanceEmojis,
   deleteNotification,
   getNotifications,
+  getStatusContext,
 } from "./api.js";
 import { storeUserData, storePromptData } from "./prisma.js";
 import {
   isFromWhitelistedDomain,
   alreadyRespondedTo,
   recordPendingResponse,
-  trimInputData,
-  selectRandomEmoji,
+  // trimInputData,
+  // selectRandomEmoji,
   shouldContinue,
 } from "./util.js";
 
 export const prisma = new PrismaClient();
@@ -43,13 +44,14 @@ export const envConfig = {
     ? parseInt(process.env.RANDOM_POST_INTERVAL)
     : 3600000,
   botAccountId: process.env.PLEROMA_ACCOUNT_ID,
+  replyWithContext: process.env.REPLY_WITH_CONTEXT === "true" ? true : false,
 };
 
 const ollamaConfig: OllamaConfigOptions = {
-  temperature: 0.6,
+  temperature: 0.9,
   top_p: 0.85,
-  top_k: 40,
-  num_ctx: 2048,
+  top_k: 60,
+  num_ctx: 16384, // maximum context window for Llama 3.1
   repeat_penalty: 1.1,
 };
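A side note on the tuning block above: Llama 3.1 models advertise context windows up to 128K tokens, so the num_ctx comment is better read as a practical, memory-bound cap than as the model's maximum. If that cap needs to vary per deployment, it could follow the same pattern envConfig already uses for RANDOM_POST_INTERVAL; a sketch only, where OLLAMA_NUM_CTX is a hypothetical variable this repo does not define:

```typescript
// Sketch only: make num_ctx deployment-configurable, mirroring how envConfig
// parses RANDOM_POST_INTERVAL. OLLAMA_NUM_CTX is hypothetical, not a repo variable.
const numCtx = process.env.OLLAMA_NUM_CTX
  ? parseInt(process.env.OLLAMA_NUM_CTX)
  : 16384;

const ollamaConfig = {
  temperature: 0.9,
  top_p: 0.85,
  top_k: 60,
  num_ctx: numCtx,
  repeat_penalty: 1.1,
};
console.log(`Using num_ctx = ${ollamaConfig.num_ctx}`);
```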
@@ -58,26 +60,16 @@ const ollamaConfig: OllamaConfigOptions = {
 
 const generateOllamaRequest = async (
   notification: Notification
-): Promise<OllamaResponse | undefined> => {
+): Promise<OllamaChatResponse | undefined> => {
   const {
     whitelistOnly,
     ollamaModel,
     ollamaSystemPrompt,
     ollamaUrl,
-    botAccountId,
+    replyWithContext,
   } = envConfig;
   try {
-    console.log(trimInputData(notification.status.content));
-    if (
-      // striptags(notification.status.content).includes("!prompt") &&
-      !notification.status.account.bot && // sanity check, sort of
-      notification.type === "mention" &&
-      (notification.status.in_reply_to_account_id === botAccountId ||
-        notification.status.in_reply_to_account_id === null) &&
-      trimInputData(notification.status.content).split(" ").includes("Lexi")
-      // only reply to mentions when the bot is the direct recipient or when an @ is at the top level of a conversation chain, or when the AI is @ directly
-      // notification.status.visibility !== "private" // for safety, let's only respond to public messages
-    ) {
+    if (shouldContinue(notification)) {
       if (whitelistOnly && !isFromWhitelistedDomain(notification)) {
         await deleteNotification(notification);
         return;
@@ -87,22 +79,59 @@ const generateOllamaRequest = async (
       }
       await recordPendingResponse(notification);
       await storeUserData(notification);
-      // console.log(trimInputData(notification.status.content));
-      const ollamaRequestBody: OllamaRequest = {
+      let conversationHistory: PostAncestorsForModel[] = [];
+      if (replyWithContext) {
+        const contextPosts = await getStatusContext(notification.status.id);
+        if (!contextPosts?.ancestors || !contextPosts) {
+          throw new Error(`Unable to obtain post context ancestors.`);
+        }
+        conversationHistory = contextPosts.ancestors.map((ancestor) => {
+          const mentions = ancestor.mentions.map((mention) => mention.acct);
+          return {
+            account_fqn: ancestor.account.fqn,
+            mentions,
+            plaintext_content: ancestor.pleroma.content["text/plain"],
+          };
+        });
+        // console.log(conversationHistory);
+      }
+
+      // Simplified user message (remove [/INST] as it's not needed for Llama 3)
+      const userMessage = `${notification.status.account.fqn} says to you: \"${notification.status.pleroma.content["text/plain"]}\".`;
+
+      let systemContent = ollamaSystemPrompt;
+      if (replyWithContext) {
+        // Simplified context instructions (avoid heavy JSON; summarize for clarity)
+        systemContent = `${ollamaSystemPrompt}\n\nPrevious conversation context:\n${conversationHistory
+          .map(
+            (post) =>
+              `${post.account_fqn} (said to ${post.mentions.join(", ")}): ${
+                post.plaintext_content
+              }`
+          )
+          .join(
+            "\n"
+          )}\nReply to the user who addressed you (you are Lexi, also known as nice-ai or nice-ai@nicecrew.digital). Examine the context of the entire conversation and make references to topics or information where appropriate. Prefix usernames with '@' when addressing them. Assume if there is no domain in the username, the domain is @nicecrew.digital (for example @matty would be @matty@nicecrew.digital)`;
+      }
+
+      // Switch to chat request format (messages array auto-handles Llama 3 template)
+      const ollamaRequestBody: OllamaChatRequest = {
         model: ollamaModel,
-        // prompt: trimInputData(notification.status.content),
-        prompt: `${notification.status.account.fqn} says: ${trimInputData(
-          notification.status.content
-        )}`,
-        system: ollamaSystemPrompt,
+        messages: [
+          { role: "system", content: systemContent as string },
+          { role: "user", content: userMessage },
+        ],
         stream: false,
         options: ollamaConfig,
       };
-      const response = await fetch(`${ollamaUrl}/api/generate`, {
+
+      // Change endpoint to /api/chat
+      const response = await fetch(`${ollamaUrl}/api/chat`, {
         method: "POST",
         body: JSON.stringify(ollamaRequestBody),
       });
-      const ollamaResponse: OllamaResponse = await response.json();
+      const ollamaResponse: OllamaChatResponse = await response.json();
+
       await storePromptData(notification, ollamaResponse);
       return ollamaResponse;
     }
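The heart of the change above is the move from Ollama's /api/generate to /api/chat: the request now carries a messages array (system plus user) and the reply arrives under message.content instead of response. A self-contained sketch of that round trip against a local Ollama instance; the inline ChatMessage/ChatResponse interfaces are trimmed stand-ins for the repo's OllamaChatRequest/OllamaChatResponse, whose full declarations are not shown in this compare, and the model name is a placeholder:

```typescript
// Minimal stand-in types; the repo's own declarations in types.d.ts are assumed to be richer.
interface ChatMessage {
  role: "system" | "user" | "assistant";
  content: string;
}
interface ChatResponse {
  model: string;
  message: ChatMessage;
  done: boolean;
}

const askOllama = async (prompt: string): Promise<string> => {
  const response = await fetch("http://localhost:11434/api/chat", {
    method: "POST",
    body: JSON.stringify({
      model: "llama3.1", // placeholder model name
      messages: [
        { role: "system", content: "You are a helpful assistant." },
        { role: "user", content: prompt },
      ],
      stream: false, // one JSON object instead of a stream of chunks
    }),
  });
  if (!response.ok) throw new Error(`Ollama error: ${response.status}`);
  const data: ChatResponse = await response.json();
  return data.message.content;
};

askOllama("Say something random.").then(console.log).catch(console.error);
```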
@@ -113,19 +142,19 @@ const generateOllamaRequest = async (
 
 const postReplyToStatus = async (
   notification: Notification,
-  ollamaResponseBody: OllamaResponse
+  ollamaResponseBody: OllamaChatResponse
 ) => {
   const { pleromaInstanceUrl, bearerToken } = envConfig;
-  const emojiList = await getInstanceEmojis();
-  let randomEmoji;
-  if (emojiList) {
-    randomEmoji = selectRandomEmoji(emojiList);
-  }
+  // const emojiList = await getInstanceEmojis();
+  // let randomEmoji;
+  // if (emojiList) {
+  //   randomEmoji = selectRandomEmoji(emojiList);
+  // }
   try {
     let mentions: string[];
     const statusBody: NewStatusBody = {
       content_type: "text/markdown",
-      status: `${ollamaResponseBody.response} :${randomEmoji}:`,
+      status: `${ollamaResponseBody.message.content}`,
       in_reply_to_id: notification.status.id,
     };
     if (
@@ -165,26 +194,28 @@ const createTimelinePost = async () => {
     ollamaUrl,
     pleromaInstanceUrl,
   } = envConfig;
-  const ollamaRequestBody: OllamaRequest = {
+  const ollamaRequestBody: OllamaChatRequest = {
     model: ollamaModel,
-    prompt: "Say something random.",
-    system: ollamaSystemPrompt,
+    messages: [
+      { role: "system", content: ollamaSystemPrompt as string },
+      { role: "user", content: "Say something random." },
+    ],
     stream: false,
-    // options: ollamaConfig,
+    options: ollamaConfig,
   };
   try {
-    const response = await fetch(`${ollamaUrl}/api/generate`, {
+    const response = await fetch(`${ollamaUrl}/api/chat`, {
       method: "POST",
       body: JSON.stringify(ollamaRequestBody),
     });
     if (!response.ok)
       throw new Error("Error generating ad-hoc Ollama response");
 
-    const ollamaResponse: OllamaResponse = await response.json();
+    const ollamaResponse: OllamaChatResponse = await response.json();
 
     const newStatusBody: NewStatusBody = {
       content_type: "text/markdown",
-      status: ollamaResponse.response,
+      status: ollamaResponse.message.content,
     };
 
     const pleromaResponse = await fetch(
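A review-style note: createTimelinePost above now checks response.ok before parsing, but generateOllamaRequest earlier in the file still calls response.json() unconditionally. A sketch of the same guard factored into a helper; postChat and its signature are illustrative, not code from this compare:

```typescript
// Sketch: the same response.ok guard createTimelinePost uses, as a reusable helper.
const postChat = async <T>(ollamaUrl: string, body: unknown): Promise<T> => {
  const response = await fetch(`${ollamaUrl}/api/chat`, {
    method: "POST",
    body: JSON.stringify(body),
  });
  if (!response.ok) {
    throw new Error(
      `Ollama chat request failed: ${response.status} - ${response.statusText}`
    );
  }
  return (await response.json()) as T;
};
```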
src/prisma.ts:

@@ -1,16 +1,16 @@
-import { Notification, OllamaResponse } from "../types.js";
+import { Notification, OllamaChatResponse } from "../types.js";
 import { trimInputData } from "./util.js";
 import { prisma } from "./main.js";
 
 const storePromptData = async (
   notification: Notification,
-  ollamaResponseBody: OllamaResponse
+  ollamaResponseBody: OllamaChatResponse
 ) => {
   try {
     await prisma.response.updateMany({
       where: { pleromaNotificationId: notification.id },
       data: {
-        response: ollamaResponseBody.response,
+        response: ollamaResponseBody.message.content,
         request: trimInputData(notification.status.content),
         to: notification.account.fqn,
         isProcessing: false,
src/util.ts: 29 changes

@@ -34,6 +34,34 @@ const recordPendingResponse = async (notification: Notification) => {
   }
 };
 
+const shouldContinue = (notification: Notification) => {
+  // wow this is bad
+  try {
+    const { botAccountId } = envConfig;
+    const statusContent = trimInputData(notification.status.content);
+    if (
+      // notification.status.visibility !== "private" &&
+      !notification.account.bot &&
+      notification.type === "mention"
+    ) {
+      if (notification.status.in_reply_to_account_id === botAccountId) {
+        return true;
+      } else if (
+        notification.status.in_reply_to_account_id !== botAccountId &&
+        statusContent.includes("Lexi")
+      ) {
+        return true;
+      } else {
+        return false;
+      }
+    }
+  } catch (error: unknown) {
+    if (error instanceof Error) {
+      throw new Error(error.message);
+    }
+  }
+};
+
 const isFromWhitelistedDomain = (notification: Notification): boolean => {
   try {
     const domain = notification.status.account.fqn.split("@")[1];
@@ -76,4 +104,5 @@ export {
   trimInputData,
   recordPendingResponse,
   isFromWhitelistedDomain,
+  shouldContinue,
 };
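shouldContinue above only returns a boolean when the mention branch matches; otherwise it falls through and returns undefined, and its own comment concedes the shape is awkward. An equivalent, always-boolean sketch follows. This is a reviewer suggestion rather than code from the compare; the imports are shown only to keep the sketch self-contained (inside src/util.ts they already exist):

```typescript
import { Notification } from "../types.js";
import { envConfig } from "./main.js";
import { trimInputData } from "./util.js";

// Sketch: same decision table as shouldContinue, collapsed into one boolean expression.
const shouldContinueSimplified = (notification: Notification): boolean => {
  const { botAccountId } = envConfig;
  const statusContent = trimInputData(notification.status.content);
  const isDirectReplyToBot =
    notification.status.in_reply_to_account_id === botAccountId;
  const isAddressedByName = statusContent.includes("Lexi");
  return (
    !notification.account.bot &&
    notification.type === "mention" &&
    (isDirectReplyToBot || isAddressedByName)
  );
};
```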
Systemd unit file:

@@ -5,7 +5,7 @@ After=network-online.target
 
 [Service]
 Type=simple
-User=USERNAME_HERE
+User=bot
 Restart=always
 RestartSec=3
 ExecStart=/usr/bin/screen -L -DmS pleroma-ollama-bot /home/bot/.nvm/versions/node/v22.11.0/bin/npm run start
types.d.ts (vendored): 45 changes

@@ -6,6 +6,41 @@ export interface Notification {
   created_at: string;
 }
 
+export interface ContextResponse {
+  ancestors: ContextObject[];
+  descendents: ContextObject[];
+}
+
+export interface PostAncestorsForModel {
+  account_fqn: string;
+  mentions: string[];
+  plaintext_content: string;
+}
+
+interface ContextAccountObject {
+  acct: string;
+  avatar: string;
+  bot: boolean;
+  display_name: string;
+  followers_count: number;
+  following_count: number;
+  fqn: string;
+  id: string;
+}
+
+export interface ContextObject {
+  content: string;
+  id: string;
+  in_reply_to_account_id: string | null;
+  in_reply_to_id: string | null;
+  media_attachments: string[];
+  mentions: Mention[];
+  pleroma: PleromaObjectInResponse;
+  visibility: "public" | "private" | "unlisted";
+  uri: string;
+  account: ContextAccountObject;
+}
 export interface NewStatusBody {
   content_type: "application/json" | "text/markdown";
   in_reply_to_id?: string;
@@ -94,9 +129,19 @@ export interface Status {
   in_reply_to_account_id: string; // account ID of the reply
   in_reply_to_id: string; // status that the user has replied to
   mentions: Mention[]; // array of mentions
+  pleroma: PleromaObjectInResponse;
   visibility: "private" | "public" | "unlisted";
 }
 
+interface PleromaObjectInResponse {
+  content: { "text/plain": string };
+  context: string;
+  conversation_id: number;
+  direct_conversation_id: number | null;
+  local: boolean;
+  in_reply_to_account_acct: string;
+}
+
 export interface Mention {
   acct: string;
   id: string;
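One thing worth double-checking: the Mastodon-compatible context endpoint (/api/v1/statuses/:id/context) names its two arrays ancestors and descendants, so the descendents field in ContextResponse will likely stay undefined at runtime. Only ancestors is read anywhere in this compare, so nothing breaks yet; a small defensive sketch in case replies are needed later (the helper name is illustrative):

```typescript
import { ContextResponse, ContextObject } from "../types.js";

// Sketch: tolerate either spelling when reading replies out of a context response.
// The cast is only needed because the declared type spells the field "descendents".
const getThreadReplies = (context: ContextResponse): ContextObject[] => {
  const raw = context as ContextResponse & { descendants?: ContextObject[] };
  return raw.descendants ?? raw.descendents ?? [];
};
```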