Compare commits

42 Commits
add-websoc ... 2111a47411

Commits (SHA1):
2111a47411
11c1332757
aaf4adcf06
b6ad54f40a
2f3d16dbc5
150e2d638e
0c7c176bae
c3d4f1b1ff
57ab59d342
71ae54930c
3466a984ac
cbf6b1d3eb
e2ce397118
9a7cd118b3
7a60a672d4
419285487a
09722507c6
41317301bf
00a2eb63bc
6c8f779294
ff5c7506ff
5c51acc8d1
d4ee457d74
b8f6023029
ea5e783ee5
eb5282a50d
9ee3663890
d85acd2179
856cc84208
ca4643092f
b4b656f808
92f1366574
a64afa7e7b
d63aa365e7
3759c5aa23
1a151b197b
70180c5d5f
dac037809c
6088a2cbd3
ed8d148d0a
379099dc7a
c0ed38ac1a
@@ -1,9 +1,12 @@
DATABASE_URL="file:../dev.db" # SQLite database relative to the ./prisma path
PLEROMA_INSTANCE_URL="https://instance.tld" # Pleroma instance full URL, including scheme
PLEROMA_INSTANCE_DOMAIN="instance.tld" # used if you only want to respond to people from a particular instance
ONLY_LOCAL_REPLIES="true" # reply only to users local to your instance
PLEROMA_ACCOUNT_ID="" # obtained from /api/v1/accounts/{nickname} - used so we don't spam mentions when not directly addressed
ONLY_WHITELIST="true" # change to "false" if you want to accept prompts from any and all domains - *** USE WITH CAUTION ***
WHITELISTED_DOMAINS="" # comma-separated list of domains you want to allow the bot to accept prompts from (e.g. poa.st,nicecrew.digital,detroitriotcity.com,decayable.ink)
OLLAMA_URL="http://localhost:11434" # Ollama connection URL
OLLAMA_SYSTEM_PROMPT="" # system prompt - used to help tune the responses from the AI
OLLAMA_MODEL="" # Ollama model for responses, e.g. dolphin-mistral:latest
INSTANCE_BEARER_TOKEN="" # instance auth/bearer token (check the "verify_credentials" endpoint request headers in Chrome DevTools if on Soapbox)
SOAPBOX_WS_PROTOCOL="" # the header required to authenticate to the websocket - no idea why Soapbox does it like this; you can get it from the request headers for the socket in Chrome DevTools
FETCH_INTERVAL="" # interval for fetching new notifications from the instance, in milliseconds; at least 15000 is recommended
RANDOM_POST_INTERVAL="" # interval for ad-hoc posts, in milliseconds
INSTANCE_BEARER_TOKEN="" # instance auth/bearer token (check the "verify_credentials" endpoint request headers in Chrome DevTools if on Soapbox)
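For reference, a minimal sketch (not part of this diff) of how these variables end up being consumed at startup; it mirrors the `envConfig` object added in `src/main.ts` further down. The variable names come from the file above; the fallback defaults shown here are assumptions.

```ts
import "dotenv/config"; // dotenv is already a dependency; this loads .env into process.env

// Assumed shape, mirroring the envConfig object in src/main.ts
const envConfig = {
  pleromaInstanceUrl: process.env.PLEROMA_INSTANCE_URL || "",
  pleromaInstanceDomain: process.env.PLEROMA_INSTANCE_DOMAIN || "",
  whitelistOnly: process.env.ONLY_WHITELIST === "true",
  whitelistedDomains: process.env.WHITELISTED_DOMAINS
    ? process.env.WHITELISTED_DOMAINS.split(",")
    : [process.env.PLEROMA_INSTANCE_DOMAIN],
  ollamaUrl: process.env.OLLAMA_URL || "",
  ollamaModel: process.env.OLLAMA_MODEL || "",
  fetchInterval: process.env.FETCH_INTERVAL
    ? parseInt(process.env.FETCH_INTERVAL)
    : 15000, // assumed default: poll every 15 seconds if unset
  bearerToken: process.env.INSTANCE_BEARER_TOKEN || "",
};

console.log(
  `Fetching notifications from ${envConfig.pleromaInstanceDomain} every ${
    envConfig.fetchInterval / 1000
  }s`
);
```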
.gitignore (vendored): 1 change

@@ -4,5 +4,6 @@ node_modules
*.log
*.db
/dist
screenlog*

/generated/prisma
@@ -1,15 +1,13 @@
## Pleroma -> Ollama Bot Setup

1. Clone project
2. Install npm 22.11.0 if you don't have it already
2. Install Node `v22.11.0` if you don't have it already
   * If using `nvm`, just `nvm install 22.11.0` and then `nvm use 22.11.0` if necessary
3. `cd` into the project directory
4. Run `npm install`
6. Run `npx prisma migrate dev --name init`
7. To run the software on a cronjob, use `npm run once`
8. To run continuously, use `npm run ws`
7. To start, run `npm run start`

### Database Migrations

If you add stuff to the schema, follow the [Prisma development workflow](https://www.prisma.io/docs/orm/prisma-migrate/workflows/development-and-production). This will apply the new schema to the database and generate a new Prisma client with type safety.

Setting as a system service will come at some point, or someone could contribute if they wanted.
package-lock.json (generated): 37 changes

@@ -1,22 +1,21 @@
{
  "name": "pleroma-ollama-bot",
  "version": "1.0.0",
  "version": "1.0.5",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "name": "pleroma-ollama-bot",
      "version": "1.0.0",
      "version": "1.0.5",
      "dependencies": {
        "@prisma/client": "^6.10.1",
        "@types/node": "^24.0.5",
        "dotenv": "^17.0.0",
        "striptags": "^3.2.0",
        "ts-node": "^10.9.2",
        "typescript": "^5.8.3",
        "ws": "^8.18.3"
        "typescript": "^5.8.3"
      },
      "devDependencies": {
        "@types/node": "^24.0.10",
        "@types/ws": "^8.18.1",
        "prisma": "^6.10.1"
      }
@@ -165,10 +164,9 @@
      "license": "MIT"
    },
    "node_modules/@types/node": {
      "version": "24.0.5",
      "resolved": "https://registry.npmjs.org/@types/node/-/node-24.0.5.tgz",
      "integrity": "sha512-CXEG9E7GCTOZIre0WdDznmnhvF7xi7AmnP/zF496trmLiqlfdtxp9nPRgLVqfmJ8jgtcKcs0EcvOu2yDZSuvTg==",
      "license": "MIT",
      "version": "24.0.10",
      "resolved": "https://registry.npmjs.org/@types/node/-/node-24.0.10.tgz",
      "integrity": "sha512-ENHwaH+JIRTDIEEbDK6QSQntAYGtbvdDXnMXnZaZ6k13Du1dPMmprkEHIL7ok2Wl2aZevetwTAb5S+7yIF+enA==",
      "dependencies": {
        "undici-types": "~7.8.0"
      }
@@ -356,27 +354,6 @@
      "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==",
      "license": "MIT"
    },
    "node_modules/ws": {
      "version": "8.18.3",
      "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz",
      "integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==",
      "license": "MIT",
      "engines": {
        "node": ">=10.0.0"
      },
      "peerDependencies": {
        "bufferutil": "^4.0.1",
        "utf-8-validate": ">=5.0.2"
      },
      "peerDependenciesMeta": {
        "bufferutil": {
          "optional": true
        },
        "utf-8-validate": {
          "optional": true
        }
      }
    },
    "node_modules/yn": {
      "version": "3.1.1",
      "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz",
@@ -1,6 +1,6 @@
{
  "name": "pleroma-ollama-bot",
  "version": "1.0.0",
  "version": "1.0.7",
  "main": "index.js",
  "scripts": {
    "start": "tsc && node -r dotenv/config dist/main.js",
@@ -9,17 +9,16 @@
  "type": "module",
  "keywords": [],
  "author": "NiceCrew",
  "description": "A simple bot that responds to activities from Pleroma instances using Ollama's API.",
  "description": "A simple bot that responds to activities from Pleroma instances using Ollama's API at a configurable interval.",
  "dependencies": {
    "@prisma/client": "^6.10.1",
    "@types/node": "^24.0.5",
    "dotenv": "^17.0.0",
    "striptags": "^3.2.0",
    "ts-node": "^10.9.2",
    "typescript": "^5.8.3",
    "ws": "^8.18.3"
    "typescript": "^5.8.3"
  },
  "devDependencies": {
    "@types/node": "^24.0.10",
    "@types/ws": "^8.18.1",
    "prisma": "^6.10.1"
  }
@@ -0,0 +1,18 @@
-- RedefineTables
PRAGMA defer_foreign_keys=ON;
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Response" (
    "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
    "pleromaNotificationId" TEXT NOT NULL DEFAULT 'null',
    "to" TEXT NOT NULL,
    "request" TEXT,
    "response" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "processedAt" DATETIME,
    "isProcessing" BOOLEAN NOT NULL DEFAULT true
);
INSERT INTO "new_Response" ("createdAt", "id", "pleromaNotificationId", "processedAt", "request", "response", "to") SELECT "createdAt", "id", "pleromaNotificationId", "processedAt", "request", "response", "to" FROM "Response";
DROP TABLE "Response";
ALTER TABLE "new_Response" RENAME TO "Response";
PRAGMA foreign_keys=ON;
PRAGMA defer_foreign_keys=OFF;
@@ -0,0 +1,18 @@
-- RedefineTables
PRAGMA defer_foreign_keys=ON;
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Response" (
    "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
    "pleromaNotificationId" TEXT NOT NULL DEFAULT 'null',
    "to" TEXT NOT NULL DEFAULT 'null',
    "request" TEXT NOT NULL DEFAULT 'null',
    "response" TEXT NOT NULL DEFAULT 'null',
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "processedAt" DATETIME,
    "isProcessing" BOOLEAN NOT NULL DEFAULT true
);
INSERT INTO "new_Response" ("createdAt", "id", "isProcessing", "pleromaNotificationId", "processedAt", "request", "response", "to") SELECT "createdAt", "id", "isProcessing", "pleromaNotificationId", "processedAt", coalesce("request", 'null') AS "request", coalesce("response", 'null') AS "response", "to" FROM "Response";
DROP TABLE "Response";
ALTER TABLE "new_Response" RENAME TO "Response";
PRAGMA foreign_keys=ON;
PRAGMA defer_foreign_keys=OFF;
@@ -0,0 +1,19 @@
-- RedefineTables
PRAGMA defer_foreign_keys=ON;
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Response" (
    "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
    "pleromaNotificationId" TEXT NOT NULL DEFAULT 'null',
    "to" TEXT NOT NULL DEFAULT 'null',
    "request" TEXT NOT NULL DEFAULT 'null',
    "response" TEXT NOT NULL DEFAULT 'null',
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "processedAt" DATETIME,
    "isProcessing" BOOLEAN NOT NULL DEFAULT true,
    "isComplete" BOOLEAN NOT NULL DEFAULT true
);
INSERT INTO "new_Response" ("createdAt", "id", "isProcessing", "pleromaNotificationId", "processedAt", "request", "response", "to") SELECT "createdAt", "id", "isProcessing", "pleromaNotificationId", "processedAt", "request", "response", "to" FROM "Response";
DROP TABLE "Response";
ALTER TABLE "new_Response" RENAME TO "Response";
PRAGMA foreign_keys=ON;
PRAGMA defer_foreign_keys=OFF;
@@ -14,11 +14,13 @@ datasource db {
model Response {
  id                    Int       @id @default(autoincrement())
  pleromaNotificationId String    @default("null")
  to                    String
  request               String?
  response              String?
  to                    String    @default("null")
  request               String    @default("null")
  response              String    @default("null")
  createdAt             DateTime  @default(now())
  processedAt           DateTime?
  isProcessing          Boolean   @default(true)
  isComplete            Boolean   @default(true)
}

model User {
src/api.ts (new file): 75 changes

@@ -0,0 +1,75 @@
import { envConfig, prisma } from "./main.js";
import { PleromaEmoji, Notification } from "../types.js";

const getNotifications = async () => {
  const { bearerToken, pleromaInstanceUrl } = envConfig;
  try {
    const request = await fetch(
      `${pleromaInstanceUrl}/api/v1/notifications?types[]=mention`,
      {
        method: "GET",
        headers: {
          Authorization: `Bearer ${bearerToken}`,
        },
      }
    );

    const notifications: Notification[] = await request.json();

    return notifications;
  } catch (error: any) {
    throw new Error(error.message);
  }
};

const getInstanceEmojis = async () => {
  const { bearerToken, pleromaInstanceUrl } = envConfig;
  try {
    const request = await fetch(`${pleromaInstanceUrl}/api/v1/pleroma/emoji`, {
      method: "GET",
      headers: {
        Authorization: `Bearer ${bearerToken}`,
      },
    });
    if (!request.ok) {
      console.error(`Emoji GET failed: ${request.status}`);
      return;
    }
    const emojis: PleromaEmoji[] = await request.json();
    return Object.keys(emojis);
  } catch (error: any) {
    console.error(`Could not fetch emojis: ${error.message}`);
  }
};

const deleteNotification = async (notification: Notification) => {
  const { pleromaInstanceUrl, bearerToken } = envConfig;
  try {
    if (!notification.id) {
      return;
    }
    await prisma.response.updateMany({
      // this is probably not the best way to do this, but since we may have duplicate notifications, we have to update all of them - probably won't scale (lmao)
      where: { pleromaNotificationId: notification.id },
      data: { isProcessing: false },
    });
    const response = await fetch(
      `${pleromaInstanceUrl}/api/v1/notifications/${notification.id}/dismiss`,
      {
        method: "POST",
        headers: {
          Authorization: `Bearer ${bearerToken}`,
        },
      }
    );
    if (!response.ok) {
      console.error(
        `Could not delete notification ID: ${notification.id}\nReason: ${response.status} - ${response.statusText}`
      );
    }
  } catch (error: any) {
    throw new Error(error.message);
  }
};

export { deleteNotification, getInstanceEmojis, getNotifications };
src/main.ts: 322 changes

@@ -1,125 +1,104 @@
import {
  OllamaRequest,
  OllamaResponse,
  NewStatusBody,
  Notification,
  WSEvent,
  OllamaConfigOptions,
  // OllamaChatRequest,
  // OllamaChatResponse,
  OllamaRequest,
  OllamaResponse,
} from "../types.js";
import striptags from "striptags";
// import striptags from "striptags";
import { PrismaClient } from "../generated/prisma/client.js";
import { createWebsocket } from "./websocket.js";
import {
  getInstanceEmojis,
  deleteNotification,
  getNotifications,
} from "./api.js";
import { storeUserData, storePromptData } from "./prisma.js";
import {
  isFromWhitelistedDomain,
  alreadyRespondedTo,
  recordPendingResponse,
  trimInputData,
  selectRandomEmoji,
} from "./util.js";

const prisma = new PrismaClient();
export const prisma = new PrismaClient();

// const getNotifications = async () => {
//   try {
//     const request = await fetch(
//       `${process.env.PLEROMA_INSTANCE_URL}/api/v1/notifications?types[]=mention`,
//       {
//         method: "GET",
//         headers: {
//           Authorization: `Bearer ${process.env.INSTANCE_BEARER_TOKEN}`,
//         },
//       }
//     );

//     const notifications: Notification[] = await request.json();

//     return notifications;
//   } catch (error: any) {
//     throw new Error(error.message);
//   }
// };

// const notifications = await getNotifications();

const storeUserData = async (notification: Notification): Promise<void> => {
  try {
    await prisma.user.upsert({
      where: { userFqn: notification.status.account.fqn },
      update: {
        lastRespondedTo: new Date(Date.now()),
      },
      create: {
        userFqn: notification.status.account.fqn,
        lastRespondedTo: new Date(Date.now()),
      },
    });
  } catch (error: any) {
    throw new Error(error.message);
  }
export const envConfig = {
  pleromaInstanceUrl: process.env.PLEROMA_INSTANCE_URL || "",
  pleromaInstanceDomain: process.env.PLEROMA_INSTANCE_DOMAIN || "",
  whitelistOnly: process.env.ONLY_WHITELIST === "true" ? true : false,
  whitelistedDomains: process.env.WHITELISTED_DOMAINS
    ? process.env.WHITELISTED_DOMAINS.split(",")
    : [process.env.PLEROMA_INSTANCE_DOMAIN],
  ollamaUrl: process.env.OLLAMA_URL || "",
  ollamaSystemPrompt: process.env.OLLAMA_SYSTEM_PROMPT,
  ollamaModel: process.env.OLLAMA_MODEL || "",
  fetchInterval: process.env.FETCH_INTERVAL
    ? parseInt(process.env.FETCH_INTERVAL)
    : 15000,
  bearerToken: process.env.INSTANCE_BEARER_TOKEN || "",
  adHocPostInterval: process.env.RANDOM_POST_INTERVAL
    ? parseInt(process.env.RANDOM_POST_INTERVAL)
    : 3600000,
  botAccountId: process.env.PLEROMA_ACCOUNT_ID,
};

const alreadyRespondedTo = async (
  notification: Notification
): Promise<boolean> => {
  try {
    const duplicate = await prisma.response.findFirst({
      where: { pleromaNotificationId: notification.status.id },
    });
    if (duplicate) {
      return true;
    }
    return false;
  } catch (error: any) {
    throw new Error(error.message);
  }
const ollamaConfig: OllamaConfigOptions = {
  temperature: 0.6,
  top_p: 0.85,
  top_k: 40,
  num_ctx: 2048,
  repeat_penalty: 1.1,
};

const storePromptData = async (
  notification: Notification,
  ollamaResponseBody: OllamaResponse
) => {
  try {
    await prisma.response.create({
      data: {
        response: ollamaResponseBody.response,
        request: striptags(notification.status.content),
        to: notification.account.fqn,
        pleromaNotificationId: notification.status.id,
      },
    });
  } catch (error: any) {
    throw new Error(error.message);
  }
};

const trimInputData = (input: string) => {
  const strippedInput = striptags(input);
  const split = strippedInput.split(" ");
  const promptStringIndex = split.indexOf("!prompt");
  return split.slice(promptStringIndex + 1).join(" "); // returns everything after the !prompt
};
// this could be helpful
// https://replicate.com/blog/how-to-prompt-llama

const generateOllamaRequest = async (
  notification: Notification
): Promise<OllamaResponse | undefined> => {
  const {
    whitelistOnly,
    ollamaModel,
    ollamaSystemPrompt,
    ollamaUrl,
    botAccountId,
  } = envConfig;
  try {
    console.log(trimInputData(notification.status.content));
    if (
      striptags(notification.status.content).includes("!prompt") &&
      !notification.status.account.bot
      // striptags(notification.status.content).includes("!prompt") &&
      !notification.status.account.bot && // sanity check, sort of
      notification.type === "mention" &&
      (notification.status.in_reply_to_account_id === botAccountId ||
        notification.status.in_reply_to_account_id === null) &&
      trimInputData(notification.status.content).split(" ").includes("Lexi")
      // only reply to mentions when the bot is the direct recipient or when an @ is at the top level of a conversation chain, or when the AI is @ directly
      // notification.status.visibility !== "private" // for safety, let's only respond to public messages
    ) {
      if (
        process.env.ONLY_LOCAL_REPLIES === "true" &&
        !notification.status.account.fqn.includes(
          `@${process.env.PLEROMA_INSTANCE_DOMAIN}`
        )
      ) {
      if (whitelistOnly && !isFromWhitelistedDomain(notification)) {
        await deleteNotification(notification);
        return;
      }
      if (await alreadyRespondedTo(notification)) {
        return;
      }
      await recordPendingResponse(notification);
      await storeUserData(notification);
      // console.log(trimInputData(notification.status.content));
      const ollamaRequestBody: OllamaRequest = {
        model: process.env.OLLAMA_MODEL as string,
        system: process.env.OLLAMA_SYSTEM_PROMPT as string,
        prompt: `@${notification.status.account.fqn} says: ${trimInputData(
        model: ollamaModel,
        // prompt: trimInputData(notification.status.content),
        prompt: `${notification.status.account.fqn} says: ${trimInputData(
          notification.status.content
        )}`,
        system: ollamaSystemPrompt,
        stream: false,
        options: ollamaConfig,
      };
      const response = await fetch(`${process.env.OLLAMA_URL}/api/generate`, {
      const response = await fetch(`${ollamaUrl}/api/generate`, {
        method: "POST",
        body: JSON.stringify(ollamaRequestBody),
      });

@@ -136,11 +115,17 @@ const postReplyToStatus = async (
  notification: Notification,
  ollamaResponseBody: OllamaResponse
) => {
  const { pleromaInstanceUrl, bearerToken } = envConfig;
  const emojiList = await getInstanceEmojis();
  let randomEmoji;
  if (emojiList) {
    randomEmoji = selectRandomEmoji(emojiList);
  }
  try {
    let mentions: string[];
    const statusBody: NewStatusBody = {
      content_type: "text/markdown",
      status: ollamaResponseBody.response,
      status: `${ollamaResponseBody.response} :${randomEmoji}:`,
      in_reply_to_id: notification.status.id,
    };
    if (

@@ -153,55 +138,126 @@ const postReplyToStatus = async (
      statusBody.to = mentions;
    }

    const response = await fetch(
      `${process.env.PLEROMA_INSTANCE_URL}/api/v1/statuses`,
      {
        method: "POST",
        headers: {
          Authorization: `Bearer ${process.env.INSTANCE_BEARER_TOKEN}`,
          "Content-Type": "application/json",
        },
        body: JSON.stringify(statusBody),
      }
    );
    const response = await fetch(`${pleromaInstanceUrl}/api/v1/statuses`, {
      method: "POST",
      headers: {
        Authorization: `Bearer ${bearerToken}`,
        "Content-Type": "application/json",
      },
      body: JSON.stringify(statusBody),
    });

    if (!response.ok) {
      throw new Error(`New status request failed: ${response.statusText}`);
    }

    await deleteNotification(notification);
  } catch (error: any) {
    throw new Error(error.message);
  }
};

const ws = createWebsocket();
const createTimelinePost = async () => {
  const {
    bearerToken,
    ollamaModel,
    ollamaSystemPrompt,
    ollamaUrl,
    pleromaInstanceUrl,
  } = envConfig;
  const ollamaRequestBody: OllamaRequest = {
    model: ollamaModel,
    prompt: "Say something random.",
    system: ollamaSystemPrompt,
    stream: false,
    // options: ollamaConfig,
  };
  try {
    const response = await fetch(`${ollamaUrl}/api/generate`, {
      method: "POST",
      body: JSON.stringify(ollamaRequestBody),
    });
    if (!response.ok)
      throw new Error("Error generating ad-hoc Ollama response");

    ws.on("upgrade", () => {
      console.log(
        `Websocket connection to ${process.env.PLEROMA_INSTANCE_DOMAIN} successful.`
      );
    });
    const ollamaResponse: OllamaResponse = await response.json();

    ws.on("message", async (data) => {
      const message: WSEvent = JSON.parse(data.toString("utf-8"));
      if (message.event !== "notification") {
        // only watch for notification events
        return;
    const newStatusBody: NewStatusBody = {
      content_type: "text/markdown",
      status: ollamaResponse.response,
    };

    const pleromaResponse = await fetch(
      `${pleromaInstanceUrl}/api/v1/statuses`,
      {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          Authorization: `Bearer ${bearerToken}`,
        },
        body: JSON.stringify(newStatusBody),
      }
    );

    if (!pleromaResponse.ok)
      throw new Error("Error posting ad-hoc Ollama response to Pleroma");
  } catch (error: unknown) {
    if (error instanceof Error) {
      throw new Error(error.message);
    }
  }
      console.log("Websocket message received.");
      const payload = JSON.parse(message.payload) as Notification;
      const ollamaResponse = await generateOllamaRequest(payload);
      if (ollamaResponse) {
        await postReplyToStatus(payload, ollamaResponse);
      }
    });
};

// if (notifications) {
//   await Promise.all(
//     notifications.map(async (notification) => {
//       const ollamaResponse = await generateOllamaRequest(notification);
//       if (ollamaResponse) {
//         postReplyToStatus(notification, ollamaResponse);
//       }
//     })
//   );
// }
let notifications = [];
const beginFetchCycle = async () => {
  setInterval(async () => {
    notifications = await getNotifications();
    if (notifications.length > 0) {
      await Promise.all(
        notifications.map(async (notification) => {
          try {
            const ollamaResponse = await generateOllamaRequest(notification);
            if (ollamaResponse) {
              postReplyToStatus(notification, ollamaResponse);
            }
          } catch (error: any) {
            throw new Error(error.message);
          }
        })
      );
    }
  }, envConfig.fetchInterval); // lower intervals may cause the bot to respond multiple times to the same message, but we try to mitigate this with the deleteNotification function
};

const beginStatusPostInterval = async () => {
  setInterval(async () => {
    try {
      createTimelinePost();
    } catch (error: unknown) {
      if (error instanceof Error) {
        throw new Error(error.message);
      }
    }
  }, envConfig.adHocPostInterval);
};

console.log(
  `Fetching notifications from ${envConfig.pleromaInstanceDomain}, every ${
    envConfig.fetchInterval / 1000
  } seconds.`
);
console.log(
  `Accepting prompts from: ${envConfig.whitelistedDomains.join(", ")}`
);
console.log(
  `Using model: ${envConfig.ollamaModel}\nConfig: ${JSON.stringify(
    ollamaConfig
  )}`
);
console.log(`System prompt: ${envConfig.ollamaSystemPrompt}`);

await beginFetchCycle();
// setInterval(async () => {
//   createTimelinePost();
// }, 10000);
await beginStatusPostInterval();
src/prisma.ts (new file): 42 changes

@@ -0,0 +1,42 @@
import { Notification, OllamaResponse } from "../types.js";
import { trimInputData } from "./util.js";
import { prisma } from "./main.js";

const storePromptData = async (
  notification: Notification,
  ollamaResponseBody: OllamaResponse
) => {
  try {
    await prisma.response.updateMany({
      where: { pleromaNotificationId: notification.id },
      data: {
        response: ollamaResponseBody.response,
        request: trimInputData(notification.status.content),
        to: notification.account.fqn,
        isProcessing: false,
        isComplete: true,
      },
    });
  } catch (error: any) {
    throw new Error(error.message);
  }
};

const storeUserData = async (notification: Notification): Promise<void> => {
  try {
    await prisma.user.upsert({
      where: { userFqn: notification.status.account.fqn },
      update: {
        lastRespondedTo: new Date(Date.now()),
      },
      create: {
        userFqn: notification.status.account.fqn,
        lastRespondedTo: new Date(Date.now()),
      },
    });
  } catch (error: any) {
    throw new Error(error.message);
  }
};

export { storeUserData, storePromptData };
src/util.ts (new file): 79 changes

@@ -0,0 +1,79 @@
import striptags from "striptags";
import { prisma } from "./main.js";
import { envConfig } from "./main.js";
import { Notification } from "../types.js";

const trimInputData = (input: string): string => {
  const strippedInput = striptags(input);

  const split = strippedInput.split(" ");
  // const promptStringIndex = split.indexOf("!prompt");
  const botFqnIndex = split.indexOf("@nice-ai");
  const botFqnIndexFull = split.indexOf("@nice-ai@nicecrew.digital");
  if (botFqnIndex !== -1) {
    split[botFqnIndex] = "Lexi";
  }
  if (botFqnIndexFull !== -1) {
    split[botFqnIndexFull] = "Lexi";
  }
  // split.splice(promptStringIndex, 1);
  return split.join(" "); // returns the stripped input with the bot's mention replaced by its name
};

const recordPendingResponse = async (notification: Notification) => {
  try {
    await prisma.response.create({
      data: {
        pleromaNotificationId: notification.id,
        isProcessing: true,
        isComplete: false,
      },
    });
  } catch (error: any) {
    throw new Error(error.message);
  }
};

const isFromWhitelistedDomain = (notification: Notification): boolean => {
  try {
    const domain = notification.status.account.fqn.split("@")[1];
    if (envConfig.whitelistedDomains.includes(domain)) {
      return true;
    }
    console.log(
      `Rejecting prompt request from non-whitelisted domain: ${domain}`
    );
    return false;
  } catch (error: any) {
    console.error(`Error with domain check: ${error.message}`);
    return false;
  }
};

const alreadyRespondedTo = async (
  notification: Notification
): Promise<boolean> => {
  try {
    const duplicate = await prisma.response.findFirst({
      where: { pleromaNotificationId: notification.id },
    });
    if (duplicate?.isProcessing || duplicate?.isComplete) {
      return true;
    }
    return false;
  } catch (error: any) {
    throw new Error(error.message);
  }
};

const selectRandomEmoji = (emojiList: string[]) => {
  return emojiList[Math.floor(Math.random() * emojiList.length)];
};

export {
  alreadyRespondedTo,
  selectRandomEmoji,
  trimInputData,
  recordPendingResponse,
  isFromWhitelistedDomain,
};
@@ -1,22 +0,0 @@
import { WebSocket } from "ws";

const scheme = process.env.PLEROMA_INSTANCE_URL?.startsWith("https")
  ? "wss"
  : "ws"; // crude scheme detection
const host = process.env.PLEROMA_INSTANCE_DOMAIN;

export const createWebsocket = (): WebSocket => {
  try {
    const ws = new WebSocket( // only connects to Soapbox frontends right now, but could pretty easily connect to Pleroma frontends with some tweaking
      `${scheme}://${host}/api/v1/streaming?stream=user`,
      [process.env.SOAPBOX_WS_PROTOCOL as string],
      {
        followRedirects: true,
      }
    );

    return ws;
  } catch (error: any) {
    throw new Error(error);
  }
};
systemd.service (new file): 14 changes

@@ -0,0 +1,14 @@
[Unit]
Description=Pleroma Ollama Bot
Wants=network-online.target
After=network-online.target

[Service]
Type=simple
User=USERNAME_HERE
Restart=always
RestartSec=3
ExecStart=/usr/bin/screen -L -DmS pleroma-ollama-bot /home/bot/.nvm/versions/node/v22.11.0/bin/npm run start
WorkingDirectory=/path/to/directory
[Install]
WantedBy=multi-user.target
types.d.ts (vendored): 104 changes

@@ -1,6 +1,9 @@
export interface Notification {
  account: Account;
  status: Status;
  id: string;
  type: string;
  created_at: string;
}

export interface NewStatusBody {
@@ -33,11 +36,46 @@ export interface OllamaRequest {
  /**
   * Whatever system prompt you'd like to add to the model to make it more unique, or force it to respond a certain way.
   */
  system: string;
  system?: string;
  /**
   * Whether to stream responses from the API, or have it sent all as one payload.
   */
  stream?: boolean = false; // stream response vs get response in one full message
  stream?: boolean = false;
  /**
   * Ollama configuration options
   */
  options?: OllamaConfigOptions;
}

export interface OllamaChatRequest {
  model: string;
  messages: OllamaMessages[];
  stream?: boolean = false;
  options?: OllamaConfigOptions;
}

export interface OllamaChatResponse {
  model: string;
  created_at: string;
  message: OllamaChatResponseMessage;
  done_reason: "string";
  done: boolean;
  total_duration: number;
  load_duration: number;
  prompt_eval_count: number;
  prompt_eval_duration: number;
  eval_count: number;
  eval_duration: number;
}

interface OllamaChatResponseMessage {
  role: "assistant";
  content: string;
}

interface OllamaMessages {
  role: "system" | "user";
  content: string;
}

export interface OllamaResponse {
@@ -54,8 +92,9 @@ export interface Status {
  created_at: string | Date; // when the post was created
  id: string; // ID of the reply itself
  in_reply_to_account_id: string; // account ID of the reply
  in_reply_to_id?: string; // status that the user has replied to
  mentions?: Mention[]; // array of mentions
  in_reply_to_id: string; // status that the user has replied to
  mentions: Mention[]; // array of mentions
  visibility: "private" | "public" | "unlisted";
}

export interface Mention {
@@ -65,8 +104,57 @@ export interface Mention {
  username: string;
}

export interface WSEvent {
  event: "update" | "status.update" | "notification";
  payload: string;
  stream: "user" | "direct";
export interface PleromaEmoji {
  [emojiName: string]: PleromaEmojiMetadata;
}

interface PleromaEmojiMetadata {
  image_url: string;
  tags: string[];
}

/**
 * Experimental settings, I wouldn't recommend messing with these if you don't know how they work (I don't either)
 */
export interface OllamaConfigOptions {
  /**
   * Number of tokens guaranteed to be kept in memory during response generation. Higher values leave less
   * possible room for num_ctx
   */
  num_keep?: number;
  seed?: number;
  /**
   * Sets the maximum number of tokens in the response
   */
  num_predict?: number;
  top_k?: number;
  top_p?: number;
  min_p?: number;
  typical_p?: number;
  repeat_last_n?: number;
  /**
   * How closely the response should follow the original prompt - lower = more focused response
   */
  temperature?: number;
  repeat_penalty?: number;
  presence_penalty?: number;
  frequency_penalty?: number;
  mirostat?: number;
  mirostat_tau?: number;
  mirostat_eta?: number;
  penalize_newline?: boolean;
  stop?: string[];
  numa?: boolean;
  /**
   * Number of tokens for the prompt to keep in memory for the response, minus the value of num_keep
   */
  num_ctx?: number;
  num_batch?: number;
  num_gpu?: number;
  main_gpu?: number;
  low_vram?: boolean;
  vocab_only?: boolean;
  use_mmap?: boolean;
  use_mlock?: boolean;
  num_thread?: number;
}
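For context, a minimal sketch (not part of this diff) of how `OllamaRequest`, `OllamaConfigOptions`, and `OllamaResponse` fit together when calling Ollama's `/api/generate` endpoint, mirroring the request built in `src/main.ts`. The model name, system prompt, and prompt text below are placeholders.

```ts
import {
  OllamaRequest,
  OllamaResponse,
  OllamaConfigOptions,
} from "./types.js"; // path assumed; main.ts imports from "../types.js"

// Conservative sampling settings, similar to the ollamaConfig object in src/main.ts
const options: OllamaConfigOptions = {
  temperature: 0.6,
  top_p: 0.85,
  top_k: 40,
  num_ctx: 2048,
  repeat_penalty: 1.1,
};

const body: OllamaRequest = {
  model: "dolphin-mistral:latest", // placeholder model name
  system: "You are a helpful fediverse bot.", // placeholder system prompt
  prompt: "someone@instance.tld says: hello!",
  stream: false, // get the whole response in one payload
  options,
};

// POST the request and read the single-message response
const response = await fetch("http://localhost:11434/api/generate", {
  method: "POST",
  body: JSON.stringify(body),
});
const ollamaResponse: OllamaResponse = await response.json();
console.log(ollamaResponse.response);
```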