Compare commits
3 Commits
ff5c7506ff...41317301bf
Author | SHA1 | Date
---|---|---
 | 41317301bf |
 | 00a2eb63bc |
 | 6c8f779294 |
@@ -1,7 +1,8 @@
 DATABASE_URL="file:../dev.db" # SQLite database relative to the ./prisma path
 PLEROMA_INSTANCE_URL="https://instance.tld" # Pleroma instance full URL including scheme
 PLEROMA_INSTANCE_DOMAIN="instance.tld" # used if you only want to respond to people from a particular instance
-ONLY_LOCAL_REPLIES="true" # reply only to users local to your instance
+ONLY_WHITELIST="true" # change to "false" if you want to accept prompts from any and all domains - *** USE WITH CAUTION ***
+WHITELISTED_DOMAINS="" # comma separated list of domains you want to allow the bot to accept prompts from (e.g. poa.st,nicecrew.digital,detroitriotcity.com,decayable.ink)
 OLLAMA_URL="http://localhost:11434" # OLLAMA connection URL
 OLLAMA_SYSTEM_PROMPT="" # system prompt - used to help tune the responses from the AI
 OLLAMA_MODEL="" # Ollama model for responses e.g. dolphin-mistral:latest
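Taken together, the environment template now defaults to whitelist-gated replies. A minimal sketch of how the two new variables would be consumed with dotenv (variable names come from the template above; this parsing helper is illustrative, not the bot's actual code):

```typescript
import "dotenv/config"; // assumes dotenv is installed, as in the start script

// ONLY_WHITELIST arrives as a string; compare against "true" explicitly.
const whitelistOnly = process.env.ONLY_WHITELIST === "true";

// WHITELISTED_DOMAINS is a comma separated list, e.g. "poa.st,decayable.ink".
const whitelistedDomains = (process.env.WHITELISTED_DOMAINS ?? "")
  .split(",")
  .map((domain) => domain.trim())
  .filter((domain) => domain.length > 0);
```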
package.json

@@ -1,6 +1,6 @@
 {
   "name": "pleroma-ollama-bot",
-  "version": "1.0.5",
+  "version": "1.0.7",
   "main": "index.js",
   "scripts": {
     "start": "tsc && node -r dotenv/config dist/main.js",
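The version bump (1.0.5 to 1.0.7) tracks the whitelist rework in the two source files below; the unchanged `start` script still compiles with tsc and preloads dotenv, so the environment template above is read at startup.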
src/main.ts (35 changes)
@@ -13,7 +13,10 @@ const prisma = new PrismaClient();
 const envConfig = {
   pleromaInstanceUrl: process.env.PLEROMA_INSTANCE_URL || "",
   pleromaInstanceDomain: process.env.PLEROMA_INSTANCE_DOMAIN || "",
-  onlyLocalReplies: process.env.ONLY_LOCAL_REPLIES === "true" ? true : false,
+  whitelistOnly: process.env.ONLY_WHITELIST === "true" ? true : false || "true",
+  whitelistedDomains: process.env.WHITELISTED_DOMAINS
+    ? process.env.WHITELISTED_DOMAINS.split(",")
+    : [process.env.PLEROMA_INSTANCE_DOMAIN],
   ollamaUrl: process.env.OLLAMA_URL || "",
   ollamaSystemPrompt:
     process.env.OLLAMA_SYSTEM_PROMPT ||
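One subtlety in the new `whitelistOnly` line: `||` binds tighter than the conditional operator, so the expression parses as `process.env.ONLY_WHITELIST === "true" ? true : (false || "true")`. When the variable is unset or anything other than "true", the fallback is the truthy string "true", meaning whitelisting stays effectively on by default. A standalone check of that parsing (illustrative only):

```typescript
// cond ? true : false || "true"  parses as  cond ? true : (false || "true")
const parse = (value?: string) => (value === "true" ? true : false || "true");

console.log(parse("true"));    // true   (boolean)
console.log(parse("false"));   // "true" (string, still truthy)
console.log(parse(undefined)); // "true" (unset also falls through to the string)
```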
@@ -26,8 +29,8 @@ const envConfig = {
 };
 
 const ollamaConfig: OllamaConfigOptions = {
-  temperature: 0.3,
-  num_predict: 400,
+  temperature: 0.6,
+  num_predict: 750,
 };
 
 const getNotifications = async () => {
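The tuning change trades determinism for length: a higher `temperature` (0.3 to 0.6) makes sampling more varied, and a larger `num_predict` (400 to 750) raises the cap on generated tokens. A minimal sketch of how such options ride along on an Ollama generate call (endpoint and payload shape per Ollama's REST API; the bot's own request code is not part of this hunk):

```typescript
// Assumes OLLAMA_URL and OLLAMA_MODEL are set as in the environment template.
const generate = async (prompt: string) => {
  const response = await fetch(`${process.env.OLLAMA_URL}/api/generate`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      model: process.env.OLLAMA_MODEL,
      prompt,
      stream: false, // one full JSON payload instead of a token stream
      options: { temperature: 0.6, num_predict: 750 }, // values from this hunk
    }),
  });
  return response.json();
};
```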
@@ -123,16 +126,24 @@ const recordPendingResponse = async (notification: Notification) => {
   }
 };
 
+const isFromWhitelistedDomain = (fqn: string): boolean => {
+  try {
+    const domain = fqn.split("@")[1];
+    if (envConfig.whitelistedDomains.includes(domain)) {
+      return true;
+    }
+    return false;
+  } catch (error: any) {
+    console.error(`Error with domain check: ${error.message}`);
+    return false;
+  }
+};
+
 const generateOllamaRequest = async (
   notification: Notification
 ): Promise<OllamaResponse | undefined> => {
-  const {
-    onlyLocalReplies,
-    pleromaInstanceDomain,
-    ollamaModel,
-    ollamaSystemPrompt,
-    ollamaUrl,
-  } = envConfig;
+  const { whitelistOnly, ollamaModel, ollamaSystemPrompt, ollamaUrl } =
+    envConfig;
   try {
     if (
       striptags(notification.status.content).includes("!prompt") &&
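The new helper keys off the domain half of the account's fully qualified name. A quick behavioural sketch, assuming `fqn` looks like "user@domain" (Pleroma's `account.fqn`) and the whitelist came from `WHITELISTED_DOMAINS` (values here are illustrative):

```typescript
const whitelistedDomains = ["poa.st", "nicecrew.digital"]; // illustrative

const isFromWhitelistedDomain = (fqn: string): boolean => {
  try {
    const domain = fqn.split("@")[1]; // "alice@poa.st" -> "poa.st"
    return whitelistedDomains.includes(domain);
  } catch {
    return false; // only reachable if fqn is null/undefined at runtime
  }
};

console.log(isFromWhitelistedDomain("alice@poa.st"));      // true
console.log(isFromWhitelistedDomain("bob@other.example")); // false
console.log(isFromWhitelistedDomain("localuser"));         // false: no "@", domain is undefined
```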
@@ -140,8 +151,8 @@ const generateOllamaRequest = async (
       notification.type === "mention"
     ) {
       if (
-        onlyLocalReplies &&
-        !notification.status.account.fqn.includes(`@${pleromaInstanceDomain}`)
+        whitelistOnly &&
+        !isFromWhitelistedDomain(notification.status.account.fqn)
       ) {
         return;
       }
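Net effect of this hunk: the gate no longer asks whether the sender shares the bot's home domain; a mention containing `!prompt` is now dropped silently unless the sender's domain passes the whitelist check.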
types.d.ts (vendored, 5 changes)
@@ -40,7 +40,10 @@ export interface OllamaRequest {
   /**
    * Whether to stream responses from the API, or have it sent all as one payload.
    */
-  stream?: boolean = false; // stream response vs get response in one full message
+  stream?: boolean = false;
+  /**
+   * Ollama configuration options
+   */
   options?: OllamaConfigOptions;
 }
 
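One caveat on the surviving line: an initializer on an interface property (`stream?: boolean = false`) is not valid TypeScript (interfaces carry no runtime values, so the compiler rejects the `= false`); it presumably slips through here because declaration files are often skipped via `skipLibCheck`. A conventional equivalent keeps the default at the call site (the stand-in type below replaces the real declaration, which is outside this hunk):

```typescript
type OllamaConfigOptions = Record<string, unknown>; // stand-in for the real declaration

export interface OllamaRequest {
  /**
   * Whether to stream responses from the API, or have it sent all as one payload.
   * Callers pass false explicitly when they want a single payload.
   */
  stream?: boolean;
  /**
   * Ollama configuration options
   */
  options?: OllamaConfigOptions;
  // ...remaining request fields are outside this hunk
}
```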