slight update to input processing
@@ -7,5 +7,5 @@ OLLAMA_URL="http://localhost:11434" # OLLAMA connection URL
 OLLAMA_SYSTEM_PROMPT="" # system prompt - used to help tune the responses from the AI
 OLLAMA_MODEL="" # Ollama model for responses e.g dolphin-mistral:latest
 FETCH_INTERVAL="" # interval for fetching new notifications from the instance, in milliseconds, recommend at least 15000
-RANDOM_POST_INTERVAL="" # interval for ad-hoc posts
+RANDOM_POST_INTERVAL="" # interval for ad-hoc posts in milliseconds
 INSTANCE_BEARER_TOKEN="" # instance auth/bearer token (check the "verify_credentials" endpoint request headers in Chrome DevTools if on Soapbox)
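For context, a filled-in version of this config block might look like the following (all values are illustrative, not taken from the commit):

    OLLAMA_URL="http://localhost:11434"
    OLLAMA_SYSTEM_PROMPT="You are a helpful, concise assistant."
    OLLAMA_MODEL="dolphin-mistral:latest"
    FETCH_INTERVAL="15000"         # poll notifications every 15 s
    RANDOM_POST_INTERVAL="3600000" # ad-hoc post every hour
    INSTANCE_BEARER_TOKEN="<token from the verify_credentials request headers>"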
src/main.ts (29 changed lines)
@@ -45,10 +45,11 @@ export const envConfig = {
 };
 
 const ollamaConfig: OllamaConfigOptions = {
-  temperature: 0.2,
-  top_p: 0.9,
-  top_k: 30,
+  temperature: 0.6,
+  top_p: 0.85,
+  top_k: 40,
   num_ctx: 2048,
+  repeat_penalty: 1.1,
 };
 
 // this could be helpful
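These are standard Ollama sampling options: raising temperature from 0.2 to 0.6 and top_k from 30 to 40 makes replies more varied, the slightly lower top_p tightens nucleus sampling, and repeat_penalty discourages the model from repeating itself. The OllamaConfigOptions type itself is not part of this diff; a plausible shape, mirroring the options accepted by Ollama's /api/generate endpoint, would be:

    // Hypothetical sketch: the real OllamaConfigOptions lives elsewhere in the repo.
    // Field names follow Ollama's documented generation options.
    interface OllamaConfigOptions {
      temperature?: number;    // higher values = more varied output
      top_p?: number;          // nucleus-sampling probability cutoff
      top_k?: number;          // sample only from the k most likely tokens
      num_ctx?: number;        // context window size, in tokens
      repeat_penalty?: number; // values > 1 penalize recently used tokens
    }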
@@ -63,8 +64,8 @@ const generateOllamaRequest = async (
   if (
     striptags(notification.status.content).includes("!prompt") &&
     !notification.status.account.bot && // sanity check, sort of
-    notification.type === "mention" &&
-    notification.status.visibility !== "private" // for safety, let's only respond to public messages
+    notification.type === "mention" // &&
+    // notification.status.visibility !== "private" // for safety, let's only respond to public messages
   ) {
     if (whitelistOnly && !isFromWhitelistedDomain(notification)) {
       await deleteNotification(notification);
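Commenting out the visibility clause means the bot now replies to mentions of any visibility, including private ones. Mastodon-compatible APIs such as Pleroma's expose four visibility levels; if the old safety behavior is ever wanted back, a small hypothetical helper (not part of this diff) could make the intent explicit:

    // Hypothetical helper: only answer mentions that are publicly visible.
    type Visibility = "public" | "unlisted" | "private" | "direct";

    const isSafeToAnswer = (visibility: Visibility): boolean =>
      visibility === "public" || visibility === "unlisted";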
@@ -75,12 +76,16 @@ const generateOllamaRequest = async (
     }
     await recordPendingResponse(notification);
     await storeUserData(notification);
+    console.log(trimInputData(notification.status.content));
     const ollamaRequestBody: OllamaRequest = {
       model: ollamaModel,
-      prompt: trimInputData(notification.status.content),
+      // prompt: trimInputData(notification.status.content),
+      prompt: `${notification.status.account.fqn} says: ${trimInputData(
+        notification.status.content
+      )}`,
       system: ollamaSystemPrompt,
       stream: false,
-      // options: ollamaConfig,
+      options: ollamaConfig,
     };
     const response = await fetch(`${ollamaUrl}/api/generate`, {
       method: "POST",
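The prompt now identifies the sender by their fully qualified name, so the model knows who is speaking. Assuming fqn holds something like user@example.social, the template produces (sample values, illustrative only):

    // Illustrative only: mimics the new prompt template with sample values.
    const fqn = "alice@example.social";    // stands in for notification.status.account.fqn
    const trimmed = "What is TypeScript?"; // trimInputData() output, "!prompt" removed
    const prompt = `${fqn} says: ${trimmed}`;
    console.log(prompt); // "alice@example.social says: What is TypeScript?"

Note that the ollamaConfig options object is also switched on here for mention replies, while the ad-hoc timeline posts below still leave it commented out.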
@@ -145,14 +150,15 @@ const createTimelinePost = async () => {
   const {
     bearerToken,
     ollamaModel,
-    ollamaSystemPrompt,
+    // ollamaSystemPrompt,
     ollamaUrl,
     pleromaInstanceUrl,
   } = envConfig;
   const ollamaRequestBody: OllamaRequest = {
     model: ollamaModel,
-    prompt: "Make a random post about a random topic.",
-    system: ollamaSystemPrompt,
+    prompt: "Say something random.",
+    system:
+      "You are a friendly AI assistant who loves to educate people on random topics, provide words of encouragement. You like to be as detailed as possible.",
     stream: false,
     // options: ollamaConfig,
   };
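Timeline posts now carry a hardcoded system prompt instead of the env-configured one, hence the commented-out destructure above. The body fields map directly onto Ollama's /api/generate API; a minimal standalone equivalent, assuming a local Ollama instance and an illustrative model name, would be:

    // Sketch of the same request made outside the bot (assumes a local Ollama).
    const res = await fetch("http://localhost:11434/api/generate", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({
        model: "dolphin-mistral:latest",
        prompt: "Say something random.",
        system: "You are a friendly AI assistant ...",
        stream: false, // one JSON object back instead of streamed NDJSON chunks
      }),
    });
    const { response } = await res.json(); // generated text arrives in "response"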
@@ -241,4 +247,7 @@ console.log(
 console.log(`System prompt: ${envConfig.ollamaSystemPrompt}`);
 
 await beginFetchCycle();
+// setInterval(async () => {
+//   createTimelinePost();
+// }, 10000);
 await beginStatusPostInterval();
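The commented-out setInterval reads like a leftover 10-second test loop; the production schedule presumably lives in beginStatusPostInterval, driven by RANDOM_POST_INTERVAL. Its body is not part of this diff, but a plausible sketch would be:

    // Hypothetical sketch: the real beginStatusPostInterval is not shown in this commit.
    const beginStatusPostInterval = async (): Promise<void> => {
      const ms = Number(envConfig.randomPostInterval); // assumed field for RANDOM_POST_INTERVAL
      setInterval(() => {
        createTimelinePost().catch((err) => console.error("timeline post failed:", err));
      }, ms);
    };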
src/util.ts (11 changed lines)
@@ -4,9 +4,18 @@ import { envConfig } from "./main.js";
 import { Notification } from "../types.js";
 
 const trimInputData = (input: string): string => {
-  const strippedInput = striptags(input);
+  const strippedInput = striptags(input, [], "\n");
+
   const split = strippedInput.split(" ");
   const promptStringIndex = split.indexOf("!prompt");
+  const botFqnIndex = split.indexOf("@nice-ai");
+  const botFqnIndexFull = split.indexOf("@nice-ai@nicecrew.digital");
+  if (botFqnIndex !== -1) {
+    split[botFqnIndex] = "Lexi";
+  }
+  if (botFqnIndexFull !== -1) {
+    split[botFqnIndexFull] = "Lexi";
+  }
   split.splice(promptStringIndex, 1);
   return split.join(" "); // returns everything after the !prompt
 };
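Two behavior changes here: striptags now replaces stripped HTML tags with newlines instead of deleting them outright (the third argument is striptags' tag-replacement string), and mentions of the bot's handle are rewritten to "Lexi" before the text reaches the model. An illustrative call, assuming the mention arrives without surrounding HTML:

    // Illustrative usage of the updated helper.
    trimInputData("@nice-ai !prompt tell me a joke");
    // "@nice-ai" becomes "Lexi", and the "!prompt" token is spliced out:
    // => "Lexi tell me a joke"

One caveat worth knowing: if "!prompt" is absent, indexOf returns -1 and splice(-1, 1) silently drops the last word; the caller avoids this by checking includes("!prompt") before building the request.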