way better responsiveness, better system prompt
src/main.ts (11 changed lines)
@@ -7,7 +7,7 @@ import {
   OllamaRequest,
   OllamaResponse,
 } from "../types.js";
-import striptags from "striptags";
+// import striptags from "striptags";
 import { PrismaClient } from "../generated/prisma/client.js";
 import {
   getInstanceEmojis,
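The OllamaRequest and OllamaResponse types come from ../types.js, which is not part of this diff. Judging only from the fields assigned later in this commit (model, prompt, system, stream, and a commented-out options), OllamaRequest presumably mirrors the body of Ollama's /api/generate endpoint; the sketch below is an assumption, not the project's actual definition.

// Assumed shape, inferred from the fields this diff assigns; the real
// ../types.js is not shown in the commit.
interface OllamaRequest {
  model: string; // e.g. the value of envConfig.ollamaModel
  prompt: string; // the text sent to the model
  system?: string; // system prompt, now taken from envConfig
  stream?: boolean; // false here, so the reply arrives in one piece
  options?: Record<string, unknown>; // commented out in this commit (ollamaConfig)
}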
@@ -62,7 +62,7 @@ const generateOllamaRequest = async (
     envConfig;
   try {
     if (
-      striptags(notification.status.content).includes("!prompt") &&
+      // striptags(notification.status.content).includes("!prompt") &&
       !notification.status.account.bot && // sanity check, sort of
       notification.type === "mention" // &&
       // notification.status.visibility !== "private" // for safety, let's only respond to public messages
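With the !prompt trigger commented out, the gate in generateOllamaRequest now accepts any mention from a non-bot account. A minimal sketch of the resulting check; the shouldRespond helper and the narrowed notification type are illustrative, not the project's code.

// Illustrative only: the post-change gate, reduced to the fields it touches.
type MinimalNotification = {
  type: string;
  status: { account: { bot: boolean } };
};

const shouldRespond = (notification: MinimalNotification): boolean =>
  // striptags(notification.status.content).includes("!prompt") && // old trigger, now disabled
  !notification.status.account.bot && // sanity check, sort of
  notification.type === "mention";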
@@ -76,7 +76,7 @@
     }
     await recordPendingResponse(notification);
     await storeUserData(notification);
-    console.log(trimInputData(notification.status.content));
+    // console.log(trimInputData(notification.status.content));
     const ollamaRequestBody: OllamaRequest = {
       model: ollamaModel,
       // prompt: trimInputData(notification.status.content),
@@ -150,15 +150,14 @@ const createTimelinePost = async () => {
   const {
     bearerToken,
     ollamaModel,
-    // ollamaSystemPrompt,
+    ollamaSystemPrompt,
     ollamaUrl,
     pleromaInstanceUrl,
   } = envConfig;
   const ollamaRequestBody: OllamaRequest = {
     model: ollamaModel,
     prompt: "Say something random.",
-    system:
-      "You are a friendly AI assistant who loves to educate people on random topics, provide words of encouragement. You like to be as detailed as possible.",
+    system: ollamaSystemPrompt,
     stream: false,
     // options: ollamaConfig,
   };
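In createTimelinePost the hard-coded system string is replaced by ollamaSystemPrompt from envConfig, which is what the commit message means by a better system prompt. The definition of envConfig is not in this diff; below is a minimal sketch of how such a field might be populated, assuming an OLLAMA_SYSTEM_PROMPT environment variable that this project may or may not actually use.

// Hypothetical sketch: the real envConfig is defined elsewhere in the repo and
// is not shown in this commit, so the variable name and fallback are assumptions.
export const envConfig = {
  ollamaSystemPrompt:
    process.env.OLLAMA_SYSTEM_PROMPT ??
    "You are a friendly AI assistant.", // fallback if the variable is unset
  // ...other fields used in this diff: bearerToken, ollamaModel, ollamaUrl, pleromaInstanceUrl
};

Reading the prompt from configuration means the bot's personality can be changed without touching the code.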
@@ -4,10 +4,10 @@ import { envConfig } from "./main.js";
 import { Notification } from "../types.js";
 
 const trimInputData = (input: string): string => {
-  const strippedInput = striptags(input, [], "\n");
+  const strippedInput = striptags(input);
 
   const split = strippedInput.split(" ");
-  const promptStringIndex = split.indexOf("!prompt");
+  // const promptStringIndex = split.indexOf("!prompt");
   const botFqnIndex = split.indexOf("@nice-ai");
   const botFqnIndexFull = split.indexOf("@nice-ai@nicecrew.digital");
   if (botFqnIndex !== -1) {
@@ -16,7 +16,7 @@ const trimInputData = (input: string): string => {
   if (botFqnIndexFull !== -1) {
     split[botFqnIndexFull] = "Lexi";
   }
-  split.splice(promptStringIndex, 1);
+  // split.splice(promptStringIndex, 1);
   return split.join(" "); // returns everything after the !prompt
 };
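The last two hunks touch the helper module that defines trimInputData (its filename is not preserved in this capture). Applying them, and assuming the lines the diff does not show are unchanged, the function roughly becomes the sketch below; the two lines between the hunks are inferred from the parallel botFqnIndexFull branch.

import striptags from "striptags";

// Reconstructed post-change trimInputData: strips HTML tags, renames mentions of
// the bot to "Lexi", and no longer removes a "!prompt" token (that logic is now
// commented out). Lines not shown in the diff are assumptions.
const trimInputData = (input: string): string => {
  const strippedInput = striptags(input);

  const split = strippedInput.split(" ");
  // const promptStringIndex = split.indexOf("!prompt");
  const botFqnIndex = split.indexOf("@nice-ai");
  const botFqnIndexFull = split.indexOf("@nice-ai@nicecrew.digital");
  if (botFqnIndex !== -1) {
    split[botFqnIndex] = "Lexi"; // inferred: mirrors the botFqnIndexFull branch below
  }
  if (botFqnIndexFull !== -1) {
    split[botFqnIndexFull] = "Lexi";
  }
  // split.splice(promptStringIndex, 1);
  return split.join(" "); // returns everything after the !prompt
};

The trailing comment about !prompt is left over from the previous behaviour; with the splice commented out, the whole mention text (tags stripped, bot handles renamed) is returned.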