From 0c7c176bae86809a3e95873fd771b37b2d70b7f4 Mon Sep 17 00:00:00 2001
From: matty
Date: Sat, 2 Aug 2025 22:19:13 +0000
Subject: [PATCH] I don't remember

---
 .gitignore  |  1 +
 src/main.ts | 24 ++++++++++++------------
 types.d.ts  | 33 ++++++++++++++++++++++++++++++++-
 3 files changed, 45 insertions(+), 13 deletions(-)

diff --git a/.gitignore b/.gitignore
index 623f547..ee57a42 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,5 +4,6 @@
 node_modules
 *.log
 *.db
 /dist
+screenlog*
 /generated/prisma
diff --git a/src/main.ts b/src/main.ts
index d3d1c7c..14dbbe9 100644
--- a/src/main.ts
+++ b/src/main.ts
@@ -1,9 +1,11 @@
 import {
-  OllamaRequest,
-  OllamaResponse,
   NewStatusBody,
   Notification,
   OllamaConfigOptions,
+  // OllamaChatRequest,
+  // OllamaChatResponse,
+  OllamaRequest,
+  OllamaResponse,
 } from "../types.js";
 import striptags from "striptags";
 import { PrismaClient } from "../generated/prisma/client.js";
@@ -31,9 +33,7 @@ export const envConfig = {
     ? process.env.WHITELISTED_DOMAINS.split(",")
     : [process.env.PLEROMA_INSTANCE_DOMAIN],
   ollamaUrl: process.env.OLLAMA_URL || "",
-  ollamaSystemPrompt:
-    process.env.OLLAMA_SYSTEM_PROMPT ||
-    "You are a helpful AI assistant. Answer all questions concisely.",
+  ollamaSystemPrompt: process.env.OLLAMA_SYSTEM_PROMPT,
   ollamaModel: process.env.OLLAMA_MODEL || "",
   fetchInterval: process.env.FETCH_INTERVAL
     ? parseInt(process.env.FETCH_INTERVAL)
@@ -40,9 +40,10 @@ export const envConfig = {
     : 15000,
 };
 const ollamaConfig: OllamaConfigOptions = {
-  temperature: 1.4,
-  top_k: 100,
-  top_p: 0.8,
+  temperature: 0.2,
+  top_p: 0.9,
+  top_k: 30,
+  num_ctx: 2048,
 };
 
 // this could be helpful
@@ -73,12 +74,10 @@ const generateOllamaRequest = async (
   await storeUserData(notification);
   const ollamaRequestBody: OllamaRequest = {
     model: ollamaModel,
+    prompt: trimInputData(notification.status.content),
     system: ollamaSystemPrompt,
-    prompt: `[INST] @${
-      notification.status.account.fqn
-    } says: ${trimInputData(notification.status.content)} [/INST]`,
     stream: false,
-    options: ollamaConfig,
+    // options: ollamaConfig,
   };
   const response = await fetch(`${ollamaUrl}/api/generate`, {
     method: "POST",
@@ -173,4 +172,5 @@ console.log(
     ollamaConfig
   )}`
 );
+console.log(`System prompt: ${envConfig.ollamaSystemPrompt}`);
 await beginFetchCycle();
diff --git a/types.d.ts b/types.d.ts
index c59b613..8e9655f 100644
--- a/types.d.ts
+++ b/types.d.ts
@@ -36,7 +36,7 @@ export interface OllamaRequest {
   /**
    * Whatever system prompt you'd like to add to the model to make it more unique, or force it to respond a certain way.
    */
-  system: string;
+  system?: string;
   /**
    * Whether to stream responses from the API, or have it sent all as one payload.
    */
@@ -47,6 +47,37 @@ export interface OllamaRequest {
   options?: OllamaConfigOptions;
 }
 
+export interface OllamaChatRequest {
+  model: string;
+  messages: OllamaMessages[];
+  stream?: boolean;
+  options?: OllamaConfigOptions;
+}
+
+export interface OllamaChatResponse {
+  model: string;
+  created_at: string;
+  message: OllamaChatResponseMessage;
+  done_reason: string;
+  done: boolean;
+  total_duration: number;
+  load_duration: number;
+  prompt_eval_count: number;
+  prompt_eval_duration: number;
+  eval_count: number;
+  eval_duration: number;
+}
+
+interface OllamaChatResponseMessage {
+  role: "assistant";
+  content: string;
+}
+
+interface OllamaMessages {
+  role: "system" | "user";
+  content: string;
+}
+
 export interface OllamaResponse {
   model: string;
   created_at: Date | string;
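
The new chat types are dead code in this commit: their imports in src/main.ts stay commented out, and the request body still targets /api/generate. Below is a minimal sketch of how they could be exercised against Ollama's /api/chat endpoint once the bot switches over. The sendChat helper and its signature are hypothetical and not part of this patch; the endpoint, request shape, and response shape follow Ollama's documented chat API.

import {
  OllamaChatRequest,
  OllamaChatResponse,
  OllamaConfigOptions,
} from "../types.js";

// Hypothetical helper (not part of this patch): send one system + user
// exchange to Ollama's /api/chat endpoint and return the assistant's reply.
const sendChat = async (
  ollamaUrl: string,
  model: string,
  userPrompt: string,
  systemPrompt?: string,
  options?: OllamaConfigOptions
): Promise<string> => {
  const body: OllamaChatRequest = {
    model,
    // The system message is optional, mirroring the now-optional `system`
    // field on OllamaRequest.
    messages: [
      ...(systemPrompt
        ? [{ role: "system" as const, content: systemPrompt }]
        : []),
      { role: "user" as const, content: userPrompt },
    ],
    stream: false,
    options,
  };
  const response = await fetch(`${ollamaUrl}/api/chat`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body),
  });
  const data: OllamaChatResponse = await response.json();
  return data.message.content;
};

Unlike the single prompt string sent to /api/generate, the messages array leaves room to thread per-user conversation history into later requests.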