diff --git a/prisma/schema.prisma b/prisma/schema.prisma
index fd5e0cc..7d50d75 100644
--- a/prisma/schema.prisma
+++ b/prisma/schema.prisma
@@ -27,6 +27,7 @@ model User {
   id              Int       @id @default(autoincrement())
   userFqn         String    @unique
   lastRespondedTo DateTime?
+  memory          UserMemory?
 }
 
 model Reaction {
@@ -38,4 +39,37 @@ model Reaction {
   @@unique([statusId]) // Prevent multiple reactions to same status
   @@map("reactions")
+}
+
+model UserMemory {
+  id                     Int       @id @default(autoincrement())
+  userFqn                String    @unique
+  personalityTraits      String    @default("[]") // JSON string of personality observations
+  runningGags            String    @default("[]") // JSON string of running jokes/gags
+  relationships          String    @default("[]") // JSON string of relationship dynamics with bot
+  interests              String    @default("[]") // JSON string of user interests
+  backstory              String    @default("[]") // JSON string of biographical elements
+  lastInteractionSummary String?   // Brief summary of last chat
+  interactionCount       Int       @default(0)
+  lastUpdated            DateTime  @default(now()) @updatedAt
+  createdAt              DateTime  @default(now())
+
+  // Relation to existing User model
+  user User @relation(fields: [userFqn], references: [userFqn])
+
+  @@map("user_memories")
+}
+
+model InteractionLog {
+  id                   Int       @id @default(autoincrement())
+  userFqn              String
+  conversationSnapshot String    // Key parts of the conversation
+  sentiment            String    // positive, negative, teasing, etc.
+  extractedTopics      String    @default("[]") // JSON string of topics discussed
+  memorableQuotes      String    @default("[]") // JSON string of funny/notable quotes
+  botEmotionalState    String?   // How the bot should "feel" about this interaction
+  createdAt            DateTime  @default(now())
+
+  @@map("interaction_logs")
+  @@index([userFqn, createdAt])
 }
\ No newline at end of file
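
Note on the schema: because this Prisma/SQLite setup has no list-type column, every list-like field above is a TEXT column holding a JSON-encoded string array. A minimal sketch of the intended round trip, with an invented FQN (the real read/write path lives in src/memory.ts further down):

    // Hypothetical example: read, modify, and write back one JSON-string column.
    const row = await prisma.userMemory.findUnique({
      where: { userFqn: "alice@example.social" },
    });
    const traits: string[] = row ? JSON.parse(row.personalityTraits) : [];
    await prisma.userMemory.update({
      where: { userFqn: "alice@example.social" },
      data: { personalityTraits: JSON.stringify([...new Set([...traits, "sarcastic"])]) },
    });
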
diff --git a/src/api.ts b/src/api.ts
index 08861d3..b35dfd9 100644
--- a/src/api.ts
+++ b/src/api.ts
@@ -1,6 +1,8 @@
 import { envConfig, prisma } from "./main.js";
 import { PleromaEmoji, Notification, ContextResponse } from "../types.js";
 import { selectRandomEmojis } from "./util.js";
+import { getUserMemory, parseJsonArray, stringifyJsonArray } from "./memory.js";
+
 
 const getNotifications = async () => {
@@ -272,6 +274,154 @@ const handlePostReaction = async (notification: Notification): Promise =>
 };
 
+/**
+ * Get detailed user memory for admin/debugging
+ */
+const getUserMemoryDetails = async (userFqn: string) => {
+  try {
+    const memory = await prisma.userMemory.findUnique({
+      where: { userFqn: userFqn },
+      include: {
+        user: true
+      }
+    });
+
+    if (!memory) return null;
+
+    // Get recent interaction logs
+    const recentLogs = await prisma.interactionLog.findMany({
+      where: { userFqn: userFqn },
+      orderBy: { createdAt: 'desc' },
+      take: 10
+    });
+
+    // Parse JSON strings for better readability
+    const parsedMemory = {
+      ...memory,
+      personalityTraits: parseJsonArray(memory.personalityTraits),
+      runningGags: parseJsonArray(memory.runningGags),
+      relationships: parseJsonArray(memory.relationships),
+      interests: parseJsonArray(memory.interests),
+      backstory: parseJsonArray(memory.backstory),
+      recentInteractions: recentLogs.map(log => ({
+        ...log,
+        extractedTopics: parseJsonArray(log.extractedTopics),
+        memorableQuotes: parseJsonArray(log.memorableQuotes)
+      }))
+    };
+
+    return parsedMemory;
+  } catch (error: any) {
+    console.error(`Error getting user memory details: ${error.message}`);
+    return null;
+  }
+};
+
+/**
+ * Manually add or remove memory elements (for admin use)
+ */
+const modifyUserMemory = async (
+  userFqn: string,
+  action: 'add' | 'remove',
+  category: 'personalityTraits' | 'runningGags' | 'relationships' | 'interests' | 'backstory',
+  item: string
+) => {
+  try {
+    const memory = await getUserMemory(userFqn);
+    if (!memory) return false;
+
+    const currentArray = parseJsonArray(memory[category] as string);
+    let updatedArray: string[];
+
+    if (action === 'add') {
+      updatedArray = [...new Set([...currentArray, item])]; // Add without duplicates
+    } else {
+      updatedArray = currentArray.filter(existingItem => existingItem !== item);
+    }
+
+    await prisma.userMemory.update({
+      where: { userFqn: userFqn },
+      data: { [category]: stringifyJsonArray(updatedArray) }
+    });
+
+    console.log(`${action === 'add' ? 'Added' : 'Removed'} "${item}" ${action === 'add' ? 'to' : 'from'} ${category} for ${userFqn}`);
+    return true;
+  } catch (error: any) {
+    console.error(`Error modifying user memory: ${error.message}`);
+    return false;
+  }
+};
+
+
+const getMemoryStats = async () => {
+  try {
+    const totalUsers = await prisma.userMemory.count();
+    const totalInteractions = await prisma.interactionLog.count();
+
+    const mostActiveUsers = await prisma.userMemory.findMany({
+      orderBy: { interactionCount: 'desc' },
+      take: 10,
+      select: {
+        userFqn: true,
+        interactionCount: true,
+        personalityTraits: true,
+        runningGags: true
+      }
+    });
+
+    // Parse JSON strings for the active users
+    const parsedActiveUsers = mostActiveUsers.map(user => ({
+      ...user,
+      personalityTraits: parseJsonArray(user.personalityTraits),
+      runningGags: parseJsonArray(user.runningGags)
+    }));
+
+    const sentimentStats = await prisma.interactionLog.groupBy({
+      by: ['sentiment'],
+      _count: { sentiment: true }
+    });
+
+    return {
+      totalUsers,
+      totalInteractions,
+      mostActiveUsers: parsedActiveUsers,
+      sentimentDistribution: sentimentStats
+    };
+  } catch (error: any) {
+    console.error(`Error getting memory stats: ${error.message}`);
+    return null;
+  }
+};
+
+const resetUserMemory = async (userFqn: string) => {
+  try {
+    await prisma.userMemory.update({
+      where: { userFqn: userFqn },
+      data: {
+        personalityTraits: stringifyJsonArray([]),
+        runningGags: stringifyJsonArray([]),
+        relationships: stringifyJsonArray([]),
+        interests: stringifyJsonArray([]),
+        backstory: stringifyJsonArray([]),
+        lastInteractionSummary: null,
+        interactionCount: 0,
+      }
+    });
+
+    // Optionally delete interaction logs too
+    await prisma.interactionLog.deleteMany({
+      where: { userFqn: userFqn }
+    });
+
+    console.log(`Reset memory for ${userFqn}`);
+    return true;
+  } catch (error: any) {
+    console.error(`Error resetting user memory: ${error.message}`);
+    return false;
+  }
+};
+
+
 export {
   deleteNotification,
   getInstanceEmojis,
@@ -280,4 +430,8 @@ export {
   reactToStatus,
   handlePostReaction,
   hasAlreadyReacted,
+  getUserMemoryDetails,
+  modifyUserMemory,
+  getMemoryStats,
+  resetUserMemory,
 };
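
The four new api.ts exports are admin/debugging helpers rather than part of the reply path. A hedged usage sketch (the call site below is hypothetical; this diff does not wire the helpers to any command):

    import { getUserMemoryDetails, modifyUserMemory, getMemoryStats } from "./api.js";

    const details = await getUserMemoryDetails("alice@example.social");
    console.log(details?.personalityTraits); // already parsed to string[], not a raw JSON string

    // Manually prune a stale running gag, then check the aggregate stats.
    await modifyUserMemory("alice@example.social", "remove", "runningGags", "always_hungry");
    console.log(await getMemoryStats());
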
diff --git a/src/main.ts b/src/main.ts
index b2e5bf3..941fdc2 100644
--- a/src/main.ts
+++ b/src/main.ts
@@ -26,6 +26,11 @@ import {
   isLLMRefusal,
   shouldContinue,
 } from "./util.js";
+import {
+  analyzeInteraction,
+  updateUserMemory,
+  generateMemoryContext,
+} from "./memory.js";
 
 export const prisma = new PrismaClient();
 
@@ -85,6 +90,9 @@ const generateOllamaRequest = async (
     await recordPendingResponse(notification);
     await storeUserData(notification);
 
+    const userFqn = notification.status.account.fqn;
+    const userMessage = notification.status.pleroma.content["text/plain"];
+
     let conversationHistory: PostAncestorsForModel[] = [];
     if (replyWithContext) {
       const contextPosts = await getStatusContext(notification.status.id);
@@ -101,19 +109,23 @@
       });
     }
 
-    const userMessage = `${notification.status.account.fqn} says: ${notification.status.pleroma.content["text/plain"]}`;
+    const formattedUserMessage = `${userFqn} says: ${userMessage}`;
+
+    // Get user memory context
+    const memoryContext = await generateMemoryContext(userFqn);
 
     // Get random emojis for this request
     const emojiList = await getInstanceEmojis();
     let availableEmojis = "";
     if (emojiList && emojiList.length > 0) {
       const randomEmojis = selectRandomEmojis(emojiList, 20);
-      availableEmojis = `\n\nAvailable custom emojis you can use in your response (or use none!) (format as :emoji_name:): ${randomEmojis.join(", ")}`;
+      availableEmojis = `\n\nAvailable custom emojis you can use in your response (format as :emoji_name:): ${randomEmojis.join(", ")}`;
     }
 
-    let systemContent = ollamaSystemPrompt + availableEmojis;
+    let systemContent = ollamaSystemPrompt + memoryContext + availableEmojis;
+
     if (replyWithContext) {
-      systemContent = `${ollamaSystemPrompt}\n\nPrevious conversation context:\n${conversationHistory
+      systemContent = `${ollamaSystemPrompt}${memoryContext}\n\nPrevious conversation context:\n${conversationHistory
         .map(
           (post) =>
             `${post.account_fqn} (to ${post.mentions.join(", ")}): ${
@@ -135,7 +147,7 @@
       model: ollamaModel,
       messages: [
         { role: "system", content: systemContent as string },
-        { role: "user", content: userMessage },
+        { role: "user", content: formattedUserMessage },
       ],
       stream: false,
       options: currentConfig,
@@ -153,6 +165,9 @@
       return generateOllamaRequest(notification, retryAttempt + 1);
     }
 
+    // Analyze interaction and update user memory (async, don't block response)
+    analyzeAndUpdateMemory(userFqn, userMessage, ollamaResponse.message.content);
+
     await storePromptData(notification, ollamaResponse);
     return ollamaResponse;
   }
@@ -161,6 +176,31 @@
   }
 };
 
+
+/**
+ * Analyze interaction and update user memory (runs asynchronously)
+ */
+const analyzeAndUpdateMemory = async (
+  userFqn: string,
+  userMessage: string,
+  botResponse: string
+): Promise<void> => {
+  try {
+    // Run analysis in background - don't await to avoid blocking response
+    const analysis = await analyzeInteraction(userMessage, botResponse, userFqn);
+
+    await updateUserMemory({
+      userFqn,
+      conversationContent: userMessage,
+      botResponse,
+      analysis,
+    });
+  } catch (error: any) {
+    console.error(`Memory analysis failed for ${userFqn}: ${error.message}`);
+  }
+};
+
+
 const postReplyToStatus = async (
   notification: Notification,
   ollamaResponseBody: OllamaChatResponse
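
In main.ts, analyzeAndUpdateMemory is intentionally called without await so the reply is posted before the memory analysis finishes, and errors are swallowed inside the helper. If that internal try/catch were ever removed, an explicit fire-and-forget form (not part of this diff) would keep the rejection handled:

    void analyzeAndUpdateMemory(userFqn, userMessage, ollamaResponse.message.content).catch(
      (err) => console.error(`Background memory update failed: ${err.message}`)
    );
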
diff --git a/src/memory.ts b/src/memory.ts
new file mode 100644
index 0000000..86453c6
--- /dev/null
+++ b/src/memory.ts
@@ -0,0 +1,284 @@
+// Updated memory.ts with JSON string handling for SQLite
+import { prisma } from "./main.js";
+import { envConfig } from "./main.js";
+import { InteractionAnalysis, MemoryUpdateRequest, OllamaChatRequest, OllamaChatResponse } from "../types.js";
+
+// Helper functions for JSON string array handling
+const parseJsonArray = (jsonString: string): string[] => {
+  try {
+    const parsed = JSON.parse(jsonString);
+    return Array.isArray(parsed) ? parsed : [];
+  } catch {
+    return [];
+  }
+};
+
+const stringifyJsonArray = (array: string[]): string => {
+  return JSON.stringify(array);
+};
+
+/**
+ * Analyze a conversation to extract user personality, gags, and relationship dynamics
+ */
+const analyzeInteraction = async (
+  userMessage: string,
+  botResponse: string,
+  userFqn: string
+): Promise<InteractionAnalysis> => {
+  const { ollamaUrl, ollamaModel } = envConfig;
+
+  const analysisPrompt = `Analyze this conversation between a user and a cute female AI chatbot named Lexi. Extract personality traits, running gags, relationship dynamics, and interesting facts.
+
+User (${userFqn}): ${userMessage}
+Bot (Lexi): ${botResponse}
+
+Please analyze and respond with a JSON object containing:
+{
+  "sentiment": "positive|negative|neutral|teasing|flirty|aggressive",
+  "topics": ["topic1", "topic2"],
+  "personalityObservations": ["trait1", "trait2"],
+  "runningGagUpdates": ["gag1", "gag2"],
+  "relationshipUpdates": ["relationship_change1"],
+  "interestMentions": ["interest1", "interest2"],
+  "backstoryElements": ["fact1", "fact2"],
+  "memorableQuotes": ["quote1", "quote2"]
+}
+
+Focus on:
+- Personality traits (sarcastic, teasing, protective, joker, etc.)
+- Running gags and memes (fake claims, recurring jokes, etc.)
+- How they treat the bot (mean, nice, flirty, protective)
+- Interests and hobbies mentioned
+- Any biographical info (real or fake "lore")
+- Memorable or funny quotes
+
+Keep entries brief and specific. Empty arrays are fine if nothing notable.`;
+
+  try {
+    const analysisRequest: OllamaChatRequest = {
+      model: ollamaModel,
+      messages: [
+        {
+          role: "system",
+          content: "You are an expert at analyzing social interactions and extracting personality insights. Always respond with valid JSON only."
+        },
+        { role: "user", content: analysisPrompt }
+      ],
+      stream: false,
+      options: {
+        temperature: 0.3, // Lower temperature for more consistent analysis
+        num_predict: 800,
+      }
+    };
+
+    const response = await fetch(`${ollamaUrl}/api/chat`, {
+      method: "POST",
+      body: JSON.stringify(analysisRequest),
+    });
+
+    if (!response.ok) {
+      throw new Error(`Analysis request failed: ${response.statusText}`);
+    }
+
+    const analysisResponse: OllamaChatResponse = await response.json();
+
+    try {
+      // Parse the JSON response
+      const analysis: InteractionAnalysis = JSON.parse(analysisResponse.message.content.trim());
+      return analysis;
+    } catch (parseError) {
+      console.error("Failed to parse analysis JSON:", analysisResponse.message.content);
+      // Return default analysis if parsing fails
+      return {
+        sentiment: 'neutral',
+        topics: [],
+        personalityObservations: [],
+        runningGagUpdates: [],
+        relationshipUpdates: [],
+        interestMentions: [],
+        backstoryElements: [],
+        memorableQuotes: []
+      };
+    }
+  } catch (error: any) {
+    console.error(`Error analyzing interaction: ${error.message}`);
+    return {
+      sentiment: 'neutral',
+      topics: [],
+      personalityObservations: [],
+      runningGagUpdates: [],
+      relationshipUpdates: [],
+      interestMentions: [],
+      backstoryElements: [],
+      memorableQuotes: []
+    };
+  }
+};
+
+/**
+ * Get or create user memory profile
+ */
+const getUserMemory = async (userFqn: string) => {
+  try {
+    let memory = await prisma.userMemory.findUnique({
+      where: { userFqn: userFqn }
+    });
+
+    if (!memory) {
+      memory = await prisma.userMemory.create({
+        data: {
+          userFqn: userFqn,
+          personalityTraits: stringifyJsonArray([]),
+          runningGags: stringifyJsonArray([]),
+          relationships: stringifyJsonArray([]),
+          interests: stringifyJsonArray([]),
+          backstory: stringifyJsonArray([]),
+          lastInteractionSummary: null,
+          interactionCount: 0,
+        }
+      });
+    }
+
+    return memory;
+  } catch (error: any) {
+    console.error(`Error getting user memory: ${error.message}`);
+    return null;
+  }
+};
+
+/**
+ * Update user memory with new interaction insights
+ */
+const updateUserMemory = async (request: MemoryUpdateRequest): Promise<void> => {
+  try {
+    const { userFqn, conversationContent, botResponse, analysis } = request;
+
+    // Get existing memory
+    const existingMemory = await getUserMemory(userFqn);
+    if (!existingMemory) return;
+
+    // Parse existing JSON arrays
+    const existingPersonality = parseJsonArray(existingMemory.personalityTraits);
+    const existingGags = parseJsonArray(existingMemory.runningGags);
+    const existingRelationships = parseJsonArray(existingMemory.relationships);
+    const existingInterests = parseJsonArray(existingMemory.interests);
+    const existingBackstory = parseJsonArray(existingMemory.backstory);
+
+    // Merge new observations with existing ones (avoiding duplicates)
+    const mergeArrays = (existing: string[], newItems: string[]): string[] => {
+      const combined = [...existing, ...newItems];
+      return [...new Set(combined)]; // Remove duplicates
+    };
+
+    // Limit array sizes to prevent memory bloat
+    const limitArray = (arr: string[], maxSize: number = 20): string[] => {
+      return arr.slice(-maxSize); // Keep most recent items
+    };
+
+    const updatedMemory = {
+      personalityTraits: stringifyJsonArray(limitArray(mergeArrays(existingPersonality, analysis.personalityObservations))),
+      runningGags: stringifyJsonArray(limitArray(mergeArrays(existingGags, analysis.runningGagUpdates))),
+      relationships: stringifyJsonArray(limitArray(mergeArrays(existingRelationships, analysis.relationshipUpdates))),
+      interests: stringifyJsonArray(limitArray(mergeArrays(existingInterests, analysis.interestMentions))),
+      backstory: stringifyJsonArray(limitArray(mergeArrays(existingBackstory, analysis.backstoryElements))),
+      lastInteractionSummary: `${analysis.sentiment} conversation about ${analysis.topics.join(', ') || 'general chat'}`,
+      interactionCount: existingMemory.interactionCount + 1,
+    };
+
+    // Update database
+    await prisma.userMemory.update({
+      where: { userFqn: userFqn },
+      data: updatedMemory
+    });
+
+    // Log the interaction for historical reference
+    await prisma.interactionLog.create({
+      data: {
+        userFqn: userFqn,
+        conversationSnapshot: `${userFqn}: ${conversationContent.slice(0, 200)}... | Lexi: ${botResponse.slice(0, 200)}...`,
+        sentiment: analysis.sentiment,
+        extractedTopics: stringifyJsonArray(analysis.topics),
+        memorableQuotes: stringifyJsonArray(analysis.memorableQuotes),
+        botEmotionalState: generateEmotionalState(analysis),
+      }
+    });
+
+    console.log(`Updated memory for ${userFqn}: ${analysis.personalityObservations.join(', ')}`);
+  } catch (error: any) {
+    console.error(`Error updating user memory: ${error.message}`);
+  }
+};
+
+/**
+ * Generate how the bot should "feel" about this interaction
+ */
+const generateEmotionalState = (analysis: InteractionAnalysis): string => {
+  const { sentiment, relationshipUpdates } = analysis;
+
+  if (sentiment === 'teasing') return 'playfully_hurt';
+  if (sentiment === 'flirty') return 'flustered';
+  if (sentiment === 'aggressive') return 'sad';
+  if (relationshipUpdates.some(rel => rel.includes('hurt') || rel.includes('mean'))) return 'hurt_feelings';
+  if (relationshipUpdates.some(rel => rel.includes('cute') || rel.includes('sweet'))) return 'happy';
+  return 'neutral';
+};
+
+/**
+ * Generate memory context for system prompt
+ */
+const generateMemoryContext = async (userFqn: string): Promise<string> => {
+  try {
+    const memory = await getUserMemory(userFqn);
+    if (!memory || memory.interactionCount === 0) {
+      return "";
+    }
+
+    let context = `\n\n--- User Memory for ${userFqn} ---\n`;
+
+    const personalityTraits = parseJsonArray(memory.personalityTraits);
+    const runningGags = parseJsonArray(memory.runningGags);
+    const relationships = parseJsonArray(memory.relationships);
+    const interests = parseJsonArray(memory.interests);
+    const backstory = parseJsonArray(memory.backstory);
+
+    if (personalityTraits.length > 0) {
+      context += `Personality: ${personalityTraits.join(', ')}\n`;
+    }
+
+    if (runningGags.length > 0) {
+      context += `Running gags: ${runningGags.join(', ')}\n`;
+    }
+
+    if (relationships.length > 0) {
+      context += `Our relationship: ${relationships.join(', ')}\n`;
+    }
+
+    if (interests.length > 0) {
+      context += `Interests: ${interests.join(', ')}\n`;
+    }
+
+    if (backstory.length > 0) {
+      context += `Background: ${backstory.join(', ')}\n`;
+    }
+
+    if (memory.lastInteractionSummary) {
+      context += `Last time we talked: ${memory.lastInteractionSummary}\n`;
+    }
+
+    context += `Total conversations: ${memory.interactionCount}`;
+
+    return context;
+  } catch (error: any) {
+    console.error(`Error generating memory context: ${error.message}`);
+    return "";
+  }
+};
+
+export {
+  analyzeInteraction,
+  updateUserMemory,
+  getUserMemory,
+  generateMemoryContext,
+  parseJsonArray,
+  stringifyJsonArray,
+};
\ No newline at end of file
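
analyzeInteraction depends on the model returning strict JSON and falls back to a neutral analysis when parsing fails. A possible hardening (not in this diff) is Ollama's JSON mode, which constrains /api/chat output to valid JSON; it assumes a format field is added to the project's OllamaChatRequest type:

    const analysisRequest: OllamaChatRequest = {
      model: ollamaModel,
      format: "json", // Ollama JSON mode; assumes `format?: string` is added to OllamaChatRequest
      messages: [/* same system + user messages as in analyzeInteraction */],
      stream: false,
      options: { temperature: 0.3, num_predict: 800 },
    };
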
diff --git a/types.d.ts b/types.d.ts
index 7438261..f728c20 100644
--- a/types.d.ts
+++ b/types.d.ts
@@ -337,3 +337,36 @@ export interface OllamaConfigOptions {
    */
   num_thread?: number;
 }
+
+
+export interface UserMemory {
+  id: number;
+  userFqn: string;
+  personalityTraits: string[]; // ["teases_bot", "sarcastic", "friendly", "joker"]
+  runningGags: string[]; // ["claims_to_shit_pants", "pretends_to_be_cat", "always_hungry"]
+  relationships: string[]; // ["hurt_my_feelings_once", "called_me_cute", "protective_of_me"]
+  interests: string[]; // ["programming", "anime", "cooking"]
+  backstory: string[]; // ["works_at_tech_company", "has_three_cats", "lives_in_california"]
+  lastInteractionSummary: string; // Brief summary of last conversation
+  interactionCount: number;
+  lastUpdated: DateTime;
+  createdAt: DateTime;
+}
+
+export interface InteractionAnalysis {
+  sentiment: 'positive' | 'negative' | 'neutral' | 'teasing' | 'flirty' | 'aggressive';
+  topics: string[]; // Extracted topics from conversation
+  personalityObservations: string[]; // New traits observed
+  runningGagUpdates: string[]; // New or updated running gags
+  relationshipUpdates: string[]; // How relationship with bot changed
+  interestMentions: string[]; // Interests/hobbies mentioned
+  backstoryElements: string[]; // New biographical info (real or fake)
+  memorableQuotes: string[]; // Funny or notable things they said
+}
+
+export interface MemoryUpdateRequest {
+  userFqn: string;
+  conversationContent: string;
+  botResponse: string;
+  analysis: InteractionAnalysis;
+}
\ No newline at end of file
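
For reference, an InteractionAnalysis value in the shape analyzeInteraction is expected to produce and updateUserMemory consumes (all values are invented for illustration, echoing the examples in the comments above):

    const example: InteractionAnalysis = {
      sentiment: "teasing",
      topics: ["programming", "cooking"],
      personalityObservations: ["sarcastic", "teases_bot"],
      runningGagUpdates: ["pretends_to_be_cat"],
      relationshipUpdates: ["called_me_cute"],
      interestMentions: ["programming"],
      backstoryElements: ["has_three_cats"],
      memorableQuotes: ["my code compiles on vibes alone"],
    };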