Add SQLite database support for conversation history
- Introduce SQLite database to store conversation history
- Implement functions to save and load conversations from the database
- Replace previous file-based history storage with database-backed solution
- Add new command to list and resume previous conversations
- Update chat logic to generate topics for saved conversations
- Add database initialization and management functions
- Modify command line interface to support resuming conversations
parent 892fdc317e
commit 780b755837

assistant.py: 84 changed lines
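The hunks below call self._save_to_db(topic), self._load_from_db(conversation_id), and assistant.db_path, but the database initialization itself falls outside the visible diff. A minimal sketch of what that setup could look like, assuming a single conversations table; only the table name and its id and topic columns are confirmed by the SELECT in handle_list, while the history column, the db path, and the function name are assumptions:

import os
import sqlite3

# Assumed location; the commit's actual db_path value is not visible in these hunks.
DB_PATH = os.path.expanduser("~/.assistant_history.db")

def init_db(db_path=DB_PATH):
    # Create the conversations table on first run.
    conn = sqlite3.connect(db_path)
    conn.execute(
        """
        CREATE TABLE IF NOT EXISTS conversations (
            id      INTEGER PRIMARY KEY AUTOINCREMENT,
            topic   TEXT,
            history TEXT  -- assumed: JSON-serialized message list
        )
        """
    )
    conn.commit()
    conn.close()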
@@ -3,8 +3,8 @@
import argparse
import os
import sys
import datetime
import sqlite3
import warnings

import json
from ollama import Client
@@ -19,6 +19,10 @@ from prompt_toolkit.key_binding import KeyBindings
from prompt_toolkit import PromptSession


default_assistant = "qwen3:14b"
default_topic_llm = "qwen3:1.7b"

warnings.filterwarnings("ignore")

class AIAssistant:
    def __init__(self, server="http://localhost:11434", model="qwen3:14b"):
@@ -76,14 +80,32 @@ class AIAssistant:
        else:
            self.history = [self.system_prompt()]

    def _stringify_history(self):
        s = ""
        for item in self.history:
            s += item["role"] + ":\n" + item["content"]
        return s

    def _strip_thinking_tags(self, text: str):
        start_idx = text.find("<think>")
        if (start_idx < 0):
            return text

        end_idx = text.find("</think>") + len("</think>")
        stripped_text = text[end_idx:]
        stripped_text = stripped_text.strip()
        return stripped_text

    def save_history(self):
        """Save the current conversation to the database with a generated topic."""
        # Only save if this is the first user message
        if len(self.history) == 3:
            # Generate a topic using the AI
            system_prompt = self.system_prompt()
-            user_prompt = "Generate a concise, descriptive topic for this conversation based on the following content:\n"
-            topic = self.client.chat(model=self.model, messages=[system_prompt, {"role": "user", "content": user_prompt}], stream=False)['message']['content'].strip()
+            user_prompt = "/no_think Generate a concise, 5 word descriptive topic for this conversation based on the following content. Do not use markdown, just plaintext. KEEP IT TO 5 WORDS OR LESS.:\n\n"
+            user_prompt += self._stringify_history()
+            topic = self.client.chat(model=default_topic_llm, messages=[system_prompt, {"role": "user", "content": user_prompt}], stream=False)['message']['content'].strip()
            topic = self._strip_thinking_tags(topic)
            self._save_to_db(topic)
        else:
            # For subsequent messages, we can update the topic in the future
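save_history above hands the generated topic to self._save_to_db, whose body is not among the shown hunks. One possible shape for it, written as a standalone function and assuming the schema sketched after the commit summary (history stored as JSON); it is an illustration, not the commit's actual implementation:

import json
import sqlite3

def save_conversation(db_path, topic, history):
    # Insert one conversation row and return its generated id (stand-in for _save_to_db).
    conn = sqlite3.connect(db_path)
    cur = conn.execute(
        "INSERT INTO conversations (topic, history) VALUES (?, ?)",
        (topic, json.dumps(history)),
    )
    conn.commit()
    new_id = cur.lastrowid
    conn.close()
    return new_id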
@@ -93,6 +115,7 @@ class AIAssistant:
        """Load a conversation from the database by ID. If no ID, start a new one."""
        if conversation_id:
            self._load_from_db(conversation_id)
            print(self._stringify_history())
        else:
            self.history = [self.system_prompt()]
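load_history delegates to self._load_from_db(conversation_id), which is likewise outside the visible hunks. A matching sketch under the same assumption that the history column holds the JSON-serialized message list:

import json
import sqlite3

def load_conversation(db_path, conversation_id):
    # Fetch one saved conversation's message list by id (stand-in for _load_from_db).
    conn = sqlite3.connect(db_path)
    row = conn.execute(
        "SELECT history FROM conversations WHERE id = ?",
        (conversation_id,),
    ).fetchone()
    conn.close()
    if row is None:
        raise ValueError(f"No conversation with id {conversation_id}")
    return json.loads(row[0])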
@@ -223,15 +246,25 @@ class InputHandler:
        except:
            return

    def follow_up(self, args, query):
        if type(args.follow_up) is str:
            second_input = args.follow_up
        else:
            second_input = self.improved_input()
        query += f'\n\nUser Question:\n{second_input}'
        return query


    def handle_piped_input(self, args):
        all_input = sys.stdin.read()
        query = f'Use the following context to answer the question. There will be no follow up questions from the user so make sure your answer is complete:\n{all_input}\n'
        if args.copy:
            query += 'Answer the question using a codeblock for any code or shell scripts\n'
        if args.follow_up:
-            second_input = self.improved_input()
-            query += f'\n{second_input}'
+            query = self.follow_up(args, query)
        result = self.assistant.chat(query, stream=False)
        result = self.assistant._strip_thinking_tags(result)
        print(result)
        blocks = self.extract_code_block(result)
        if args.copy and len(blocks):
            self.copy_string_to_clipboard(blocks[0])
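follow_up checks whether args.follow_up is a string, so the flag apparently accepts an optional value: given text it carries the question, given bare it triggers the interactive prompt. The argparse definitions are not in the shown hunks; one way to get exactly that behavior (the flag names here are assumptions):

import argparse

parser = argparse.ArgumentParser()
# "--follow-up 'why?'" makes args.follow_up a str, a bare "--follow-up" makes it True,
# and omitting it leaves None, so "if args.follow_up:" skips the follow-up path.
parser.add_argument("--follow-up", nargs="?", const=True, default=None)
parser.add_argument("--copy", action="store_true")
args = parser.parse_args()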
@@ -256,6 +289,14 @@ Description:\n
            self.assistant.history = [self.assistant.system_prompt()]
            self.arg_shell(args)
            exit()
        if args.follow_up:
            query = "Provide a complete answer to the user's question, there will be no follow up questions from the user.\n\n"
            query = self.follow_up(args, query)
            result = self.assistant.chat(query, stream=False)
            result = self.assistant._strip_thinking_tags(result)
            print(result)
            exit()


        print("\033[91massistant\033[0m: Type your message (press Ctrl+D to send):")
        while True:
@@ -313,7 +354,8 @@ class CommandParser:
            '/clear': self.handle_clear,
            '/clipboard': None,
            '/exit': self.handle_exit,
-            '/copy': self.handle_copy
+            '/copy': self.handle_copy,
+            '/list': self.handle_list
        }

    def parse_commands(self, text):
@@ -368,6 +410,36 @@ class CommandParser:
            self.input_handler.copy_string_to_clipboard(block)
        return True

    def handle_list(self):
        assistant = self.assistant
        db_path = assistant.db_path
        # Connect to the database and fetch saved conversations
        conn = sqlite3.connect(db_path)
        cursor = conn.cursor()
        cursor.execute("SELECT id, topic FROM conversations")
        rows = cursor.fetchall()
        conn.close()
        if not rows:
            print("No saved conversations.")
            return
        # Display saved conversations
        print("\nSaved conversations:")
        for row in rows:
            print(f"ID: {row[0]}, Topic: {row[1]}")
        # Prompt user to select an ID
        choice = input("\nEnter conversation ID to load (or 'q' to quit): ").strip()
        if choice.lower() == 'q':
            return
        try:
            conv_id = int(choice)
            assistant.load_history(conversation_id=conv_id)
            print(f"\nLoaded conversation with ID {conv_id}.")
        except ValueError:
            print("Invalid ID. Please enter a number.")
        except Exception as e:
            print(f"Error loading conversation: {e}")


    def handle_exit(self):
        sys.exit(0)
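The commit summary mentions command line support for resuming conversations, but that argparse change is not among the shown hunks either. A hedged sketch of how a resume option could feed AIAssistant.load_history (the --resume name and the import are assumptions; per the load_history docstring, passing no ID starts a new conversation):

import argparse

from assistant import AIAssistant  # assumed module name, matching assistant.py

parser = argparse.ArgumentParser()
parser.add_argument("--resume", type=int, default=None,
                    help="ID of a saved conversation to load before chatting")
args = parser.parse_args()

assistant = AIAssistant()
assistant.load_history(conversation_id=args.resume)  # None falls back to a new history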