use refactored version by default now

Hayden Johnson 2025-05-16 07:44:27 -07:00
parent 15b25aea05
commit 559be56000
6 changed files with 516 additions and 713 deletions

1
.python-version Normal file

@@ -0,0 +1 @@
3.13

README.md

@@ -44,10 +44,10 @@ Run the script with no arguments to enter interactive mode. Type your questions
**Installation**
--------------
This script requires Python 3.x and the Ollama API library (`ollama-api`). You can install these dependencies using pip:
You can install the dependencies in a virtual environment using uv:
```bash
pip install ollama-api
uv sync
```
You also need to set up an Ollama server on your local machine. Please refer to the Ollama documentation for instructions.
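A first run might look roughly like the following sketch, assuming an Ollama server is reachable on `localhost:11434` and the model name is just an example (any locally available model can be passed with `--model`):
```bash
# Pull an example model, install dependencies, and start an interactive session
ollama pull qwen3:14b
uv sync
uv run assistant.py --model qwen3:14b
```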

652
assistant.py Executable file → Normal file

@@ -1,431 +1,339 @@
#!/bin/python
# Chat with an intelligent assistant in your terminal
# AI Assistant in the terminal
import argparse
import os
import sys
import json
from ollama import Client
import re
import pyperclip
import sys
import tty
import termios
import signal
import argparse
import pygments
from pygments.lexers import get_lexer_by_name, guess_lexer
from pygments.formatters import TerminalFormatter
import os
import json
from prompt_toolkit.key_binding import KeyBindings
from prompt_toolkit import PromptSession
server = 'localhost:11434'
model = 'gemma3:12b-it-qat'
reasoning_model='deepseek-r1:14b'
temp = 0.2
num_ctx = 4096
class AIAssistant:
def __init__(self, server="http://localhost:11434", model="qwen3:14b"):
self.server = server
self.model = model
self.client = Client(host=self.server)
self.temperature = 0.2
self.num_ctx = 4096
self.history = [self.system_prompt()]
pattern = r'```[a-z]*\n[\s\S]*?\n```'
line_pattern = r'`[a-z]*[\s\S]*?`'
def set_host(self, host):
self.server = host
self.client = Client(host=host)
history_path = os.environ.get('HOME') + '/.cache/ai-assistant.history'
def system_prompt(self):
return {"role": "system", "content": "You are a helpful, smart, kind, and efficient AI assistant. You always fulfill the user's requests accurately and concisely."}
def save_history(data, path):
with open(path, 'w+') as f:
json.dump(data, f)
def load_history(self):
path = os.environ.get('HOME') + '/.cache/ai-assistant.history'
try:
with open(path, 'r') as f:
self.history = json.load(f)
except FileNotFoundError:
pass
def load_history(path):
with open(path, 'r') as f:
return json.load(f)
def save_history(self):
path = os.environ.get('HOME') + '/.cache/ai-assistant.history'
with open(path, 'w+') as f:
json.dump(self.history, f)
def save_conversation(filename='conversation.md'):
# check if filename already exists and increment filename if so
if not filename.endswith('.md'):
filename += '.md'
base, extension = os.path.splitext(filename)
i = 1
while os.path.exists(filename):
filename = f"{base}_{i}{extension}"
i += 1
# save conversation to filename
global conversation
with open(filename, 'w') as f:
f.write(conversation)
def parse_commands(text):
# See if user wrote any commands here
# returns bool: True if command was executed, False if not
# importantly, the command doesn't need to execute successfully for it to return True
tokens = text.split(' ')
match tokens[0]:
case '/save':
if len(tokens) > 1:
save_conversation(tokens[1])
else:
save_conversation()
return True
case '/clear':
global history
history = [ system_prompt ]
save_history(history, history_path)
return True
case '/clipboard':
context_query = '\n\nThe following is context provided by the user:\n'
context_query += get_string_from_clipboard() + '\n'
return text.split('/clipboard ')[1] + context_query
case '/copy':
blocks = extract_code_block(history[-1]['content'])
if len(blocks):
copy_string_to_clipboard(blocks[0][1:-1])
return True
case '/exit':
exit()
return False
def highlight_code(language_name, code):
# Check if the language is specified in the first line
lexer_name = language_name
if lexer_name == None:
lines = code.split('\n')
def determine_lexer(self, code_block):
lexer_name = None
lines = code_block.split('\n')
for line in lines:
if line.strip().startswith('```'):
lexer_name = line.strip().split('```')[1].strip()
break
lexer_part = line.strip().split('```')[1].strip()
if lexer_part:
lexer_name = lexer_part
break
elif line.strip().startswith('lang:'):
lexer_name = line.strip().split(':')[1].strip()
break
lexer_part = line.strip().split(':')[1].strip()
if lexer_part:
lexer_name = lexer_part
break
return lexer_name
if lexer_name:
def highlight_code(self, lexer_name, code):
try:
# Try to get the lexer by name
lexer = get_lexer_by_name(lexer_name)
lexer = get_lexer_by_name(lexer_name) if lexer_name else guess_lexer(code)
except ValueError:
# If the lexer is not found, guess it
lexer = guess_lexer('\n'.join(code.split('\n')[1:-1]))
if not lexer:
# If no lexer is guessed, default to bash
lexer = get_lexer_by_name('bash')
else:
# If no language is specified, guess the lexer
try:
lexer = guess_lexer('\n'.join(code.split('\n')[1:-1]))
if not lexer:
# If no lexer is guessed, default to bash
lexer = get_lexer_by_name('bash')
except:
lexer = get_lexer_by_name('bash')
formatter = TerminalFormatter()
highlighted_code = pygments.highlight(code, lexer, formatter)
return highlighted_code
formatter = TerminalFormatter()
newlines = '\n'.join(code.split('\n')[1:])
# if code is a code block, strip surrounding block markers
lines = code.split('\n')
if (len(lines) > 2) and ('```' in lines[0]) and ('```' in lines[-1]):
just_code = '\n'.join(code.split('\n')[1:-1])
else:
just_code = code.split('\n')[0] # Inline code
highlighted_code = pygments.highlight(just_code, lexer, formatter)
return highlighted_code + newlines
def extract_code_block(markdown_text):
# Use the regular expression pattern to find all matches in the markdown text
matches = re.finditer(pattern, markdown_text)
# Iterate over the matches and extract the code blocks
code_blocks = []
for match in matches:
code_block = match.group(0)
#highlighted_code = highlight_code(None, code_block)
# Add the highlighted code block to the list of code blocks
code_blocks.append(code_block)
if len(code_blocks) == 0:
line_matches = re.finditer(line_pattern, markdown_text)
for match in line_matches:
code_block = match.group(0)
code_blocks.append(code_block[1:-1])
return code_blocks
def copy_string_to_clipboard(string):
try:
pyperclip.copy(string)
except:
return
def get_string_from_clipboard():
try:
result = pyperclip.paste()
except:
result = ''
return result
code_history = []
system_prompt = {"role": "system", "content": "You are a helpful, smart, kind, and efficient AI assistant. You always fulfill the user's requests accurately and concisely."}
history = [ system_prompt ]
conversation = ""
def chat(message, stream=True):
history.append({"role": "user", "content": message})
completion = client.chat(
model=model,
options={"temperature":temp, "num_ctx":num_ctx},
messages=history,
def chat(self, message, stream=True):
self.history.append({"role": "user", "content": message})
completion = self.client.chat(
model=self.model,
options={"temperature": self.temperature, "num_ctx": self.num_ctx},
messages=self.history,
stream=stream
)
result = ''
language = ''
large_chunk = []
for chunk in completion:
)
result = ''
all_chunks = []
large_chunk = []
language = None
if stream:
text = chunk['message']['content']
large_chunk.append(text)
large_text = ''.join(large_chunk)
# update language if entering or leaving code block
if ('\n' in large_text) and ('```' in large_text):
language = large_text.split('```')[1].split('\n')[0]
if language == '':
language = None
print(large_text, end='', flush=True)
large_chunk = []
large_text = ''
for chunk in completion:
text = chunk['message']['content']
large_chunk.append(text)
all_chunks.append(text)
large_text = ''.join(large_chunk)
# Only print full lines
if '\n' in large_text:
output = large_text
if language:
output = highlight_code(language, output)
print(output, end='', flush=True)
large_chunk = []
result += text
if not stream:
result = completion['message']['content']
if stream:
print(large_text, flush=True)
history.append({"role": 'assistant', 'content': result})
return result
if ('```' in large_text) and ('\n' in large_text.split('```')[1]):
language = large_text.split('```')[1].split('\n')[0]
large_chunk = []
if language == '':
print(large_text, end='', flush=True)
language = None
def chat2(args, user_input, stream=True):
global conversation
global model
global reasoning_model
command_result = parse_commands(user_input)
if command_result:
if type(command_result) == bool:
return ''
elif type(command_result) == str: # sometimes I want to change the user prompt with a command
user_input = command_result
print('\033[91m' + 'assistant' + '\033[0m: ', end='')
if args.reasoning:
model = reasoning_model
result = chat(user_input, stream)
else:
result = chat(user_input, stream)
conversation += 'user: ' + user_input + '\n'
conversation += 'assistant: ' + result + '\n'
return result
def highlightify_text(full_text):
lines = full_text.split('\n')
result = ''
language = None
for line in lines:
text = line + '\n'
# update language if entering or leaving code block
if '```' in text:
language = text.split('```')[1].split('\n')[0]
if language == '':
language = None
result += text
text = ''
# Only print full lines
if '\n' in text:
output = text
if language:
output = highlight_code(language, output)
result += output
return result
def parse_args():
# Create the parser
parser = argparse.ArgumentParser(description='Copy and open a source file in TextEdit')
# Add the --follow-up (-f) argument
parser.add_argument('--follow-up', '-f', nargs='?', const=True, default=False, help='Ask a follow up question when piping in context')
# Add the --copy (-c) argument
parser.add_argument('--copy', '-c', action='store_true', help='copy a codeblock if it appears')
# Add the --shell (-s) argument
parser.add_argument('--shell', '-s', nargs='?', const=True, default=False, help='output a shell command that does as described')
# Add the --model (-m) argument
parser.add_argument('--model', '-m', nargs='?', const=True, default=False, help='Specify model')
# Add the --temp (-t) argument
parser.add_argument('--temp', '-t', nargs='?', const=True, default=False, help='Specify temperature')
# Add the --context
parser.add_argument('--context', nargs='?', const=True, default=False, help='Specify temperature')
# Add the --host argument
parser.add_argument('--host', nargs='?', const=True, default=False, help='Specify host of ollama server')
# Add the --reflect argument
parser.add_argument('--reasoning', '-r', action='store_true', help='Use the default reasoning model deepseek-r1:14b')
# Add the --new argument
parser.add_argument('--new', '-n', action='store_true', help='Start a chat with a fresh history')
# Add the --temporary argument
parser.add_argument('--temporary', action='store_true', help='Start a chat with a fresh history, without deleting old history')
# Parse the arguments
return parser.parse_args()
def reflection_mode(query, should_print=False):
reflection_prompt = """
You are a helpful ai assistant that answers every question thoroughly and accurately. You always begin your response with a <planning></planning> section where you lay out your plan for answering the question. It is important that you don't make any assumptions while planning. Then you <reflect></reflect> on your plan to make sure it correctly answers the user's question. Then, if you are confident your plan is correct, you give your <draft answer>, followed by <final reflection> to make sure the answer correctly addresses the user's question. Finally, give a <final answer> with your answer to the user. If there are any ambiguous or unknown requirements, ask the user for more information as your final answer. You must always have a <final answer> no matter what, even if you are asking for clarifying questions. If you do not have the <final answer> tags, the user will not see your response. Additionally, the user can not see your planning or reflecting, they can only see what goes in the <final answer></final answer> tags, so make sure you provide any information you want to tell the user in there.
if language and ('\n' in large_text) and large_chunk:
output = self.highlight_code(language, large_text)
print(output, end='', flush=True)
large_chunk = []
elif not language or not large_chunk:
print(text, end='', flush=True)
result = ''.join(all_chunks)
else:
result = completion['message']['content']
self.history.append({"role": 'assistant', 'content': result})
self.save_history()
return result
"""
result = chat(reflection_prompt + query, stream=False)
highlighted_result = highlightify_text(result)
class CommandLineParser:
def __init__(self):
self.parser = argparse.ArgumentParser(description='Chat with an intelligent assistant')
self.add_arguments()
# print('==DEBUG==')
# print(highlighted_result)
# print('==DEBUG==')
def add_arguments(self):
parser = self.parser
parser.add_argument('--host', nargs='?', const=True, default=False, help='Specify host of Ollama server')
parser.add_argument('--model', '-m', nargs='?', const=True, default=False, help='Specify model')
parser.add_argument('--temp', '-t', nargs='?', type=float, const=0.2, default=False, help='Specify temperature')
parser.add_argument('--context', type=int, default=4096, help='Specify context size')
parser.add_argument('--reasoning', '-r', action='store_true', help='Use the default reasoning model deepseek-r1:14b')
parser.add_argument('--new', '-n', action='store_true', help='Start a chat with a fresh history')
parser.add_argument('--follow-up', '-f', nargs='?', const=True, default=False, help='Ask a follow up question when piping in context')
parser.add_argument('--copy', '-c', action='store_true', help='Copy a codeblock if it appears')
parser.add_argument('--shell', '-s', nargs='?', const=True, default=False, help='Output a shell command that does as described')
final_answer = highlighted_result.split('<final answer>')
while len(final_answer) < 2:
final_answer = chat('Please put your final answer in <final answer></final answer> tags.', stream=False)
final_answer = highlighted_result.split('<final answer>')
final_answer = final_answer[1].split('</final answer>')[0]
def parse(self):
return self.parser.parse_args()
if should_print:
print(final_answer)
return final_answer
def set_host(host):
global server
server = host
# Keybindings
bindings = KeyBindings()
def arg_follow_up(args):
sys.stdin = open('/dev/tty')
if args.follow_up != True:
second_input = args.follow_up
else:
second_input = input('> ')
return second_input
def arg_shell(args):
query = '''
Form a shell command based on the following description. Only output a working shell command. Format the command like this: `command`
Description:
'''
if args.shell != True:
query += args.shell
else:
query += input('> ')
result = chat2(args, query, False)
result = blocks[0] if len(blocks := extract_code_block(result)) else result
print(result)
copy_string_to_clipboard(result)
def handle_piped_input(args):
all_input = sys.stdin.read()
query = 'Use the following context to answer the question. There will be no follow up questions from the user so make sure your answer is complete:\nSTART CONTEXT\n' + all_input + '\nEND CONTEXT\nAfter you answer the question, reflect on your answer and determine if it answers the question correctly.'
if args.copy:
query += 'Answer the question using a codeblock for any code or shell scripts\n'
if args.follow_up:
query += arg_follow_up(args)
query += '\n'
result = chat2(args, query)
blocks = extract_code_block(result)
if args.copy and len(blocks):
copy_string_to_clipboard(blocks[0])
kb = KeyBindings()
@kb.add('c-d')
@bindings.add('c-d')
def _(event):
event.current_buffer.validate_and_handle()
session = PromptSession(multiline=True, prompt_continuation='', key_bindings=kb)
def improved_input(prompt="> "):
"""
Returns the full text (including embedded newlines) when you press Ctrl-D.
Arrow keys edit within or across lines automatically.
"""
try:
text = session.prompt(prompt)
return text
except KeyboardInterrupt:
print("\nUser aborted input")
return None
def handle_non_piped_input(args):
if args.shell:
arg_shell(args)
exit()
if args.follow_up:
user_input = arg_follow_up(args)
result = chat2(args, user_input)
exit()
class InputHandler:
def __init__(self, assistant, command_parser):
self.assistant = assistant
self.command_parser = command_parser
self.command_parser.assistant = assistant
self.command_parser.input_handler = self
self.session = PromptSession(multiline=True, prompt_continuation='', key_bindings=bindings)
print("\033[91massistant\033[0m: Type your message (press Ctrl+D to send):")
while True:
def handle_input(self, args):
if not sys.stdin.isatty():
self.handle_piped_input(args)
else:
self.handle_interactive_input(args)
def copy_string_to_clipboard(self, s):
try:
full_input = improved_input()
if full_input is None:
break # User aborted
if full_input.strip() == '':
continue # Skip empty messages
pyperclip.copy(s)
except:
return
result = chat2(args, full_input)
save_history(history, history_path)
def handle_piped_input(self, args):
all_input = sys.stdin.read()
query = f'Use the following context to answer the question. There will be no follow up questions from the user so make sure your answer is complete:\n{all_input}\n'
if args.copy:
query += 'Answer the question using a codeblock for any code or shell scripts\n'
if args.follow_up:
second_input = self.improved_input()
query += f'\n{second_input}'
result = self.assistant.chat(query, stream=False)
blocks = self.extract_code_block(result)
if args.copy and len(blocks):
self.copy_string_to_clipboard(blocks[0])
except (EOFError, KeyboardInterrupt):
print("\nExiting...")
break
# except Exception as e:
# print(f"Error: {e}")
def arg_shell(self, args):
query = '''
Form a shell command based on the following description. Only output a working shell command. Format the command like this: `command`
Description:\n
'''
if type(args.shell) is str:
query += args.shell
else:
query += self.improved_input()
result = self.assistant.chat(query, stream=False)
result = blocks[0] if len(blocks := self.extract_code_block(result)) else result
print(result)
self.copy_string_to_clipboard(result)
def handle_interactive_input(self, args):
if args.shell:
self.assistant.history = [self.assistant.system_prompt()]
self.arg_shell(args)
exit()
print("\033[91massistant\033[0m: Type your message (press Ctrl+D to send):")
while True:
try:
full_input = self.improved_input()
if full_input is None:
break
if full_input.strip() == '':
continue
command_result = self.command_parser.parse_commands(full_input)
if type(command_result) is str:
self.assistant.chat(command_result)
print()
except (EOFError, KeyboardInterrupt):
print("\nExiting...")
break
def improved_input(self, prompt="> "):
"""
Returns the full text (including embedded newlines) when you press Ctrl-D.
Arrow keys edit within or across lines automatically.
"""
try:
text = self.session.prompt(prompt)
return text
except KeyboardInterrupt:
print("\nUser aborted input")
return None
def extract_code_block(self, text, highlight=True):
pattern = r'```[a-z]*\n[\s\S]*?\n```'
code_blocks = []
matches = re.finditer(pattern, text)
for match in matches:
code_block = match.group(0)
if highlight:
lexer_name = self.assistant.determine_lexer(code_block)
highlighted_code = self.assistant.highlight_code(lexer_name, code_block)
code_blocks.append(highlighted_code)
else:
code_blocks.append(code_block)
if not code_blocks:
line_pattern = r'`[a-z]*[\s\S]*?`'
line_matches = re.finditer(line_pattern, text)
for match in line_matches:
code_block = match.group(0)
code_blocks.append(code_block[1:-1])
return code_blocks
class CommandParser:
def __init__(self):
self.commands = {
'/save': self.handle_save,
'/clear': self.handle_clear,
'/clipboard': None,
'/exit': self.handle_exit,
'/copy': self.handle_copy
}
def parse_commands(self, text):
"""
Parses the given text to check if it contains a recognized command.
If the command is standalone returns True.
If the command requires passing the text through a string handler (e.g., /clipboard), returns the result of that handler as a string.
Otherwise, returns text if no command is recognized.
Args:
text: The input text to parse.
Returns:
True if the command is standalone
A string containing the result of the command handler if the command is not standalone.
"""
tokens = text.split(' ')
if not tokens:
return False
command = tokens[0]
if command in self.commands:
handler = self.commands[command]
if len(tokens) > 1 and command == '/clipboard':
context_query = '\n\nThe following is context provided by the user:\n'
clipboard_content = pyperclip.paste()
if clipboard_content:
context_query += clipboard_content + '\n' + text
return context_query
else:
return handler()
return text
def handle_save(self):
filename = input('Enter filename to save conversation: ')
self.save_conversation(filename)
return True
def save_conversation(self, filename='conversation.md'):
# TODO finish this
pass
def handle_clear(self):
self.assistant.history = [self.assistant.system_prompt()]
self.assistant.save_history()
return True
def handle_copy(self):
blocks = self.input_handler.extract_code_block(self.assistant.history[-1]['content'], highlight=False)
if len(blocks):
block = '\n'.join(blocks[0].split('\n')[1:-1])
self.input_handler.copy_string_to_clipboard(block)
return True
def handle_exit(self):
sys.exit(0)
client = None
def main():
args = parse_args()
args = CommandLineParser().parse()
assistant = AIAssistant()
if args.host:
set_host(args.host)
# Point to the local server
global client
client = Client(host=server)
assistant.set_host(args.host)
if args.model:
global model
model = args.model
assistant.model = args.model
if args.temp:
global temp
temp = float(args.temp)
assistant.temperature = args.temp
if args.context:
global num_ctx
num_ctx = int(args.context)
global history
assistant.num_ctx = args.context
if args.new:
history = [system_prompt]
save_history(history, history_path)
elif args.temporary:
history = [system_prompt]
assistant.history = [assistant.system_prompt()]
assistant.save_history()
else:
history = load_history(history_path)
if not sys.stdin.isatty():
handle_piped_input(args)
else:
handle_non_piped_input(args)
assistant.load_history()
command_parser = CommandParser()
input_handler = InputHandler(assistant, command_parser)
input_handler.handle_input(args)
if __name__ == '__main__':
main()

12
pyproject.toml Normal file

@@ -0,0 +1,12 @@
[project]
name = "ai-assistant"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.13"
dependencies = [
"ollama==0.4.7",
"prompt-toolkit>=3.0.51",
"pygments==2.19.1",
"pyperclip==1.9.0",
]


@@ -1,339 +0,0 @@
#!/bin/python
# AI Assistant in the terminal
import argparse
import os
import sys
import json
from ollama import Client
import re
import pyperclip
import pygments
from pygments.lexers import get_lexer_by_name, guess_lexer
from pygments.formatters import TerminalFormatter
from prompt_toolkit.key_binding import KeyBindings
from prompt_toolkit import PromptSession
class AIAssistant:
def __init__(self, server="http://localhost:11434", model="gemma3:12b-it-qat"):
self.server = server
self.model = model
self.client = Client(host=self.server)
self.temperature = 0.2
self.num_ctx = 4096
self.history = [self.system_prompt()]
def set_host(self, host):
self.server = host
self.client = Client(host=host)
def system_prompt(self):
return {"role": "system", "content": "You are a helpful, smart, kind, and efficient AI assistant. You always fulfill the user's requests accurately and concisely."}
def load_history(self):
path = os.environ.get('HOME') + '/.cache/ai-assistant.history'
try:
with open(path, 'r') as f:
self.history = json.load(f)
except FileNotFoundError:
pass
def save_history(self):
path = os.environ.get('HOME') + '/.cache/ai-assistant.history'
with open(path, 'w+') as f:
json.dump(self.history, f)
def determine_lexer(self, code_block):
lexer_name = None
lines = code_block.split('\n')
for line in lines:
if line.strip().startswith('```'):
lexer_part = line.strip().split('```')[1].strip()
if lexer_part:
lexer_name = lexer_part
break
elif line.strip().startswith('lang:'):
lexer_part = line.strip().split(':')[1].strip()
if lexer_part:
lexer_name = lexer_part
break
return lexer_name
def highlight_code(self, lexer_name, code):
try:
lexer = get_lexer_by_name(lexer_name) if lexer_name else guess_lexer(code)
except ValueError:
lexer = guess_lexer('\n'.join(code.split('\n')[1:-1]))
if not lexer:
lexer = get_lexer_by_name('bash')
formatter = TerminalFormatter()
highlighted_code = pygments.highlight(code, lexer, formatter)
return highlighted_code
def chat(self, message, stream=True):
self.history.append({"role": "user", "content": message})
completion = self.client.chat(
model=self.model,
options={"temperature": self.temperature, "num_ctx": self.num_ctx},
messages=self.history,
stream=stream
)
result = ''
all_chunks = []
large_chunk = []
language = None
if stream:
for chunk in completion:
text = chunk['message']['content']
large_chunk.append(text)
all_chunks.append(text)
large_text = ''.join(large_chunk)
if ('```' in large_text) and ('\n' in large_text.split('```')[1]):
language = large_text.split('```')[1].split('\n')[0]
large_chunk = []
if language == '':
print(large_text, end='', flush=True)
language = None
if language and ('\n' in large_text) and large_chunk:
output = self.highlight_code(language, large_text)
print(output, end='', flush=True)
large_chunk = []
elif not language or not large_chunk:
print(text, end='', flush=True)
result = ''.join(all_chunks)
else:
result = completion['message']['content']
self.history.append({"role": 'assistant', 'content': result})
self.save_history()
return result
class CommandLineParser:
def __init__(self):
self.parser = argparse.ArgumentParser(description='Chat with an intelligent assistant')
self.add_arguments()
def add_arguments(self):
parser = self.parser
parser.add_argument('--host', nargs='?', const=True, default=False, help='Specify host of Ollama server')
parser.add_argument('--model', '-m', nargs='?', const=True, default=False, help='Specify model')
parser.add_argument('--temp', '-t', nargs='?', type=float, const=0.2, default=False, help='Specify temperature')
parser.add_argument('--context', type=int, default=4096, help='Specify context size')
parser.add_argument('--reasoning', '-r', action='store_true', help='Use the default reasoning model deepseek-r1:14b')
parser.add_argument('--new', '-n', action='store_true', help='Start a chat with a fresh history')
parser.add_argument('--follow-up', '-f', nargs='?', const=True, default=False, help='Ask a follow up question when piping in context')
parser.add_argument('--copy', '-c', action='store_true', help='Copy a codeblock if it appears')
parser.add_argument('--shell', '-s', nargs='?', const=True, default=False, help='Output a shell command that does as described')
def parse(self):
return self.parser.parse_args()
# Keybindings
bindings = KeyBindings()
@bindings.add('c-d')
def _(event):
event.current_buffer.validate_and_handle()
class InputHandler:
def __init__(self, assistant, command_parser):
self.assistant = assistant
self.command_parser = command_parser
self.command_parser.assistant = assistant
self.command_parser.input_handler = self
self.session = PromptSession(multiline=True, prompt_continuation='', key_bindings=bindings)
def handle_input(self, args):
if not sys.stdin.isatty():
self.handle_piped_input(args)
else:
self.handle_interactive_input(args)
def copy_string_to_clipboard(self, s):
try:
pyperclip.copy(s)
except:
return
def handle_piped_input(self, args):
all_input = sys.stdin.read()
query = f'Use the following context to answer the question. There will be no follow up questions from the user so make sure your answer is complete:\n{all_input}\n'
if args.copy:
query += 'Answer the question using a codeblock for any code or shell scripts\n'
if args.follow_up:
second_input = self.improved_input()
query += f'\n{second_input}'
result = self.assistant.chat(query, stream=False)
blocks = self.extract_code_block(result)
if args.copy and len(blocks):
self.copy_string_to_clipboard(blocks[0])
def arg_shell(self, args):
query = '''
Form a shell command based on the following description. Only output a working shell command. Format the command like this: `command`
Description:\n
'''
if type(args.shell) is str:
query += args.shell
else:
query += self.improved_input()
result = self.assistant.chat(query, stream=False)
result = blocks[0] if len(blocks := self.extract_code_block(result)) else result
print(result)
self.copy_string_to_clipboard(result)
def handle_interactive_input(self, args):
if args.shell:
self.assistant.history = [self.assistant.system_prompt()]
self.arg_shell(args)
exit()
print("\033[91massistant\033[0m: Type your message (press Ctrl+D to send):")
while True:
try:
full_input = self.improved_input()
if full_input is None:
break
if full_input.strip() == '':
continue
command_result = self.command_parser.parse_commands(full_input)
if type(command_result) is str:
self.assistant.chat(command_result)
print()
except (EOFError, KeyboardInterrupt):
print("\nExiting...")
break
def improved_input(self, prompt="> "):
"""
Returns the full text (including embedded newlines) when you press Ctrl-D.
Arrow keys edit within or across lines automatically.
"""
try:
text = self.session.prompt(prompt)
return text
except KeyboardInterrupt:
print("\nUser aborted input")
return None
def extract_code_block(self, text, highlight=True):
pattern = r'```[a-z]*\n[\s\S]*?\n```'
code_blocks = []
matches = re.finditer(pattern, text)
for match in matches:
code_block = match.group(0)
if highlight:
lexer_name = self.assistant.determine_lexer(code_block)
highlighted_code = self.assistant.highlight_code(lexer_name, code_block)
code_blocks.append(highlighted_code)
else:
code_blocks.append(code_block)
if not code_blocks:
line_pattern = r'`[a-z]*[\s\S]*?`'
line_matches = re.finditer(line_pattern, text)
for match in line_matches:
code_block = match.group(0)
code_blocks.append(code_block[1:-1])
return code_blocks
class CommandParser:
def __init__(self):
self.commands = {
'/save': self.handle_save,
'/clear': self.handle_clear,
'/clipboard': None,
'/exit': self.handle_exit,
'/copy': self.handle_copy
}
def parse_commands(self, text):
"""
Parses the given text to check if it contains a recognized command.
If the command is standalone returns True.
If the command requires passing the text through a string handler (e.g., /clipboard), returns the result of that handler as a string.
Otherwise, returns text if no command is recognized.
Args:
text: The input text to parse.
Returns:
True if the command is standalone
A string containing the result of the command handler if the command is not standalone.
"""
tokens = text.split(' ')
if not tokens:
return False
command = tokens[0]
if command in self.commands:
handler = self.commands[command]
if len(tokens) > 1 and command == '/clipboard':
context_query = '\n\nThe following is context provided by the user:\n'
clipboard_content = pyperclip.paste()
if clipboard_content:
context_query += clipboard_content + '\n' + text
return context_query
else:
return handler()
return text
def handle_save(self):
filename = input('Enter filename to save conversation: ')
self.save_conversation(filename)
return True
def save_conversation(self, filename='conversation.md'):
# TODO finish this
pass
def handle_clear(self):
self.assistant.history = [self.assistant.system_prompt()]
self.assistant.save_history()
return True
def handle_copy(self):
blocks = self.input_handler.extract_code_block(self.assistant.history[-1]['content'], highlight=False)
if len(blocks):
block = '\n'.join(blocks[0].split('\n')[1:-1])
self.input_handler.copy_string_to_clipboard(block)
return True
def handle_exit(self):
sys.exit(0)
def main():
args = CommandLineParser().parse()
assistant = AIAssistant()
if args.host:
assistant.set_host(args.host)
if args.model:
assistant.model = args.model
if args.temp:
assistant.temperature = args.temp
if args.context:
assistant.num_ctx = args.context
if args.new:
assistant.history = [assistant.system_prompt()]
assistant.save_history()
else:
assistant.load_history()
command_parser = CommandParser()
input_handler = InputHandler(assistant, command_parser)
input_handler.handle_input(args)
if __name__ == '__main__':
main()

221
uv.lock Normal file

@@ -0,0 +1,221 @@
version = 1
revision = 2
requires-python = ">=3.13"
[[package]]
name = "ai-assistant"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "ollama" },
{ name = "prompt-toolkit" },
{ name = "pygments" },
{ name = "pyperclip" },
]
[package.metadata]
requires-dist = [
{ name = "ollama", specifier = "==0.4.7" },
{ name = "prompt-toolkit", specifier = ">=3.0.51" },
{ name = "pygments", specifier = "==2.19.1" },
{ name = "pyperclip", specifier = "==1.9.0" },
]
[[package]]
name = "annotated-types"
version = "0.7.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
]
[[package]]
name = "anyio"
version = "4.9.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "idna" },
{ name = "sniffio" },
]
sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949, upload-time = "2025-03-17T00:02:54.77Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload-time = "2025-03-17T00:02:52.713Z" },
]
[[package]]
name = "certifi"
version = "2025.4.26"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e8/9e/c05b3920a3b7d20d3d3310465f50348e5b3694f4f88c6daf736eef3024c4/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", size = 160705, upload-time = "2025-04-26T02:12:29.51Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/4a/7e/3db2bd1b1f9e95f7cddca6d6e75e2f2bd9f51b1246e546d88addca0106bd/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3", size = 159618, upload-time = "2025-04-26T02:12:27.662Z" },
]
[[package]]
name = "h11"
version = "0.16.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" },
]
[[package]]
name = "httpcore"
version = "1.0.9"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "certifi" },
{ name = "h11" },
]
sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" },
]
[[package]]
name = "httpx"
version = "0.28.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
{ name = "certifi" },
{ name = "httpcore" },
{ name = "idna" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" },
]
[[package]]
name = "idna"
version = "3.10"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" },
]
[[package]]
name = "ollama"
version = "0.4.7"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "httpx" },
{ name = "pydantic" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b0/6d/dc77539c735bbed5d0c873fb029fb86aa9f0163df169b34152914331c369/ollama-0.4.7.tar.gz", hash = "sha256:891dcbe54f55397d82d289c459de0ea897e103b86a3f1fad0fdb1895922a75ff", size = 12843, upload-time = "2025-01-21T18:51:48.288Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/31/83/c3ffac86906c10184c88c2e916460806b072a2cfe34cdcaf3a0c0e836d39/ollama-0.4.7-py3-none-any.whl", hash = "sha256:85505663cca67a83707be5fb3aeff0ea72e67846cea5985529d8eca4366564a1", size = 13210, upload-time = "2025-01-21T18:51:46.199Z" },
]
[[package]]
name = "prompt-toolkit"
version = "3.0.51"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "wcwidth" },
]
sdist = { url = "https://files.pythonhosted.org/packages/bb/6e/9d084c929dfe9e3bfe0c6a47e31f78a25c54627d64a66e884a8bf5474f1c/prompt_toolkit-3.0.51.tar.gz", hash = "sha256:931a162e3b27fc90c86f1b48bb1fb2c528c2761475e57c9c06de13311c7b54ed", size = 428940, upload-time = "2025-04-15T09:18:47.731Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ce/4f/5249960887b1fbe561d9ff265496d170b55a735b76724f10ef19f9e40716/prompt_toolkit-3.0.51-py3-none-any.whl", hash = "sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07", size = 387810, upload-time = "2025-04-15T09:18:44.753Z" },
]
[[package]]
name = "pydantic"
version = "2.11.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "annotated-types" },
{ name = "pydantic-core" },
{ name = "typing-extensions" },
{ name = "typing-inspection" },
]
sdist = { url = "https://files.pythonhosted.org/packages/77/ab/5250d56ad03884ab5efd07f734203943c8a8ab40d551e208af81d0257bf2/pydantic-2.11.4.tar.gz", hash = "sha256:32738d19d63a226a52eed76645a98ee07c1f410ee41d93b4afbfa85ed8111c2d", size = 786540, upload-time = "2025-04-29T20:38:55.02Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e7/12/46b65f3534d099349e38ef6ec98b1a5a81f42536d17e0ba382c28c67ba67/pydantic-2.11.4-py3-none-any.whl", hash = "sha256:d9615eaa9ac5a063471da949c8fc16376a84afb5024688b3ff885693506764eb", size = 443900, upload-time = "2025-04-29T20:38:52.724Z" },
]
[[package]]
name = "pydantic-core"
version = "2.33.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" },
{ url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" },
{ url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" },
{ url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" },
{ url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" },
{ url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" },
{ url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" },
{ url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" },
{ url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" },
{ url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" },
{ url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" },
{ url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" },
{ url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" },
{ url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" },
{ url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" },
{ url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" },
{ url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" },
]
[[package]]
name = "pygments"
version = "2.19.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581, upload-time = "2025-01-06T17:26:30.443Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293, upload-time = "2025-01-06T17:26:25.553Z" },
]
[[package]]
name = "pyperclip"
version = "1.9.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/30/23/2f0a3efc4d6a32f3b63cdff36cd398d9701d26cda58e3ab97ac79fb5e60d/pyperclip-1.9.0.tar.gz", hash = "sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310", size = 20961, upload-time = "2024-06-18T20:38:48.401Z" }
[[package]]
name = "sniffio"
version = "1.3.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
]
[[package]]
name = "typing-extensions"
version = "4.13.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef", size = 106967, upload-time = "2025-04-10T14:19:05.416Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", size = 45806, upload-time = "2025-04-10T14:19:03.967Z" },
]
[[package]]
name = "typing-inspection"
version = "0.4.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/82/5c/e6082df02e215b846b4b8c0b887a64d7d08ffaba30605502639d44c06b82/typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122", size = 76222, upload-time = "2025-02-25T17:27:59.638Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/31/08/aa4fdfb71f7de5176385bd9e90852eaf6b5d622735020ad600f2bab54385/typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f", size = 14125, upload-time = "2025-02-25T17:27:57.754Z" },
]
[[package]]
name = "wcwidth"
version = "0.2.13"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301, upload-time = "2024-01-06T02:10:57.829Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload-time = "2024-01-06T02:10:55.763Z" },
]