Compare commits: 7f0cb49156 ... 7224111a0b

5 commits, 27 changed files with 156 additions and 100 deletions.

| SHA1 | Date |
|---|---|
| 7224111a0b | 5 months ago |
| 0c022d4731 | 5 months ago |
| a697f49698 | 5 months ago |
| 3218e7eb63 | 5 months ago |
| ef789375c8 | 5 months ago |
@@ -1,4 +1,5 @@
 /model/*
 *.prof
 __pycache__
 *.venv
+*.egg-info
@@ -1,6 +1,7 @@
 print("running __main__.-py")
 
-from llama import main
+from chatbug.llama import main
+
 
 if __name__ == "__main__":
     main()
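Judging by the banner string, this file is the package's `__main__.py`, so the switch to the absolute `chatbug.llama` import implies the entry point is launched as a module of an installed package. A minimal sketch of the equivalent invocation (assumes `chatbug` is importable, e.g. after the editable install the new setup.py below enables):

```python
# Equivalent of `python -m chatbug`: runs the package's __main__.py.
import runpy

runpy.run_module("chatbug", run_name="__main__")
```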
@@ -1,7 +1,7 @@
 
 
-from inference import Inference
-from modelconfig import Modelconfig
+from chatbug.inference import Inference
+from chatbug.modelconfig import Modelconfig
 
 
 def main():
@@ -0,0 +1,44 @@
+import os
+
+
+def check_append_file(prompt: str) -> str:
+    if "@" in prompt:
+        parts = prompt.split(" ")
+        content = []
+        for part in parts:
+            if part.startswith("@"):
+                filename = part[1:]
+                try:
+                    if os.path.exists(filename):
+                        with open(filename, "r") as f:
+                            content.append("%s:'''\n%s'''" % (filename, f.read()))
+                except FileNotFoundError:
+                    print(f"File '{filename}' not found.")
+        content.append(prompt)
+        return "\n".join(content)
+    return prompt
+
+
+
+if __name__ == "__main__":
+    exit()  # not accidentally trigger it
+
+    # Create some sample files
+    with open("fmain.py", "w") as f:
+        f.write("# This is main.py\n")
+    with open("finference.py", "w") as f:
+        f.write("# This is inference.py\n")
+
+    # Test cases
+    test_prompts = [
+        "@fmain.py",
+        "@fmain.py @finference.py",
+        "@fnonexistent.py",
+        "@fmain.py @fnonexistent.py"
+    ]
+
+    for prompt in test_prompts:
+        print(f"Testing prompt: {prompt}")
+        result = check_append_file(prompt)
+        print(f"Result: {result}")
+        print("-" * 20)
@@ -1,16 +1,16 @@
-from inference import Inference
-from modelconfig import Modelconfig
 import time
 import nvidia_smi
 import torch
 import gc
+from chatbug.inference import Inference
+from chatbug.modelconfig import Modelconfig
 
 
 def empty_cuda():
     while True:
         gc.collect()
         torch.cuda.empty_cache()
         time.sleep(0.5)
         vram = nvidia_smi.get_gpu_stats()["memory_used"]
         print("vram: %d MB" % vram)
         if vram < 200:
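The hunk is cut off at the `if vram < 200:` guard, but the pattern is a release-and-poll loop: collect garbage, empty the CUDA cache, and re-check reported VRAM until it drops below roughly 200 MB. For reference, a self-contained sketch of the same idea using torch's own allocator counter instead of the repo's `nvidia_smi` helper (an assumption, not part of this diff):

```python
import gc
import time

import torch


def wait_for_vram_release(threshold_mb: int = 200) -> None:
    # Poll until the CUDA caching allocator reports usage below the threshold.
    while torch.cuda.memory_allocated() >= threshold_mb * 1024 ** 2:
        gc.collect()               # drop unreachable Python objects first
        torch.cuda.empty_cache()   # then hand cached blocks back to the driver
        time.sleep(0.5)
```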
@@ -0,0 +1,3 @@
+from chatbug.matheval import ast
+from chatbug.matheval import interpreter
+from chatbug.matheval import lexer
@@ -1,6 +1,5 @@
-import math_lexer as lexer
-from math_lexer import Token
-
+from chatbug.matheval import lexer
+from chatbug.matheval.lexer import Token
 
 
 class Statement:
@@ -1,10 +1,11 @@
-import math_ast as ast
 
 
 from sympy.parsing.sympy_parser import parse_expr
 from sympy.core.numbers import Integer, One, Zero
 from sympy import symbols, Eq, solveset, linsolve, nonlinsolve
 from sympy.core.symbol import Symbol
+from chatbug.matheval import ast
+
 
 
 def interpret(statement: ast.Statement) -> str:
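Together with the new three-line package module above, the intended flow appears to be lexer → ast → interpreter, with the old flat `math_lexer`/`math_ast` modules now living under `chatbug.matheval`. A hypothetical end-to-end call (only `interpret(statement: ast.Statement) -> str` is visible in this diff; the tokenize/parse entry points below are assumed names):

```python
from chatbug.matheval import ast, interpreter, lexer

tokens = lexer.tokenize("solve x**2 - 4 = 0")  # assumed API
statement = ast.parse(tokens)                  # assumed API; yields an ast.Statement
print(interpreter.interpret(statement))        # the diff shows this returns a str
```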
@@ -1,10 +1,10 @@
 
-from modelconfig import Modelconfig
+from chatbug.modelconfig import Modelconfig
 
 
 
 def get_model() -> Modelconfig:
 
     # model: NousResearch/Hermes-3-Llama-3.2-3B
     # tokens: 315 tk
     # time: 94.360 s
@@ -0,0 +1,22 @@
+from setuptools import setup, find_packages
+
+setup(
+    name='chatbug',
+    version='0.1.0',
+    description='A conversational AI chatbot',
+    author='Florin Tobler',
+    author_email='florin.tobler@hotmail.com',
+    packages=find_packages(exclude=["tests"]),
+    install_requires=[
+        'transformers',
+        'accelerate',
+        'bitsandbytes',
+        'pytest',
+        'pywebview',
+    ],
+    # entry_points={
+    #     'console_scripts': [
+    #         'chatbug=chatbug.app:main',
+    #     ],
+    # },
+)
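With this setup.py in place, an editable install (`pip install -e .`) is what lets every `chatbug.*` absolute import in the hunks above resolve without sys.path manipulation. Uncommenting the `entry_points` block would additionally register a `chatbug` console command that calls `chatbug.app:main`.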
@@ -1 +0,0 @@
-# empty