Compare commits
5 Commits
7f0cb49156
...
7224111a0b
Author | SHA1 | Date |
---|---|---|
|
7224111a0b | 5 months ago |
|
0c022d4731 | 5 months ago |
|
a697f49698 | 5 months ago |
|
3218e7eb63 | 5 months ago |
|
ef789375c8 | 5 months ago |
27 changed files with 156 additions and 100 deletions
@ -1,4 +1,5 @@ |
|||
/model/* |
|||
*.prof |
|||
__pycache__ |
|||
*.venv |
|||
*.venv |
|||
*.egg-info |
@ -1,6 +1,7 @@ |
|||
print("running __main__.-py") |
|||
|
|||
from llama import main |
|||
from chatbug.llama import main |
|||
|
|||
|
|||
if __name__ == "__main__": |
|||
main() |
@ -1,7 +1,7 @@ |
|||
|
|||
|
|||
from inference import Inference |
|||
from modelconfig import Modelconfig |
|||
from chatbug.inference import Inference |
|||
from chatbug.modelconfig import Modelconfig |
|||
|
|||
|
|||
def main(): |
@ -0,0 +1,44 @@ |
|||
import os |
|||
|
|||
|
|||
def check_append_file(prompt: str) -> str:
    """Expand "@<filename>" references in *prompt* with file contents.

    Every whitespace-separated token that starts with "@" is treated as a
    file reference. Each existing file is read and emitted as a
    ``filename:'''<contents>'''`` block; the original prompt is appended
    last and the pieces are joined with newlines. A prompt containing no
    "@" is returned unchanged.

    :param prompt: user prompt, possibly containing "@file" tokens
    :return: prompt augmented with the referenced files' contents
    """
    if "@" in prompt:
        parts = prompt.split(" ")
        content = []
        for part in parts:
            if part.startswith("@"):
                filename = part[1:]
                try:
                    # exists() guards the common case; the except still
                    # covers a race where the file vanishes between the
                    # check and the open.
                    if os.path.exists(filename):
                        with open(filename, "r") as f:
                            content.append("%s:'''\n%s'''" % (filename, f.read()))
                except FileNotFoundError:
                    # BUG FIX: previously printed a literal placeholder
                    # instead of interpolating the missing file's name.
                    print(f"File '{filename}' not found.")
        content.append(prompt)
        return "\n".join(content)
    return prompt
|||
|
|||
|
|||
|
|||
if __name__ == "__main__":
    # Safety guard: bail out immediately so this manual experiment is
    # never triggered by accident.
    exit()  # not accidentally trigger it

    # ------------------------------------------------------------------
    # Dead code below the exit() — kept for manual experimentation only.
    # ------------------------------------------------------------------

    # Create some sample files
    with open("fmain.py", "w") as f:
        f.write("# This is main.py\n")
    with open("finference.py", "w") as f:
        f.write("# This is inference.py\n")

    # Test cases: single ref, multiple refs, missing ref, mixed.
    test_prompts = [
        "@fmain.py",
        "@fmain.py @finference.py",
        "@fnonexistent.py",
        "@fmain.py @fnonexistent.py",
    ]

    for prompt in test_prompts:
        print(f"Testing prompt: {prompt}")
        result = check_append_file(prompt)
        print(f"Result: {result}")
        print("-" * 20)
@ -1,16 +1,16 @@ |
|||
from inference import Inference |
|||
from modelconfig import Modelconfig |
|||
import time |
|||
import nvidia_smi |
|||
import torch |
|||
import gc |
|||
from chatbug.inference import Inference |
|||
from chatbug.modelconfig import Modelconfig |
|||
|
|||
|
|||
def empty_cuda(): |
|||
while True: |
|||
gc.collect() |
|||
torch.cuda.empty_cache() |
|||
time.sleep(0.5) |
|||
time.sleep(0.5) |
|||
vram = nvidia_smi.get_gpu_stats()["memory_used"] |
|||
print("vram: %d MB" % vram) |
|||
if vram < 200: |
@ -0,0 +1,3 @@ |
|||
from chatbug.matheval import ast |
|||
from chatbug.matheval import interpreter |
|||
from chatbug.matheval import lexer |
@ -1,6 +1,5 @@ |
|||
|
|||
import math_lexer as lexer |
|||
from math_lexer import Token |
|||
from chatbug.matheval import lexer |
|||
from chatbug.matheval.lexer import Token |
|||
|
|||
|
|||
class Statement: |
@ -1,10 +1,11 @@ |
|||
import math_ast as ast |
|||
|
|||
|
|||
|
|||
from sympy.parsing.sympy_parser import parse_expr |
|||
from sympy.core.numbers import Integer, One, Zero |
|||
from sympy import symbols, Eq, solveset, linsolve, nonlinsolve |
|||
from sympy.core.symbol import Symbol |
|||
from chatbug.matheval import ast |
|||
|
|||
|
|||
def interpret(statement: ast.Statement) -> str: |
@ -1,10 +1,10 @@ |
|||
|
|||
from modelconfig import Modelconfig |
|||
from chatbug.modelconfig import Modelconfig |
|||
|
|||
|
|||
|
|||
def get_model() -> Modelconfig: |
|||
|
|||
|
|||
# model: NousResearch/Hermes-3-Llama-3.2-3B |
|||
# tokens: 315 tk |
|||
# time: 94.360 s |
@ -0,0 +1,22 @@ |
|||
from setuptools import setup, find_packages

# Packaging metadata for the "chatbug" distribution.
setup(
    name='chatbug',
    version='0.1.0',
    description='A conversational AI chatbot',
    author='Florin Tobler',
    author_email='florin.tobler@hotmail.com',
    # Ship every package found in the repo except the test suite.
    packages=find_packages(exclude=["tests"]),
    # NOTE(review): pytest is a test-time dependency; consider moving it
    # to extras_require/tests_require instead of install_requires — confirm.
    install_requires=[
        'transformers',
        'accelerate',
        'bitsandbytes',
        'pytest',
        'pywebview',
    ],
    # Console entry point intentionally disabled for now.
    # entry_points={
    #     'console_scripts': [
    #         'chatbug=chatbug.app:main',
    #     ],
    # },
)
@ -1 +0,0 @@ |
|||
# empty |
Loading…
Reference in new issue