add mathematical solver engine based on sympy
130  math_ast.py  Normal file
@@ -0,0 +1,130 @@
import math_lexer as lexer
from math_lexer import Token

# Base class for all parsed statements.
class Statement:
    pass


class Expression(Statement):
    """A mathematical expression, stored as its raw source text."""

    def __init__(self, value: str):
        self.value = value


class Equation:
    """An equation of the form lhs = rhs."""

    def __init__(self, lhs: Expression, rhs: Expression):
        self.lhs = lhs
        self.rhs = rhs


class Solve(Statement):
    """A 'solve ... for ...' statement: equations plus the variables to solve for."""

    def __init__(self, equations: list[Equation], variables: list[Expression]):
        self.equations = equations
        self.variables = variables

class Parser:
    def __init__(self):
        self.tokens: list[Token]  # tokens from lexer
        self._last_eaten = None

    def not_eof(self) -> bool:
        return self.tokens[0].type is not lexer.END_OF_INPUT

    def at(self) -> Token:
        return self.tokens[0]

    def at_last(self) -> Token:
        return self._last_eaten

    def eat(self) -> Token:
        self._last_eaten = self.tokens.pop(0)
        return self._last_eaten

    def backtrack(self):
        if not self._last_eaten:
            raise Exception("Cannot backtrack.")
        self.tokens.insert(0, self._last_eaten)
        self._last_eaten = None

    def eat_expect(self, token_type: int | str) -> Token:
        prev = self.eat()
        if prev.type is not token_type:
            raise Exception("expected to consume '%s' but '%s' encountered." % (str(token_type), str(prev.type)))
        return prev

    def at_expect(self, token_type: int | str) -> Token:
        prev = self.at()
        if prev.type is not token_type:
            raise Exception("expected to be at '%s' but '%s' encountered." % (str(token_type), str(prev.type)))
        return prev

    def parse(self, tokens: list[Token]) -> Statement:
        self.tokens = tokens
        statement = self.parse_statement()
        self.at_expect(lexer.END_OF_INPUT)
        return statement

    def parse_statement(self) -> Statement:
        token_type = self.at().type
        if token_type is lexer.SOLVE:
            return self.parse_solve()
        return self.parse_expression(merge_commas=True)

    def parse_solve(self) -> Solve:
        """
        solve x = 1 for x
        solve x = y and y = 2 for x and y
        """
        self.eat_expect(lexer.SOLVE)
        equations = []  # list of equations
        variables = []  # list of variables to solve for

        # collect equations, separated by 'and' or ',', until the 'for' keyword
        while self.not_eof() and self.at().type is not lexer.FOR:
            equations.append(self.parse_equation())
            next_type = self.at().type
            if next_type is lexer.AND or next_type is lexer.COMMA:
                self.eat()

        self.eat_expect(lexer.FOR)

        # collect the variables to solve for, also separated by 'and' or ','
        while self.not_eof():
            variables.append(self.parse_expression(merge_commas=False))
            next_type = self.at().type
            if next_type is lexer.AND or next_type is lexer.COMMA:
                self.eat()

        return Solve(equations, variables)

    def parse_equation(self) -> Equation:
        lhs = self.parse_expression(merge_commas=False)
        self.eat_expect(lexer.EQUALS)
        rhs = self.parse_expression(merge_commas=False)
        return Equation(lhs, rhs)

    def parse_expression(self, merge_commas: bool) -> Expression:
        """
        math expression
        e.g.:
        sin(45) / 4 * pi
        """
        if merge_commas:
            # consume the rest of the input and join it into one expression string
            values = []
            while self.not_eof():
                token = self.eat()
                if token.type is lexer.COMMA:
                    values.append(lexer.COMMA)
                elif token.type is lexer.EQUALS:
                    values.append(lexer.EQUALS)
                else:
                    values.append(token.value)
            # token = self.eat_expect(lexer.EXPRESSION)
            # values.append(token.value)
            # if self.at() is lexer.COMMA:
            #     token = self.eat()
            #     values.append(lexer.COMMA)
            return Expression("".join(values))
        else:
            token = self.eat_expect(lexer.EXPRESSION)
            return Expression(token.value)
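
Usage sketch (not part of this commit): math_lexer.py is referenced but not included in this diff, so the Token constructor and the token-type constants (SOLVE, EXPRESSION, EQUALS, FOR, AND, COMMA, END_OF_INPUT) below are assumptions inferred from how the parser uses them. Note that the parser compares token types with `is`, so the lexer's own constants must be passed through unchanged. With a hand-built token stream for "solve x = 1 for x", the parser would produce a Solve node roughly like this:

# Hypothetical example; Token(type=..., value=...) and the constant names
# are assumptions, since the real math_lexer module is not part of this diff.
import math_lexer as lexer
from math_lexer import Token
from math_ast import Parser, Solve

# Hand-built token stream for: solve x = 1 for x
tokens = [
    Token(type=lexer.SOLVE, value="solve"),
    Token(type=lexer.EXPRESSION, value="x"),
    Token(type=lexer.EQUALS, value="="),
    Token(type=lexer.EXPRESSION, value="1"),
    Token(type=lexer.FOR, value="for"),
    Token(type=lexer.EXPRESSION, value="x"),
    Token(type=lexer.END_OF_INPUT, value=""),
]

statement = Parser().parse(tokens)
assert isinstance(statement, Solve)
print([(eq.lhs.value, eq.rhs.value) for eq in statement.equations])  # [('x', '1')]
print([v.value for v in statement.variables])                        # ['x']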
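
The commit message says the solver engine is based on sympy, but math_ast.py itself never imports it, so evaluation presumably lives in another module that is not shown here. A minimal sketch of what such a consumer of a Solve node could look like, assuming only the AST classes above and the public sympy API:

# Hypothetical consumer of a Solve node (not in this commit): map each
# Equation onto sympy.Eq and hand the whole system to sympy.solve.
import sympy

def evaluate_solve(node):  # node: math_ast.Solve (assumed interface)
    eqs = [sympy.Eq(sympy.sympify(eq.lhs.value), sympy.sympify(eq.rhs.value))
           for eq in node.equations]
    symbols = [sympy.Symbol(var.value) for var in node.variables]
    return sympy.solve(eqs, symbols)

# e.g. for "solve x = y and y = 2 for x and y" this would return {x: 2, y: 2}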