Commit

Introduce type annotations
Tomatosoup97 committed May 4, 2019
1 parent bfb3f35 commit ba828af
Showing 14 changed files with 134 additions and 100 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -8,3 +8,5 @@ build
*.whl

.idea/
.mypy_cache/
.pytype/
3 changes: 1 addition & 2 deletions .travis.yml
@@ -1,8 +1,7 @@
language: python
python:
- 3.4
- 3.5
- 3.6
- 3.7
script:
- python run_tests.py

2 changes: 1 addition & 1 deletion README.md
@@ -2,7 +2,7 @@

[![Build Status](https://travis-ci.org/qedsoftware/bamboolean.svg?branch=master)](https://travis-ci.org/qedsoftware/bamboolean)

Supported from Python >= 3.4
Supported from Python >= 3.6

Implementation of Bamboolean - Boolean Logic Language

56 changes: 31 additions & 25 deletions bamboolean/ast.py
@@ -1,32 +1,54 @@
from typing import List, Any, Union
from .lexer import Token


class AST:
"""Abstract Syntax Tree """
def tree_repr(self):
def tree_repr(self) -> List[Any]:
raise NotImplementedError

def __str__(self):
def __str__(self) -> str:
return str(self.tree_repr())

def __repr__(self):
def __repr__(self) -> str:
return self.__str__()


class TokenBasedAST(AST):
def __init__(self, token):
def __init__(self, token: Token) -> None:
self.token = token
self.value = token.value

def tree_repr(self):
return self.token.tree_repr()


class Var(TokenBasedAST):
pass


class Num(TokenBasedAST):
pass


class Bool(TokenBasedAST):
pass


class String(TokenBasedAST):
pass


ASTValueType = Union[String, Bool, Num]


class Constraint(AST):
def __init__(self, var, rel_op, value):
def __init__(self, var: Var, rel_op: Token, value: ASTValueType):
self.var = var
self.rel_op = rel_op
self.value = value

def tree_repr(self):
def tree_repr(self) -> List[Any]:
return [
self.var.tree_repr(),
self.rel_op.tree_repr(),
@@ -35,35 +57,19 @@ def tree_repr(self):


class BinOp(AST):
def __init__(self, left, op, right):
def __init__(self, left: AST, op: Token, right: AST) -> None:
self.left = left
self.op = op
self.right = right

def tree_repr(self):
def tree_repr(self) -> List[Any]:
return [
self.left.tree_repr(),
self.op.tree_repr(),
self.right.tree_repr(),
]


class Var(TokenBasedAST):
pass


class Num(TokenBasedAST):
pass


class Bool(TokenBasedAST):
pass


class String(TokenBasedAST):
pass


class NoOp(AST):
def tree_repr(self):
def tree_repr(self) -> List[Any]:
return ['noop']
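
For orientation, a minimal sketch (not part of this commit) of how the annotated AST classes compose, assuming the package layout implied by the imports above and that tok.ID, tok.EQ and tok.INTEGER are the string token types the lexer emits:

from bamboolean import tokens as tok
from bamboolean.lexer import Token
from bamboolean.ast import Constraint, Var, Num

# Build the constraint X == 42 by hand instead of going through the parser.
node = Constraint(
    var=Var(Token(tok.ID, 'X')),
    rel_op=Token(tok.EQ, '=='),
    value=Num(Token(tok.INTEGER, 42)),
)
print(node)  # nested tree_repr(), roughly [('ID', 'X'), ('EQ', '=='), ('INTEGER', 42)]

With the new ASTValueType union, a checker such as mypy should flag a bare string passed as value, which is the point of the annotation.
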
4 changes: 4 additions & 0 deletions bamboolean/exceptions.py
@@ -10,5 +10,9 @@ class BambooleanParserError(BambooleanError):
pass


class BambooleanRuntimeError(BambooleanError):
pass


class NoSuchVisitorException(BambooleanError):
pass
9 changes: 5 additions & 4 deletions bamboolean/factories.py
@@ -1,22 +1,23 @@
from .ast import AST
from .lexer import Lexer
from .parser import Parser
from .interpreter import Interpreter


def ParserFactory(text):
def ParserFactory(text: str) -> Parser:
lexer = Lexer(text)
return Parser(lexer)


def InterpreterFactory(text, symbol_table):
def InterpreterFactory(text: str, symbol_table: dict) -> Interpreter:
parser = ParserFactory(text)
tree = parser.parse()
return Interpreter(tree, symbol_table)


def interpret(text, symbol_table):
def interpret(text: str, symbol_table: dict) -> bool:
return InterpreterFactory(text, symbol_table).interpret()


def parse(text):
def parse(text: str) -> AST:
return ParserFactory(text).parse()
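
Taken together, the annotated factories form a small typed entry point for the package. A hedged usage sketch (not from this diff), assuming expressions are constraints of the form VAR <op> value joined with AND/OR, as the lexer diff further below suggests:

from bamboolean.factories import interpret, parse

symbol_table = {'x': 42, 'kind': 'demo'}  # keys are upper-cased by the interpreter
print(interpret("X == 42 AND KIND == 'demo'", symbol_table))  # expected: True
print(parse("X != 1 OR KIND == 'demo'"))  # prints the parsed tree via tree_repr()
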
33 changes: 20 additions & 13 deletions bamboolean/interpreter.py
@@ -1,36 +1,43 @@
from numbers import Number
from typing import Any, NoReturn
import operator as built_in_op

from . import tokens as tok
from .ast import AST
from .exceptions import BambooleanRuntimeError
from .ast import AST, TokenBasedAST
from .node_visitor import NodeVisitor


class Interpreter(NodeVisitor):
def __init__(self, tree, symbol_table):
assert isinstance(tree, AST)
def __init__(self, tree: AST, symbol_table: dict) -> None:
self.tree = tree
self.symbol_table = {k.upper(): v for k, v in symbol_table.items()}

def interpret(self):
def interpret(self) -> bool:
if not self.tree:
return False
return self.visit(self.tree)

def visit_BinOp(self, node):
op_type = node.op.type
def error(self, extra='') -> NoReturn:
raise BambooleanRuntimeError(
"Runtime error occured. {extra}".format(extra=extra))

def visit_BinOp(self, node) -> bool:
op_type = node.op.type
if op_type == tok.AND:
return self.visit(node.left) and self.visit(node.right)
elif op_type == tok.OR:
return bool(self.visit(node.left) or self.visit(node.right))
else:
self.error("Could not evaluate binary operator")

def visit_Constraint(self, node):
def visit_Constraint(self, node) -> bool:
var_value = self.visit(node.var)
value = self.visit(node.value)
return self._handle_rel_op(node.rel_op.type, var_value, value)

@staticmethod
def _handle_rel_op(op_type, val1, val2):
def _handle_rel_op(op_type: str, val1, val2) -> bool:
mapping = {
'NE': built_in_op.ne,
'EQ': built_in_op.eq,
@@ -42,18 +49,18 @@ def _handle_rel_op(op_type, val1, val2):
op = mapping[op_type]
return op(val1, val2)

def visit_Var(self, node):
def visit_Var(self, node: TokenBasedAST) -> Any:
var_name = node.value
return self.symbol_table.get(var_name, '')

def visit_Num(self, node):
def visit_Num(self, node) -> Number:
return node.value

def visit_Bool(self, node):
def visit_Bool(self, node) -> bool:
return node.value

def visit_String(self, node):
def visit_String(self, node) -> str:
return node.value

def visit_NoOp(self, node):
def visit_NoOp(self, node) -> bool:
return True  # the absence of an expression evaluates to true
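
One level lower, the same flow matches the new annotations on Interpreter: parse to an AST, then evaluate it against a symbol table whose keys are normalised to upper case. A sketch under those assumptions:

from bamboolean.factories import parse
from bamboolean.interpreter import Interpreter

tree = parse("AGE == 27 AND NAME == 'Alice'")
result: bool = Interpreter(tree, {'age': 27, 'name': 'Alice'}).interpret()
print(result)  # expected: True, via the EQ entry in _handle_rel_op
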
56 changes: 30 additions & 26 deletions bamboolean/lexer.py
@@ -1,29 +1,33 @@
import re
from functools import reduce
from collections import OrderedDict
from typing import NoReturn, Optional, Dict, Tuple, Union, Callable

from .exceptions import BambooleanLexerError
from . import tokens as tok


ValueType = Optional[Union[str, bool, int, float]]


class Token:
def __init__(self, type, value):
self.type = type
self.value = value
def __init__(self, type: str, value: ValueType) -> None:
self.type: str = type
self.value: ValueType = value

def __str__(self):
def __str__(self) -> str:
return 'Token({type}, {value})'.format(
type=self.type,
value=repr(self.value))

def __repr__(self):
def __repr__(self) -> str:
return self.__str__()

def tree_repr(self):
def tree_repr(self) -> Tuple[str, ValueType]:
return self.type, self.value


RESERVED_KEYWORDS = {
RESERVED_KEYWORDS: Dict[str, Token] = {
'AND': Token(tok.AND, 'AND'),
'OR': Token(tok.OR, 'OR'),
'TRUE': Token(tok.BOOL, True),
@@ -32,38 +36,37 @@ def tree_repr(self):


class Lexer:
def __init__(self, text):
assert isinstance(text, str)
def __init__(self, text: str) -> None:
self.text = text
self.position = 0
self.current_char = self.text[self.position] if self.text else None

def error(self):
def error(self) -> NoReturn:
raise BambooleanLexerError(
("Error tokenizing input on character: "
"{} and position: {}.\nExpr: {}".format(
self.current_char, self.position, self.text))
)

def _is_eof(self, pos):
def _is_eof(self, pos: int) -> bool:
return pos > len(self.text) - 1

def next(self):
def next(self) -> None:
"""
Set pointer to next character
"""
self.position += 1
is_eof = self._is_eof(self.position)
self.current_char = self.text[self.position] if not is_eof else None

def peek(self):
def peek(self) -> Optional[str]:
"""
Check what next char will be without advancing position
"""
peek_pos = self.position + 1
return self.text[peek_pos] if not self._is_eof(peek_pos) else None

def id(self):
def id(self) -> Token:
"""
Handle identifiers and reserved keywords
"""
@@ -76,24 +79,25 @@ def id(self):
token = RESERVED_KEYWORDS.get(result, Token(tok.ID, result))
return token

def skip_whitespace(self):
def skip_whitespace(self) -> None:
while self.current_char is not None and self.current_char.isspace():
self.next()

@staticmethod
def _is_quotation_mark(char):
def _is_quotation_mark(char: str) -> bool:
return char == "'" or char == '"'

def string(self):
def string(self) -> Token:
self.next() # skip opening quotation mark
result = ''
while not self._is_quotation_mark(self.current_char):
while self.current_char is not None and \
not self._is_quotation_mark(self.current_char):
result += self.current_char
self.next()
self.next() # omit closing quote
return Token(tok.STRING, result)

def number(self):
def number(self) -> Token:
result = str(self._integer())
if self.current_char == '.':
self.next()
@@ -102,36 +106,36 @@ def number(self):
else:
return Token(tok.INTEGER, int(result))

def _integer(self):
def _integer(self) -> int:
result = ''
while self.current_char is not None and \
self.current_char.isdigit():
result += self.current_char
self.next()
return int(result)

def skip_n_chars(self, n):
def skip_n_chars(self, n: int) -> None:
for i in range(n):
self.next()

def is_token_equal(self, expected):
def is_token_equal(self, expected: str) -> bool:
return expected == reduce(
lambda actual, _: actual + str(self.peek()),
range(len(expected)-1),
self.current_char,
str(self.current_char),
)

def get_next_token(self):
def get_next_token(self) -> Token:
"""
Lexical analyzer (tokenizer). Breaks sentence apart into tokens
"""
regex_map = OrderedDict((
regex_map: Dict[str, Callable[[], Token]] = OrderedDict((
(r'("|\')', self.string),
(r'[_a-zA-Z]', self.id),
(r'\d', self.number),
))

tokens_map = OrderedDict((
tokens_map: Dict[str, Token] = OrderedDict((
('==', Token(tok.EQ, '==')),
('!=', Token(tok.NE, '!=')),
('<=', Token(tok.LTE, '<=')),
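
A hedged sketch (not part of this commit) of driving the annotated Lexer directly, assuming get_next_token() dispatches single tokens via the regex_map shown above and that the token-type constants are their own names:

from bamboolean.lexer import Lexer

print(Lexer("42").get_next_token())       # Token(INTEGER, 42)
print(Lexer("'hello'").get_next_token())  # Token(STRING, 'hello')
print(Lexer("AND").get_next_token())      # Token(AND, 'AND'), via RESERVED_KEYWORDS
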
