syntax error support
bansheerubber committed Sep 1, 2020
1 parent 03b17f4 commit 4f1e651
Showing 38 changed files with 157 additions and 121 deletions.
4 changes: 2 additions & 2 deletions expressions/argument_expression.py
@@ -1,8 +1,8 @@
 from expression import Expression
 
 class ArgumentExpression(Expression):
-	def __init__(self, expressions=[]):
-		super().__init__(expressions=expressions)
+	def __init__(self, expressions=[], tokenizer=None, current_line_index=None, current_index=None, current_file_name=None):
+		super().__init__(expressions=expressions, tokenizer=tokenizer, current_line_index=current_line_index, current_index=current_index, current_file_name=current_file_name)
 
 	def __str__(self):
 		return f"ArgumentExpression({self.expressions})"
8 changes: 4 additions & 4 deletions expressions/array_access_expression.py
@@ -5,16 +5,16 @@
 from regex import closing_bracket_token, semicolon_token
 
 class ArrayAccessExpression(Expression):
-	def __init__(self, symbol):
-		super().__init__()
+	def __init__(self, symbol, tokenizer=None):
+		super().__init__(tokenizer=tokenizer)
 		self.symbol = symbol
 		self.argument_expressions = []
 		self.parent = None
 		self.has_arguments = True
 
 	def convert_expressions_to_arguments(self):
 		if len(self.expressions) > 0:
-			self.argument_expressions.append(ArgumentExpression(expressions=self.expressions))
+			self.argument_expressions.append(ArgumentExpression(expressions=self.expressions, current_line_index=self.current_line_index, current_index=self.current_index, current_file_name=self.current_file_name))
 			self.expressions = []
 
 	def __str__(self):
@@ -35,7 +35,7 @@ def to_script(self):
 		return f"{self.symbol.to_script()}[{re.sub(r',$', '', value.strip())}]{self.handle_semicolon()}"
 
 def read_expression(tokenizer):
-	expression = ArrayAccessExpression(tokenizer.get_symbol(tokenizer.buffer))
+	expression = ArrayAccessExpression(tokenizer.get_symbol(tokenizer.buffer), tokenizer=tokenizer)
 	tokenizer.buffer = ""
 	tokenizer.tokenize(stop_ats=[closing_bracket_token], give_back_stop_ats=[semicolon_token], tree=expression)
 	expression.convert_expressions_to_arguments()
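Worth noting in the hunk above: when ArrayAccessExpression groups its buffered expressions into an ArgumentExpression, it forwards its own stored current_line_index/current_index/current_file_name rather than handing over the tokenizer. By that point the tokenizer's cursor has already advanced to the closing bracket, so re-sampling it would attribute the arguments to the wrong spot. A minimal sketch of the pattern (classes stripped down for illustration; only the attribute names come from this diff):

class Expression:
	def __init__(self, tokenizer=None, current_line_index=None, current_index=None, current_file_name=None):
		# either sample the tokenizer's cursor now, or accept a forwarded position
		if tokenizer != None:
			self.current_line_index = tokenizer.file.current_line_index
			self.current_index = tokenizer.file.current_index
			self.current_file_name = tokenizer.file.filename
		else:
			self.current_line_index = current_line_index
			self.current_index = current_index
			self.current_file_name = current_file_name
		self.expressions = []

class ArgumentExpression(Expression):
	pass

class ArrayAccessExpression(Expression):
	def convert_expressions_to_arguments(self):
		# forward the position captured when the array access began,
		# not wherever the tokenizer happens to be now
		self.argument_expressions = [ArgumentExpression(
			current_line_index=self.current_line_index,
			current_index=self.current_index,
			current_file_name=self.current_file_name)]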
6 changes: 3 additions & 3 deletions expressions/break_expression.py
@@ -2,8 +2,8 @@
 from regex import valid_break
 
 class BreakExpression(Expression):
-	def __init__(self):
-		super().__init__()
+	def __init__(self, tokenizer=None):
+		super().__init__(tokenizer=tokenizer)
 
 	def __str__(self):
 		return "BreakExpression()"
@@ -16,6 +16,6 @@ def to_script(self):
 
 def read_expression(tokenizer, tree):
 	tokenizer.file.give_character_back()
-	return BreakExpression()
+	return BreakExpression(tokenizer=tokenizer)
 
 Expression.add_keyword_regex(valid_break, BreakExpression)
6 changes: 3 additions & 3 deletions expressions/case_expression.py
@@ -3,8 +3,8 @@
 from regex import closing_curly_bracket_token, colon_token, valid_case, valid_default
 
 class CaseExpression(Expression):
-	def __init__(self):
-		super().__init__()
+	def __init__(self, tokenizer=None):
+		super().__init__(tokenizer=tokenizer)
 		self.conditional_expressions = []
 		self.is_code_block = True
@@ -42,7 +42,7 @@ def to_script(self):
 		return full_output
 
 def read_expression(tokenizer, tree):
-	expression = CaseExpression()
+	expression = CaseExpression(tokenizer=tokenizer)
 	tokenizer.file.give_character_back()
 	tokenizer.tokenize(stop_ats=[colon_token], tree=expression)
 	expression.convert_expressions_to_conditionals()
6 changes: 3 additions & 3 deletions expressions/chaining_expression.py
@@ -2,8 +2,8 @@
 from regex import chaining_token, closing_bracket_token, closing_parenthesis_token, operator_token_without_concatenation, semicolon_token, template_literal_token
 
 class ChainingExpression(Expression):
-	def __init__(self):
-		super().__init__()
+	def __init__(self, tokenizer=None):
+		super().__init__(tokenizer=tokenizer)
 		self.parent = None
 
 	def __str__(self):
@@ -27,7 +27,7 @@ def read_expression(tokenizer, tree, inheritable_give_back_stop_at):
 	first_expression = tokenizer.get_symbol(tokenizer.buffer)
 	tokenizer.buffer = ""
 
-	chaining_expression = ChainingExpression()
+	chaining_expression = ChainingExpression(tokenizer=tokenizer)
 	tokenizer.add_expression(chaining_expression, first_expression)
 	tokenizer.file.give_character_back()
 	while tokenizer.file.read_character() == ".":
6 changes: 3 additions & 3 deletions expressions/conditional_expression.py
@@ -3,8 +3,8 @@
 from regex import closing_curly_bracket_token, closing_parenthesis_token, opening_curly_bracket_token, semicolon_token, valid_conditional
 
 class ConditionalExpression(Expression):
-	def __init__(self):
-		super().__init__()
+	def __init__(self, tokenizer=None):
+		super().__init__(tokenizer=tokenizer)
 		self.conditional_expressions = []
 		self.is_code_block = True
 		self.type = ""
@@ -48,7 +48,7 @@ def to_script(self):
 		return full_output
 
 def read_expression(tokenizer, tree):
-	expression = ConditionalExpression()
+	expression = ConditionalExpression(tokenizer=tokenizer)
 	tokenizer.file.give_character_back()
 	if tokenizer.buffer == "else":
 		tokenizer.buffer = tokenizer.buffer + " " + tokenizer.file.read_character() + tokenizer.file.read_character()
6 changes: 3 additions & 3 deletions expressions/continue_expression.py
@@ -2,8 +2,8 @@
 from regex import valid_continue
 
 class ContinueExpression(Expression):
-	def __init__(self):
-		super().__init__()
+	def __init__(self, tokenizer=None):
+		super().__init__(tokenizer=tokenizer)
 
 	def __str__(self):
 		return "ContinueExpression()"
@@ -16,6 +16,6 @@ def to_script(self):
 
 def read_expression(tokenizer, tree):
 	tokenizer.file.give_character_back()
-	return ContinueExpression()
+	return ContinueExpression(tokenizer=tokenizer)
 
 Expression.add_keyword_regex(valid_continue, ContinueExpression)
8 changes: 4 additions & 4 deletions expressions/datablock_expression.py
@@ -4,8 +4,8 @@
 from regex import closing_curly_bracket_token, closing_parenthesis_token, colon_token, opening_curly_bracket_token, opening_parenthesis_token, valid_datablock
 
 class DatablockExpression(Expression):
-	def __init__(self):
-		super().__init__()
+	def __init__(self, tokenizer=None):
+		super().__init__(tokenizer=tokenizer)
 		self.class_symbol = None
 		self.name_symbol = None
 		self.is_code_block = True
@@ -51,7 +51,7 @@ def to_script(self):
 		return full_output
 
 def read_expression(tokenizer, tree):
-	expression = DatablockExpression()
+	expression = DatablockExpression(tokenizer=tokenizer)
 	tokenizer.file.give_character_back()
 
 	tokenizer.tokenize(stop_ats=[opening_parenthesis_token], tree=expression)
@@ -60,7 +60,7 @@ def read_expression(tokenizer, tree):
 	tokenizer.tokenize(stop_ats=[closing_parenthesis_token], give_back_stop_ats=[colon_token], tree=expression)
 
 	if tokenizer.file.read_character() == ":":
-		inheritance_expression = InheritanceExpression()
+		inheritance_expression = InheritanceExpression(tokenizer=tokenizer)
 		inheritance_expression.child_class = expression.expressions[0]
 		tokenizer.tokenize(stop_ats=[closing_parenthesis_token], tree=inheritance_expression)
 		inheritance_expression.convert_expression_to_super_class()
6 changes: 3 additions & 3 deletions expressions/default_expression.py
@@ -3,8 +3,8 @@
 from regex import closing_curly_bracket_token, valid_case, valid_default
 
 class DefaultExpression(Expression):
-	def __init__(self):
-		super().__init__()
+	def __init__(self, tokenizer=None):
+		super().__init__(tokenizer=tokenizer)
 		self.is_code_block = True
 
 	def __str__(self):
@@ -33,7 +33,7 @@ def to_script(self):
 		return full_output
 
 def read_expression(tokenizer, tree):
-	expression = DefaultExpression()
+	expression = DefaultExpression(tokenizer=tokenizer)
 	# read up until next case, next default, or }
 	tokenizer.tokenize(give_back_stop_ats=[closing_curly_bracket_token], buffer_give_back_stop_at=[valid_case, valid_default], tree=expression)
 
13 changes: 12 additions & 1 deletion expressions/expression.py
@@ -1,7 +1,7 @@
 keyword_regexes = {}
 
 class Expression:
-	def __init__(self, expressions=None):
+	def __init__(self, expressions=None, tokenizer=None, no_errors=False, current_line_index=None, current_index=None, current_file_name=None):
 		if expressions == None:
 			self.expressions = []
 		else:
@@ -12,6 +12,17 @@ def __init__(self, expressions=None):
 		self.is_chainable = False
 		self.has_arguments = False
 		self.no_keywords_in_code_block = False
+
+		if current_line_index != None and current_index != None and current_file_name != None:
+			self.current_line_index = current_line_index
+			self.current_index = current_index
+			self.current_file_name = current_file_name
+		elif tokenizer != None and no_errors == False:
+			self.current_line_index = tokenizer.file.current_line_index
+			self.current_index = tokenizer.file.current_index
+			self.current_file_name = tokenizer.file.filename
+		elif no_errors == False:
+			raise Exception(f"Expected tokenizer argument for expression {type(self)}")
 
 	def handle_semicolon(self):
 		if self.parent != None and self.parent.is_code_block and self in self.parent.expressions:
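This Expression hunk is the heart of the commit: every expression now records where in the source it began, preferring an explicitly forwarded position and otherwise sampling the tokenizer's file cursor at construction time. A hedged sketch of how that stored position can surface in an error message follows; the _File/_Tokenizer stubs, the located_error helper, and the "example.egg" filename are illustrative assumptions, not code from this commit:

class _File:
	# assumed shape, matching the attributes the diff reads
	def __init__(self, filename):
		self.filename = filename
		self.current_line_index = 0  # line of the cursor (assumed zero-based)
		self.current_index = 0       # column within that line

class _Tokenizer:
	def __init__(self, filename):
		self.file = _File(filename)

class Expression:
	def __init__(self, tokenizer=None, no_errors=False, current_line_index=None, current_index=None, current_file_name=None):
		# an explicitly forwarded position wins; otherwise sample the cursor
		if current_line_index != None and current_index != None and current_file_name != None:
			self.current_line_index = current_line_index
			self.current_index = current_index
			self.current_file_name = current_file_name
		elif tokenizer != None and no_errors == False:
			self.current_line_index = tokenizer.file.current_line_index
			self.current_index = tokenizer.file.current_index
			self.current_file_name = tokenizer.file.filename
		elif no_errors == False:
			raise Exception(f"Expected tokenizer argument for expression {type(self)}")

def located_error(expression, message):
	# hypothetical helper: turn a stored position into a readable syntax error
	return SyntaxError(f"{expression.current_file_name}:{expression.current_line_index + 1}:{expression.current_index}: {message}")

expression = Expression(tokenizer=_Tokenizer("example.egg"))
print(located_error(expression, "unexpected token"))
# -> example.egg:1:0: unexpected token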
6 changes: 3 additions & 3 deletions expressions/for_loop_expression.py
@@ -3,8 +3,8 @@
 from regex import closing_curly_bracket_token, closing_parenthesis_token, opening_curly_bracket_token, semicolon_token, valid_for
 
 class ForLoopExpression(Expression):
-	def __init__(self):
-		super().__init__()
+	def __init__(self, tokenizer=None):
+		super().__init__(tokenizer=tokenizer)
 		self.initiation_expressions = []
 		self.conditional_expressions = []
 		self.increment_expressions = []
@@ -63,7 +63,7 @@ def to_script(self):
 		return full_output
 
 def read_expression(tokenizer, tree):
-	expression = ForLoopExpression()
+	expression = ForLoopExpression(tokenizer=tokenizer)
 
 	tokenizer.tokenize(stop_ats=[semicolon_token], tree=expression)
 	expression.move_initiation_expressions()
8 changes: 4 additions & 4 deletions expressions/function_expression.py
@@ -5,8 +5,8 @@
 import re
 
 class FunctionExpression(Expression):
-	def __init__(self):
-		super().__init__()
+	def __init__(self, tokenizer=None):
+		super().__init__(tokenizer=tokenizer)
 		self.name_symbol = None
 		self.argument_expressions = []
 		self.parent = None
@@ -19,7 +19,7 @@ def convert_expression_to_name(self):
 
 	def convert_expressions_to_arguments(self):
 		if len(self.expressions) > 0:
-			self.argument_expressions.append(ArgumentExpression(expressions=self.expressions))
+			self.argument_expressions.append(ArgumentExpression(expressions=self.expressions, current_line_index=self.current_line_index, current_index=self.current_index, current_file_name=self.current_file_name))
 			self.expressions = []
 
 	def __str__(self):
@@ -54,7 +54,7 @@ def to_script(self):
 		return full_output
 
 def read_expression(tokenizer, tree):
-	expression = FunctionExpression()
+	expression = FunctionExpression(tokenizer=tokenizer)
 
 	tokenizer.file.give_character_back()
 	tokenizer.tokenize(stop_ats=[opening_parenthesis_token], inheritable_give_back_stop_at=[opening_parenthesis_token], tree=expression)
4 changes: 2 additions & 2 deletions expressions/inheritance_expression.py
@@ -2,8 +2,8 @@
 from expression import Expression
 
 class InheritanceExpression(Expression):
-	def __init__(self):
-		super().__init__()
+	def __init__(self, tokenizer=None):
+		super().__init__(tokenizer=tokenizer)
 		self.child_class = None
 		self.super_class = None
 
8 changes: 4 additions & 4 deletions expressions/method_expression.py
@@ -5,8 +5,8 @@
 from regex import closing_parenthesis_token, semicolon_token
 
 class MethodExpression(Expression):
-	def __init__(self, method_symbol):
-		super().__init__()
+	def __init__(self, method_symbol, tokenizer=None, current_line_index=None, current_index=None, current_file_name=None):
+		super().__init__(tokenizer=tokenizer, current_line_index=current_line_index, current_index=current_index, current_file_name=current_file_name)
 		self.method_symbol = method_symbol
 		self.argument_expressions = []
 		self.parent = None
@@ -15,7 +15,7 @@ def __init__(self, method_symbol):
 
 	def convert_expressions_to_arguments(self):
 		if len(self.expressions) > 0:
-			self.argument_expressions.append(ArgumentExpression(expressions=self.expressions))
+			self.argument_expressions.append(ArgumentExpression(expressions=self.expressions, current_line_index=self.current_line_index, current_index=self.current_index, current_file_name=self.current_file_name))
 			self.expressions = []
 
 	def __str__(self):
@@ -36,7 +36,7 @@ def to_script(self):
 		return f"{self.method_symbol.to_script()}({re.sub(r',$', '', value.strip())}){self.handle_semicolon()}"
 
 def read_expression(tokenizer):
-	expression = MethodExpression(tokenizer.get_symbol(tokenizer.buffer))
+	expression = MethodExpression(tokenizer.get_symbol(tokenizer.buffer), tokenizer=tokenizer)
 	tokenizer.buffer = ""
 	tokenizer.tokenize(stop_ats=[closing_parenthesis_token], give_back_stop_ats=[semicolon_token], tree=expression)
 	expression.convert_expressions_to_arguments()
6 changes: 3 additions & 3 deletions expressions/namespace_expression.py
@@ -2,8 +2,8 @@
 from regex import chaining_token, closing_bracket_token, closing_parenthesis_token, namespace_token, operator_token_without_concatenation, semicolon_token, template_literal_token
 
 class NamespaceExpression(Expression):
-	def __init__(self):
-		super().__init__()
+	def __init__(self, tokenizer=None):
+		super().__init__(tokenizer=tokenizer)
 		self.parent = None
 		self.is_chainable = True
 
@@ -21,7 +21,7 @@ def to_script(self):
 		return output[0:-2] + self.handle_semicolon()
 
 def read_expression(tokenizer, inheritable_give_back_stop_at):
-	namespace_expression = NamespaceExpression()
+	namespace_expression = NamespaceExpression(tokenizer=tokenizer)
 	tokenizer.add_expression(namespace_expression, tokenizer.get_symbol(tokenizer.buffer))
 	tokenizer.file.give_character_back()
 	while tokenizer.file.read_character() == ":" and tokenizer.file.read_character() == ":":
8 changes: 4 additions & 4 deletions expressions/new_object_expression.py
@@ -4,8 +4,8 @@
 from regex import closing_curly_bracket_token, closing_parenthesis_token, opening_curly_bracket_token, opening_parenthesis_token, semicolon_token,valid_new
 
 class NewObjectExpression(Expression):
-	def __init__(self):
-		super().__init__()
+	def __init__(self, tokenizer=None):
+		super().__init__(tokenizer=tokenizer)
 		self.class_expressions = None
 		self.argument_expressions = []
 		self.is_code_block = True
@@ -18,7 +18,7 @@ def convert_expressions_to_class(self):
 
 	def convert_expressions_to_arguments(self):
 		if len(self.expressions) > 0:
-			self.argument_expressions.append(ArgumentExpression(expressions=self.expressions))
+			self.argument_expressions.append(ArgumentExpression(expressions=self.expressions, current_line_index=self.current_line_index, current_index=self.current_index, current_file_name=self.current_file_name))
 			self.expressions = []
 
 	def __str__(self):
@@ -58,7 +58,7 @@ def to_script(self):
 		return full_output + self.handle_semicolon()
 
 def read_expression(tokenizer, tree):
-	expression = NewObjectExpression()
+	expression = NewObjectExpression(tokenizer=tokenizer)
 	tokenizer.file.give_character_back()
 
 	tokenizer.tokenize(stop_ats=[opening_parenthesis_token], tree=expression)
6 changes: 3 additions & 3 deletions expressions/operator_expression.py
@@ -3,8 +3,8 @@
 from regex import modulus_next_character_token, operator_token, part_of_operator, text_operators, valid_operator
 
 class OperatorExpression(Expression):
-	def __init__(self, operator):
-		super().__init__()
+	def __init__(self, operator, tokenizer=None, no_errors=False):
+		super().__init__(tokenizer=tokenizer, no_errors=no_errors)
 		self.operator = operator
 		self.parent = None
 
@@ -62,7 +62,7 @@ def read_expression(tokenizer):
 		else:
 			tokenizer.file.give_character_back()
 
-		return OperatorExpression(saved_operator)
+		return OperatorExpression(saved_operator, tokenizer=tokenizer)
 	else:
 		for i in range(0, operator_ban_index + 1):
 			tokenizer.file.give_character_back()
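OperatorExpression is the only constructor here that also forwards the new no_errors flag, which the Expression base class (see expressions/expression.py above) uses to skip both position capture and the "Expected tokenizer argument" exception. A plausible reading is that operator expressions can be constructed without a live tokenizer; the sketch below only illustrates the flag's effect and is not code from this commit:

class Expression:
	def __init__(self, tokenizer=None, no_errors=False):
		if tokenizer != None and no_errors == False:
			self.current_file_name = tokenizer.file.filename  # position capture path
		elif no_errors == False:
			raise Exception(f"Expected tokenizer argument for expression {type(self)}")
		# with no_errors=True, no position is recorded and no exception is raised

class OperatorExpression(Expression):
	def __init__(self, operator, tokenizer=None, no_errors=False):
		super().__init__(tokenizer=tokenizer, no_errors=no_errors)
		self.operator = operator

probe = OperatorExpression("+", no_errors=True)  # fine: no tokenizer needed
try:
	OperatorExpression("+")  # no tokenizer and no_errors=False
except Exception as error:
	print(error)  # Expected tokenizer argument for expression <class '__main__.OperatorExpression'>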
6 changes: 3 additions & 3 deletions expressions/package_expression.py
@@ -3,8 +3,8 @@
 from regex import closing_curly_bracket_token, opening_curly_bracket_token, valid_package
 
 class PackageExpression(Expression):
-	def __init__(self):
-		super().__init__()
+	def __init__(self, tokenizer=None):
+		super().__init__(tokenizer=tokenizer)
 		self.name_symbol = None
 		self.is_code_block = True
 
@@ -40,7 +40,7 @@ def to_script(self):
 		return full_output
 
 def read_expression(tokenizer, tree):
-	expression = PackageExpression()
+	expression = PackageExpression(tokenizer=tokenizer)
 
 	tokenizer.file.give_character_back()
 
(The remaining 20 changed files are not shown.)