Berry animation pull lexer and optimizations (#23969)
This commit is contained in:
parent
a4b39af066
commit
ae693ba777
@ -683,7 +683,7 @@ stack traceback:
|
||||
### Compilation Output
|
||||
|
||||
```
|
||||
dsl_compilation_error: Line 38: Transpilation failed: Line 12: Template body transpilation failed: Line 12: Expression 'animation.strip_length(engine)' cannot be used in computed expressions. This creates a new instance at each evaluation. Use either:
|
||||
dsl_compilation_error: Line 12: Transpilation failed: Line 12: Template body transpilation failed: Line 12: Expression 'animation.strip_length(engine)' cannot be used in computed expressions. This creates a new instance at each evaluation. Use either:
|
||||
set var_name = animation.strip_length(engine)() # Single function call
|
||||
set computed = (existing_var + 1) / 2 # Computation with existing values
|
||||
stack traceback:
|
||||
@ -1011,7 +1011,7 @@ SUCCESS
|
||||
### Compilation Output
|
||||
|
||||
```
|
||||
dsl_compilation_error: Line 29: Transpilation failed: Line 9: Template body transpilation failed: Line 9: Unknown function or identifier 'abs2'. Make sure it's defined before use.
|
||||
dsl_compilation_error: Line 9: Transpilation failed: Line 9: Template body transpilation failed: Line 9: Unknown function or identifier 'abs2'. Make sure it's defined before use.
|
||||
stack traceback:
|
||||
<unknown source>: in function `error`
|
||||
<unknown source>: in function `transpile`
|
||||
|
||||
@ -80,17 +80,6 @@ f.close()
|
||||
animation_dsl.load_file("my_animation.dsl")
|
||||
```
|
||||
|
||||
### Runtime Management
|
||||
|
||||
#### `animation_dsl.create_runtime()`
|
||||
Creates a DSL runtime instance for advanced control.
|
||||
|
||||
```berry
|
||||
var runtime = animation_dsl.create_runtime()
|
||||
runtime.load_dsl(dsl_source)
|
||||
runtime.execute()
|
||||
```
|
||||
|
||||
## DSL Language Overview
|
||||
|
||||
The Animation DSL uses a declarative syntax with named parameters. All animations are created with an engine-first pattern and parameters are set individually for maximum flexibility.
|
||||
@ -696,17 +685,6 @@ The DSL transpiler also generates **warnings** that don't prevent compilation bu
|
||||
var performance_critical_anim = animation.create_optimized_animation()
|
||||
```
|
||||
|
||||
3. **Minimize DSL recompilation**:
|
||||
```berry
|
||||
# Good: Compile once
|
||||
var runtime = animation_dsl.create_runtime()
|
||||
runtime.load_dsl(source)
|
||||
runtime.execute()
|
||||
|
||||
# Avoid: Recompiling same DSL repeatedly
|
||||
# animation_dsl.execute(same_source) # Don't do this in loops
|
||||
```
|
||||
|
||||
## Integration Examples
|
||||
|
||||
### With Tasmota Rules
|
||||
|
||||
@ -49,8 +49,6 @@ import "dsl/transpiler.be" as dsl_transpiler
|
||||
register_to_dsl(dsl_transpiler)
|
||||
import "dsl/symbol_table.be" as dsl_symbol_table
|
||||
register_to_dsl(dsl_symbol_table)
|
||||
import "dsl/runtime.be" as dsl_runtime
|
||||
register_to_dsl(dsl_runtime)
|
||||
import "dsl/named_colors.be" as dsl_named_colors
|
||||
register_to_dsl(dsl_named_colors)
|
||||
|
||||
@ -100,16 +98,6 @@ def load_file(filename)
|
||||
end
|
||||
animation_dsl.load_file = load_file
|
||||
|
||||
# Create a DSL runtime instance
|
||||
#
|
||||
# @return DSLRuntime - New runtime instance
|
||||
def create_runtime(strip, debug_mode)
|
||||
import animation_dsl
|
||||
var engine = animation.create_engine(strip)
|
||||
return animation_dsl.DSLRuntime(engine, debug_mode)
|
||||
end
|
||||
animation_dsl.create_runtime = create_runtime
|
||||
|
||||
# Compile .anim file to .be file
|
||||
# Takes a filename with .anim suffix and compiles to same prefix with .be suffix
|
||||
#
|
||||
|
||||
@ -1,19 +1,20 @@
|
||||
# DSL Lexer (Tokenizer) for Animation DSL
|
||||
# Converts DSL source code into a stream of tokens for the single-pass transpiler
|
||||
# Pull-Mode Lexer v2 for Animation DSL
|
||||
# Combines pull-mode interface with original lexer.be implementation
|
||||
# Reuses most of the code from lexer.be while providing pull-based token access
|
||||
|
||||
# Import token functions and Token class
|
||||
import "dsl/token.be" as token_module
|
||||
var Token = token_module["Token"]
|
||||
|
||||
#@ solidify:DSLLexer,weak
|
||||
class DSLLexer
|
||||
#@ solidify:Lexer,weak
|
||||
class Lexer
|
||||
var source # String - DSL source code
|
||||
var position # Integer - current character position
|
||||
var line # Integer - current line number (1-based)
|
||||
var column # Integer - current column number (1-based)
|
||||
var tokens # List - generated tokens
|
||||
var token_position # Integer - current token position (for compatibility)
|
||||
|
||||
# Initialize lexer with source code
|
||||
# Initialize pull lexer with source code
|
||||
#
|
||||
# @param source: string - DSL source code to tokenize
|
||||
def init(source)
|
||||
@ -21,64 +22,262 @@ class DSLLexer
|
||||
self.position = 0
|
||||
self.line = 1
|
||||
self.column = 1
|
||||
self.tokens = []
|
||||
self.token_position = 0
|
||||
end
|
||||
|
||||
# Tokenize the entire source code
|
||||
# Pull the next token from the stream
|
||||
# This is the main pull-mode interface - generates tokens on demand
|
||||
#
|
||||
# @return list - Array of Token objects
|
||||
def tokenize()
|
||||
self.tokens = []
|
||||
self.position = 0
|
||||
self.line = 1
|
||||
self.column = 1
|
||||
|
||||
# @return Token - Next token, or nil if at end
|
||||
def next_token()
|
||||
# Skip whitespace and comments until we find a meaningful token or reach end
|
||||
while !self.at_end()
|
||||
self.scan_token()
|
||||
end
|
||||
|
||||
# Add EOF token
|
||||
self.add_token(38 #-animation_dsl.Token.EOF-#, "", 0)
|
||||
|
||||
return self.tokens
|
||||
end
|
||||
|
||||
# Scan and create the next token
|
||||
def scan_token()
|
||||
var start_column = self.column
|
||||
var ch = self.advance()
|
||||
|
||||
if ch == ' ' || ch == '\t' || ch == '\r'
|
||||
# Skip whitespace (but not newlines - they can be significant)
|
||||
return
|
||||
continue
|
||||
elif ch == '\n'
|
||||
self.add_token(35 #-animation_dsl.Token.NEWLINE-#, "\n", 1)
|
||||
var token = self.create_token(35 #-animation_dsl.Token.NEWLINE-#, "\n", 1)
|
||||
self.line += 1
|
||||
self.column = 1
|
||||
return
|
||||
self.token_position += 1
|
||||
return token
|
||||
elif ch == '#'
|
||||
self.scan_comment()
|
||||
var token = self.scan_comment()
|
||||
self.token_position += 1
|
||||
return token
|
||||
elif ch == '0' && self.peek() == 'x'
|
||||
self.scan_hex_color_0x()
|
||||
var token = self.scan_hex_color_0x()
|
||||
self.token_position += 1
|
||||
return token
|
||||
elif self.is_alpha(ch) || ch == '_'
|
||||
self.scan_identifier_or_keyword()
|
||||
var token = self.scan_identifier_or_keyword()
|
||||
self.token_position += 1
|
||||
return token
|
||||
elif self.is_digit(ch)
|
||||
self.scan_number()
|
||||
var token = self.scan_number()
|
||||
self.token_position += 1
|
||||
return token
|
||||
elif ch == '"' || ch == "'"
|
||||
# Check for triple quotes
|
||||
if (ch == '"' && self.peek() == '"' && self.peek_ahead(1) == '"') ||
|
||||
(ch == "'" && self.peek() == "'" && self.peek_ahead(1) == "'")
|
||||
self.scan_triple_quoted_string(ch)
|
||||
if (ch == '"' && self.peek() == '"' && self.peek_char_ahead(1) == '"') ||
|
||||
(ch == "'" && self.peek() == "'" && self.peek_char_ahead(1) == "'")
|
||||
var token = self.scan_triple_quoted_string(ch)
|
||||
self.token_position += 1
|
||||
return token
|
||||
else
|
||||
self.scan_string(ch)
|
||||
var token = self.scan_string(ch)
|
||||
self.token_position += 1
|
||||
return token
|
||||
end
|
||||
elif ch == '$'
|
||||
self.scan_variable_reference()
|
||||
var token = self.scan_variable_reference()
|
||||
self.token_position += 1
|
||||
return token
|
||||
else
|
||||
self.scan_operator_or_delimiter(ch)
|
||||
var token = self.scan_operator_or_delimiter(ch)
|
||||
self.token_position += 1
|
||||
return token
|
||||
end
|
||||
end
|
||||
|
||||
# Reached end of source
|
||||
return nil
|
||||
end
|
||||
|
||||
# Peek at the next token without consuming it
|
||||
# Uses position saving/restoring to implement peek
|
||||
#
|
||||
# @return Token - Next token, or nil if at end
|
||||
def peek_token()
|
||||
# Save current state
|
||||
var saved_position = self.position
|
||||
var saved_line = self.line
|
||||
var saved_column = self.column
|
||||
var saved_token_position = self.token_position
|
||||
|
||||
# Get next token
|
||||
var token = self.next_token()
|
||||
if (token != nil)
|
||||
# We haven't reached the end of the file
|
||||
# Restore state
|
||||
self.position = saved_position
|
||||
self.line = saved_line
|
||||
self.column = saved_column
|
||||
self.token_position = saved_token_position
|
||||
end
|
||||
|
||||
return token
|
||||
end
|
||||
|
||||
# Peek ahead by n tokens without consuming them
|
||||
# Note: This is less efficient than the array-based version but maintains simplicity
|
||||
#
|
||||
# @param n: int - Number of tokens to look ahead (1-based)
|
||||
# @return Token - Token at position + n, or nil if beyond end
|
||||
def peek_ahead(n)
|
||||
if n <= 0 return nil end
|
||||
|
||||
# Save current state
|
||||
var saved_position = self.position
|
||||
var saved_line = self.line
|
||||
var saved_column = self.column
|
||||
var saved_token_position = self.token_position
|
||||
|
||||
# Advance n tokens
|
||||
var token = nil
|
||||
for i : 1..n
|
||||
token = self.next_token()
|
||||
if token == nil break end
|
||||
end
|
||||
|
||||
# Restore state
|
||||
self.position = saved_position
|
||||
self.line = saved_line
|
||||
self.column = saved_column
|
||||
self.token_position = saved_token_position
|
||||
|
||||
return token
|
||||
end
|
||||
|
||||
# Check if we're at the end of the source
|
||||
#
|
||||
# @return bool - True if no more characters available
|
||||
def at_end()
|
||||
return self.position >= size(self.source)
|
||||
end
|
||||
|
||||
# Reset to beginning of source
|
||||
def reset()
|
||||
self.position = 0
|
||||
self.line = 1
|
||||
self.column = 1
|
||||
self.token_position = 0
|
||||
end
|
||||
|
||||
|
||||
# Get current position in token stream (for compatibility with array-based version)
|
||||
#
|
||||
# @return int - Current token position
|
||||
def get_position()
|
||||
return self.token_position
|
||||
end
|
||||
|
||||
# Set position in token stream (for compatibility with array-based version)
|
||||
# Note: This is a simplified implementation that resets to beginning and advances
|
||||
#
|
||||
# @param pos: int - New token position
|
||||
def set_position(pos)
|
||||
if pos < 0 return end
|
||||
|
||||
# Save current state in case we need to restore it
|
||||
var saved_position = self.position
|
||||
var saved_line = self.line
|
||||
var saved_column = self.column
|
||||
var saved_token_position = self.token_position
|
||||
|
||||
# Reset to beginning
|
||||
self.position = 0
|
||||
self.line = 1
|
||||
self.column = 1
|
||||
self.token_position = 0
|
||||
|
||||
# Advance to desired token position
|
||||
while self.token_position < pos && !self.at_end()
|
||||
self.next_token()
|
||||
end
|
||||
|
||||
# If we didn't reach the desired position, it was invalid - restore state
|
||||
if self.token_position != pos
|
||||
self.position = saved_position
|
||||
self.line = saved_line
|
||||
self.column = saved_column
|
||||
self.token_position = saved_token_position
|
||||
end
|
||||
end
|
||||
|
||||
# Create a sub-lexer (for compatibility with array-based version)
|
||||
# Note: This converts token positions to character positions
|
||||
#
|
||||
# @param start_token_pos: int - Starting token position
|
||||
# @param end_token_pos: int - Ending token position (exclusive)
|
||||
# @return Lexer - New pull lexer with subset of source
|
||||
def create_sub_lexer(start_token_pos, end_token_pos)
|
||||
import animation_dsl
|
||||
# Check for invalid ranges
|
||||
if start_token_pos < 0 || end_token_pos <= start_token_pos
|
||||
# Invalid range - return empty sub-lexer
|
||||
return animation_dsl.create_lexer("")
|
||||
end
|
||||
|
||||
# Save current state
|
||||
var saved_position = self.position
|
||||
var saved_line = self.line
|
||||
var saved_column = self.column
|
||||
var saved_token_position = self.token_position
|
||||
|
||||
# Reset to beginning and find character positions for token positions
|
||||
self.position = 0
|
||||
self.line = 1
|
||||
self.column = 1
|
||||
self.token_position = 0
|
||||
|
||||
var start_char_pos = 0
|
||||
var end_char_pos = size(self.source)
|
||||
var found_start = false
|
||||
var found_end = false
|
||||
|
||||
# Find start position
|
||||
while self.token_position < start_token_pos && !self.at_end()
|
||||
start_char_pos = self.position
|
||||
self.next_token()
|
||||
end
|
||||
if self.token_position == start_token_pos
|
||||
start_char_pos = self.position
|
||||
found_start = true
|
||||
end
|
||||
|
||||
# Find end position
|
||||
while self.token_position < end_token_pos && !self.at_end()
|
||||
self.next_token()
|
||||
end
|
||||
if self.token_position == end_token_pos
|
||||
end_char_pos = self.position
|
||||
found_end = true
|
||||
end
|
||||
|
||||
# Restore state
|
||||
self.position = saved_position
|
||||
self.line = saved_line
|
||||
self.column = saved_column
|
||||
self.token_position = saved_token_position
|
||||
|
||||
# Create sub-lexer with character range
|
||||
if !found_start
|
||||
return animation_dsl.create_lexer("")
|
||||
end
|
||||
|
||||
# Clamp end position
|
||||
if end_char_pos > size(self.source) end_char_pos = size(self.source) end
|
||||
if start_char_pos >= end_char_pos
|
||||
return animation_dsl.create_lexer("")
|
||||
end
|
||||
|
||||
# Extract subset of source
|
||||
var sub_source = self.source[start_char_pos..end_char_pos-1]
|
||||
var sub_lexer = animation_dsl.create_lexer(sub_source)
|
||||
# Ensure sub-lexer starts at position 0 (should already be 0 from init, but make sure)
|
||||
sub_lexer.position = 0
|
||||
sub_lexer.line = 1
|
||||
sub_lexer.column = 1
|
||||
sub_lexer.token_position = 0
|
||||
return sub_lexer
|
||||
end
|
||||
|
||||
# === TOKEN SCANNING METHODS (from original lexer.be) ===
|
||||
|
||||
# Scan comment (now unambiguous - only starts with #)
|
||||
def scan_comment()
|
||||
var start_pos = self.position - 1
|
||||
@ -90,7 +289,24 @@ class DSLLexer
|
||||
end
|
||||
|
||||
var comment_text = self.source[start_pos..self.position-1]
|
||||
self.add_token(37 #-animation_dsl.Token.COMMENT-#, comment_text, self.position - start_pos)
|
||||
|
||||
# Trim trailing whitespace from comment text manually
|
||||
# Find the last non-whitespace character in the comment content
|
||||
var trimmed_text = comment_text
|
||||
var end_pos = size(comment_text) - 1
|
||||
while end_pos >= 0 && (comment_text[end_pos] == ' ' || comment_text[end_pos] == '\t' || comment_text[end_pos] == '\r')
|
||||
end_pos -= 1
|
||||
end
|
||||
|
||||
# Extract trimmed comment text
|
||||
if end_pos >= 0
|
||||
trimmed_text = comment_text[0 .. end_pos]
|
||||
else
|
||||
trimmed_text = "#" # Keep at least the # character for empty comments
|
||||
end
|
||||
|
||||
# Use trimmed text but keep original position tracking
|
||||
return self.create_token(37 #-animation_dsl.Token.COMMENT-#, trimmed_text, self.position - start_pos)
|
||||
end
|
||||
|
||||
# Scan hex color (0xRRGGBB, 0xAARRGGBB)
|
||||
@ -112,7 +328,7 @@ class DSLLexer
|
||||
|
||||
# Validate hex color format - support 6 (RGB) or 8 (ARGB) digits
|
||||
if hex_digits == 6 || hex_digits == 8
|
||||
self.add_token(4 #-animation_dsl.Token.COLOR-#, color_value, size(color_value))
|
||||
return self.create_token(4 #-animation_dsl.Token.COLOR-#, color_value, size(color_value))
|
||||
else
|
||||
self.error("Invalid hex color format: " + color_value + " (expected 0xRRGGBB or 0xAARRGGBB)")
|
||||
end
|
||||
@ -141,7 +357,7 @@ class DSLLexer
|
||||
token_type = 1 #-animation_dsl.Token.IDENTIFIER-#
|
||||
end
|
||||
|
||||
self.add_token(token_type, text, size(text))
|
||||
return self.create_token(token_type, text, size(text))
|
||||
end
|
||||
|
||||
# Scan numeric literal (with optional time/percentage/multiplier suffix)
|
||||
@ -172,18 +388,18 @@ class DSLLexer
|
||||
# Check for time unit suffixes
|
||||
if self.check_time_suffix()
|
||||
var suffix = self.scan_time_suffix()
|
||||
self.add_token(5 #-animation_dsl.Token.TIME-#, number_text + suffix, size(number_text + suffix))
|
||||
return self.create_token(5 #-animation_dsl.Token.TIME-#, number_text + suffix, size(number_text + suffix))
|
||||
# Check for percentage suffix
|
||||
elif !self.at_end() && self.peek() == '%'
|
||||
self.advance()
|
||||
self.add_token(6 #-animation_dsl.Token.PERCENTAGE-#, number_text + "%", size(number_text) + 1)
|
||||
return self.create_token(6 #-animation_dsl.Token.PERCENTAGE-#, number_text + "%", size(number_text) + 1)
|
||||
# Check for multiplier suffix
|
||||
elif !self.at_end() && self.peek() == 'x'
|
||||
self.advance()
|
||||
self.add_token(7 #-animation_dsl.Token.MULTIPLIER-#, number_text + "x", size(number_text) + 1)
|
||||
return self.create_token(7 #-animation_dsl.Token.MULTIPLIER-#, number_text + "x", size(number_text) + 1)
|
||||
else
|
||||
# Plain number
|
||||
self.add_token(2 #-animation_dsl.Token.NUMBER-#, number_text, size(number_text))
|
||||
return self.create_token(2 #-animation_dsl.Token.NUMBER-#, number_text, size(number_text))
|
||||
end
|
||||
end
|
||||
|
||||
@ -266,7 +482,7 @@ class DSLLexer
|
||||
else
|
||||
# Consume closing quote
|
||||
self.advance()
|
||||
self.add_token(3 #-animation_dsl.Token.STRING-#, value, self.position - start_pos)
|
||||
return self.create_token(3 #-animation_dsl.Token.STRING-#, value, self.position - start_pos)
|
||||
end
|
||||
end
|
||||
|
||||
@ -286,8 +502,8 @@ class DSLLexer
|
||||
|
||||
# Check for closing triple quotes
|
||||
if ch == quote_char &&
|
||||
self.peek_ahead(1) == quote_char &&
|
||||
self.peek_ahead(2) == quote_char
|
||||
self.peek_char_ahead(1) == quote_char &&
|
||||
self.peek_char_ahead(2) == quote_char
|
||||
# Found closing triple quotes - consume them
|
||||
self.advance() # first closing quote
|
||||
self.advance() # second closing quote
|
||||
@ -308,7 +524,7 @@ class DSLLexer
|
||||
if self.at_end() && !(self.source[self.position-3..self.position-1] == quote_char + quote_char + quote_char)
|
||||
self.error("Unterminated triple-quoted string literal")
|
||||
else
|
||||
self.add_token(3 #-animation_dsl.Token.STRING-#, value, self.position - start_pos)
|
||||
return self.create_token(3 #-animation_dsl.Token.STRING-#, value, self.position - start_pos)
|
||||
end
|
||||
end
|
||||
|
||||
@ -327,7 +543,7 @@ class DSLLexer
|
||||
end
|
||||
|
||||
var var_ref = self.source[start_pos..self.position-1]
|
||||
self.add_token(36 #-animation_dsl.Token.VARIABLE_REF-#, var_ref, size(var_ref))
|
||||
return self.create_token(36 #-animation_dsl.Token.VARIABLE_REF-#, var_ref, size(var_ref))
|
||||
end
|
||||
|
||||
# Scan operator or delimiter
|
||||
@ -336,99 +552,89 @@ class DSLLexer
|
||||
|
||||
if ch == '='
|
||||
if self.match('=')
|
||||
self.add_token(15 #-animation_dsl.Token.EQUAL-#, "==", 2)
|
||||
return self.create_token(15 #-animation_dsl.Token.EQUAL-#, "==", 2)
|
||||
else
|
||||
self.add_token(8 #-animation_dsl.Token.ASSIGN-#, "=", 1)
|
||||
return self.create_token(8 #-animation_dsl.Token.ASSIGN-#, "=", 1)
|
||||
end
|
||||
elif ch == '!'
|
||||
if self.match('=')
|
||||
self.add_token(16 #-animation_dsl.Token.NOT_EQUAL-#, "!=", 2)
|
||||
return self.create_token(16 #-animation_dsl.Token.NOT_EQUAL-#, "!=", 2)
|
||||
else
|
||||
self.add_token(23 #-animation_dsl.Token.LOGICAL_NOT-#, "!", 1)
|
||||
return self.create_token(23 #-animation_dsl.Token.LOGICAL_NOT-#, "!", 1)
|
||||
end
|
||||
elif ch == '<'
|
||||
if self.match('=')
|
||||
self.add_token(18 #-animation_dsl.Token.LESS_EQUAL-#, "<=", 2)
|
||||
return self.create_token(18 #-animation_dsl.Token.LESS_EQUAL-#, "<=", 2)
|
||||
elif self.match('<')
|
||||
# Left shift - not used in DSL but included for completeness
|
||||
self.error("Left shift operator '<<' not supported in DSL")
|
||||
else
|
||||
self.add_token(17 #-animation_dsl.Token.LESS_THAN-#, "<", 1)
|
||||
return self.create_token(17 #-animation_dsl.Token.LESS_THAN-#, "<", 1)
|
||||
end
|
||||
elif ch == '>'
|
||||
if self.match('=')
|
||||
self.add_token(20 #-animation_dsl.Token.GREATER_EQUAL-#, ">=", 2)
|
||||
return self.create_token(20 #-animation_dsl.Token.GREATER_EQUAL-#, ">=", 2)
|
||||
elif self.match('>')
|
||||
# Right shift - not used in DSL but included for completeness
|
||||
self.error("Right shift operator '>>' not supported in DSL")
|
||||
else
|
||||
self.add_token(19 #-animation_dsl.Token.GREATER_THAN-#, ">", 1)
|
||||
return self.create_token(19 #-animation_dsl.Token.GREATER_THAN-#, ">", 1)
|
||||
end
|
||||
elif ch == '&'
|
||||
if self.match('&')
|
||||
self.add_token(21 #-animation_dsl.Token.LOGICAL_AND-#, "&&", 2)
|
||||
return self.create_token(21 #-animation_dsl.Token.LOGICAL_AND-#, "&&", 2)
|
||||
else
|
||||
self.error("Single '&' not supported in DSL")
|
||||
end
|
||||
elif ch == '|'
|
||||
if self.match('|')
|
||||
self.add_token(22 #-animation_dsl.Token.LOGICAL_OR-#, "||", 2)
|
||||
return self.create_token(22 #-animation_dsl.Token.LOGICAL_OR-#, "||", 2)
|
||||
else
|
||||
self.error("Single '|' not supported in DSL")
|
||||
end
|
||||
elif ch == '-'
|
||||
if self.match('>')
|
||||
self.add_token(34 #-animation_dsl.Token.ARROW-#, "->", 2)
|
||||
return self.create_token(34 #-animation_dsl.Token.ARROW-#, "->", 2)
|
||||
else
|
||||
self.add_token(10 #-animation_dsl.Token.MINUS-#, "-", 1)
|
||||
return self.create_token(10 #-animation_dsl.Token.MINUS-#, "-", 1)
|
||||
end
|
||||
elif ch == '+'
|
||||
self.add_token(9 #-animation_dsl.Token.PLUS-#, "+", 1)
|
||||
return self.create_token(9 #-animation_dsl.Token.PLUS-#, "+", 1)
|
||||
elif ch == '*'
|
||||
self.add_token(11 #-animation_dsl.Token.MULTIPLY-#, "*", 1)
|
||||
return self.create_token(11 #-animation_dsl.Token.MULTIPLY-#, "*", 1)
|
||||
elif ch == '/'
|
||||
self.add_token(12 #-animation_dsl.Token.DIVIDE-#, "/", 1)
|
||||
return self.create_token(12 #-animation_dsl.Token.DIVIDE-#, "/", 1)
|
||||
elif ch == '%'
|
||||
self.add_token(13 #-animation_dsl.Token.MODULO-#, "%", 1)
|
||||
return self.create_token(13 #-animation_dsl.Token.MODULO-#, "%", 1)
|
||||
elif ch == '^'
|
||||
self.add_token(14 #-animation_dsl.Token.POWER-#, "^", 1)
|
||||
return self.create_token(14 #-animation_dsl.Token.POWER-#, "^", 1)
|
||||
elif ch == '('
|
||||
self.add_token(24 #-animation_dsl.Token.LEFT_PAREN-#, "(", 1)
|
||||
return self.create_token(24 #-animation_dsl.Token.LEFT_PAREN-#, "(", 1)
|
||||
elif ch == ')'
|
||||
self.add_token(25 #-animation_dsl.Token.RIGHT_PAREN-#, ")", 1)
|
||||
return self.create_token(25 #-animation_dsl.Token.RIGHT_PAREN-#, ")", 1)
|
||||
elif ch == '{'
|
||||
self.add_token(26 #-animation_dsl.Token.LEFT_BRACE-#, "{", 1)
|
||||
return self.create_token(26 #-animation_dsl.Token.LEFT_BRACE-#, "{", 1)
|
||||
elif ch == '}'
|
||||
self.add_token(27 #-animation_dsl.Token.RIGHT_BRACE-#, "}", 1)
|
||||
return self.create_token(27 #-animation_dsl.Token.RIGHT_BRACE-#, "}", 1)
|
||||
elif ch == '['
|
||||
self.add_token(28 #-animation_dsl.Token.LEFT_BRACKET-#, "[", 1)
|
||||
return self.create_token(28 #-animation_dsl.Token.LEFT_BRACKET-#, "[", 1)
|
||||
elif ch == ']'
|
||||
self.add_token(29 #-animation_dsl.Token.RIGHT_BRACKET-#, "]", 1)
|
||||
return self.create_token(29 #-animation_dsl.Token.RIGHT_BRACKET-#, "]", 1)
|
||||
elif ch == ','
|
||||
self.add_token(30 #-animation_dsl.Token.COMMA-#, ",", 1)
|
||||
return self.create_token(30 #-animation_dsl.Token.COMMA-#, ",", 1)
|
||||
elif ch == ';'
|
||||
self.add_token(31 #-animation_dsl.Token.SEMICOLON-#, ";", 1)
|
||||
return self.create_token(31 #-animation_dsl.Token.SEMICOLON-#, ";", 1)
|
||||
elif ch == ':'
|
||||
self.add_token(32 #-animation_dsl.Token.COLON-#, ":", 1)
|
||||
return self.create_token(32 #-animation_dsl.Token.COLON-#, ":", 1)
|
||||
elif ch == '.'
|
||||
if self.match('.')
|
||||
# Range operator (..) - treat as two dots for now
|
||||
self.add_token(33 #-animation_dsl.Token.DOT-#, ".", 1)
|
||||
self.add_token(33 #-animation_dsl.Token.DOT-#, ".", 1)
|
||||
else
|
||||
self.add_token(33 #-animation_dsl.Token.DOT-#, ".", 1)
|
||||
end
|
||||
# For now, just handle single dots - range operators can be added later if needed
|
||||
return self.create_token(33 #-animation_dsl.Token.DOT-#, ".", 1)
|
||||
else
|
||||
self.error("Unexpected character: '" + ch + "'")
|
||||
end
|
||||
end
|
||||
|
||||
# Helper methods
|
||||
|
||||
# Check if at end of source
|
||||
def at_end()
|
||||
return self.position >= size(self.source)
|
||||
end
|
||||
# === HELPER METHODS (from original lexer.be) ===
|
||||
|
||||
# Advance position and return current character
|
||||
def advance()
|
||||
@ -450,16 +656,8 @@ class DSLLexer
|
||||
return self.source[self.position]
|
||||
end
|
||||
|
||||
# Peek at next character without advancing
|
||||
def peek_next()
|
||||
if self.position + 1 >= size(self.source)
|
||||
return ""
|
||||
end
|
||||
return self.source[self.position + 1]
|
||||
end
|
||||
|
||||
# Peek ahead by n characters without advancing
|
||||
def peek_ahead(n)
|
||||
def peek_char_ahead(n)
|
||||
if self.position + n >= size(self.source)
|
||||
return ""
|
||||
end
|
||||
@ -494,11 +692,10 @@ class DSLLexer
|
||||
return self.is_digit(ch) || (ch >= 'a' && ch <= 'f') || (ch >= 'A' && ch <= 'F')
|
||||
end
|
||||
|
||||
# Add token to tokens list
|
||||
def add_token(token_type, value, length)
|
||||
# Create token with proper position tracking
|
||||
def create_token(token_type, value, length)
|
||||
import animation_dsl
|
||||
var token = animation_dsl.Token(token_type, value, self.line, self.column - length, length)
|
||||
self.tokens.push(token)
|
||||
return animation_dsl.Token(token_type, value, self.line, self.column - length, length)
|
||||
end
|
||||
|
||||
# Raise lexical error immediately
|
||||
@ -506,40 +703,8 @@ class DSLLexer
|
||||
var error_msg = "Line " + str(self.line) + ":" + str(self.column) + ": " + message
|
||||
raise "lexical_error", error_msg
|
||||
end
|
||||
|
||||
# Reset lexer state for reuse
|
||||
def reset(new_source)
|
||||
self.source = new_source != nil ? new_source : ""
|
||||
self.position = 0
|
||||
self.line = 1
|
||||
self.column = 1
|
||||
self.tokens = []
|
||||
end
|
||||
|
||||
# Get current position info for debugging
|
||||
def get_position_info()
|
||||
return {
|
||||
"position": self.position,
|
||||
"line": self.line,
|
||||
"column": self.column,
|
||||
"at_end": self.at_end()
|
||||
}
|
||||
end
|
||||
|
||||
|
||||
end
|
||||
|
||||
# Utility function to tokenize DSL source code
|
||||
#
|
||||
# @param source: string - DSL source code
|
||||
# @return list - Array of Token objects
|
||||
def tokenize_dsl(source)
|
||||
import animation_dsl
|
||||
var lexer = animation_dsl.DSLLexer(source)
|
||||
return lexer.tokenize()
|
||||
end
|
||||
|
||||
return {
|
||||
"DSLLexer": DSLLexer,
|
||||
"tokenize_dsl": tokenize_dsl
|
||||
"create_lexer": Lexer
|
||||
}
|
||||
@ -2,61 +2,61 @@
|
||||
# Provides color name to ARGB value mappings for the DSL transpiler
|
||||
|
||||
# Static color mapping for named colors (helps with solidification)
|
||||
# Maps color names to ARGB hex values (0xAARRGGBB format)
|
||||
# Maps color names to ARGB integer values (0xAARRGGBB format)
|
||||
# All colors have full alpha (0xFF) except transparent
|
||||
var named_colors = {
|
||||
# Primary colors
|
||||
"red": "0xFFFF0000", # Pure red
|
||||
"green": "0xFF008000", # HTML/CSS standard green (darker, more readable)
|
||||
"blue": "0xFF0000FF", # Pure blue
|
||||
"red": 0xFFFF0000, # Pure red
|
||||
"green": 0xFF008000, # HTML/CSS standard green (darker, more readable)
|
||||
"blue": 0xFF0000FF, # Pure blue
|
||||
|
||||
# Achromatic colors
|
||||
"white": "0xFFFFFFFF", # Pure white
|
||||
"black": "0xFF000000", # Pure black
|
||||
"gray": "0xFF808080", # Medium gray
|
||||
"grey": "0xFF808080", # Alternative spelling
|
||||
"silver": "0xFFC0C0C0", # Light gray
|
||||
"white": 0xFFFFFFFF, # Pure white
|
||||
"black": 0xFF000000, # Pure black
|
||||
"gray": 0xFF808080, # Medium gray
|
||||
"grey": 0xFF808080, # Alternative spelling
|
||||
"silver": 0xFFC0C0C0, # Light gray
|
||||
|
||||
# Secondary colors
|
||||
"yellow": "0xFFFFFF00", # Pure yellow (red + green)
|
||||
"cyan": "0xFF00FFFF", # Pure cyan (green + blue)
|
||||
"magenta": "0xFFFF00FF", # Pure magenta (red + blue)
|
||||
"yellow": 0xFFFFFF00, # Pure yellow (red + green)
|
||||
"cyan": 0xFF00FFFF, # Pure cyan (green + blue)
|
||||
"magenta": 0xFFFF00FF, # Pure magenta (red + blue)
|
||||
|
||||
# Extended web colors
|
||||
"orange": "0xFFFFA500", # Orange
|
||||
"purple": "0xFF800080", # Purple (darker magenta)
|
||||
"pink": "0xFFFFC0CB", # Light pink
|
||||
"lime": "0xFF00FF00", # Pure green (HTML/CSS lime = full intensity)
|
||||
"navy": "0xFF000080", # Dark blue
|
||||
"olive": "0xFF808000", # Dark yellow-green
|
||||
"maroon": "0xFF800000", # Dark red
|
||||
"teal": "0xFF008080", # Dark cyan
|
||||
"aqua": "0xFF00FFFF", # Same as cyan
|
||||
"fuchsia": "0xFFFF00FF", # Same as magenta
|
||||
"orange": 0xFFFFA500, # Orange
|
||||
"purple": 0xFF800080, # Purple (darker magenta)
|
||||
"pink": 0xFFFFC0CB, # Light pink
|
||||
"lime": 0xFF00FF00, # Pure green (HTML/CSS lime = full intensity)
|
||||
"navy": 0xFF000080, # Dark blue
|
||||
"olive": 0xFF808000, # Dark yellow-green
|
||||
"maroon": 0xFF800000, # Dark red
|
||||
"teal": 0xFF008080, # Dark cyan
|
||||
"aqua": 0xFF00FFFF, # Same as cyan
|
||||
"fuchsia": 0xFFFF00FF, # Same as magenta
|
||||
|
||||
# Precious metals
|
||||
"gold": "0xFFFFD700", # Metallic gold
|
||||
"gold": 0xFFFFD700, # Metallic gold
|
||||
|
||||
# Natural colors
|
||||
"brown": "0xFFA52A2A", # Saddle brown
|
||||
"tan": "0xFFD2B48C", # Light brown/beige
|
||||
"beige": "0xFFF5F5DC", # Very light brown
|
||||
"ivory": "0xFFFFFFF0", # Off-white with yellow tint
|
||||
"snow": "0xFFFFFAFA", # Off-white with slight blue tint
|
||||
"brown": 0xFFA52A2A, # Saddle brown
|
||||
"tan": 0xFFD2B48C, # Light brown/beige
|
||||
"beige": 0xFFF5F5DC, # Very light brown
|
||||
"ivory": 0xFFFFFFF0, # Off-white with yellow tint
|
||||
"snow": 0xFFFFFAFA, # Off-white with slight blue tint
|
||||
|
||||
# Flower/nature colors
|
||||
"indigo": "0xFF4B0082", # Deep blue-purple
|
||||
"violet": "0xFFEE82EE", # Light purple
|
||||
"crimson": "0xFFDC143C", # Deep red
|
||||
"coral": "0xFFFF7F50", # Orange-pink
|
||||
"salmon": "0xFFFA8072", # Pink-orange
|
||||
"khaki": "0xFFF0E68C", # Pale yellow-brown
|
||||
"plum": "0xFFDDA0DD", # Light purple
|
||||
"orchid": "0xFFDA70D6", # Medium purple
|
||||
"turquoise": "0xFF40E0D0", # Blue-green
|
||||
"indigo": 0xFF4B0082, # Deep blue-purple
|
||||
"violet": 0xFFEE82EE, # Light purple
|
||||
"crimson": 0xFFDC143C, # Deep red
|
||||
"coral": 0xFFFF7F50, # Orange-pink
|
||||
"salmon": 0xFFFA8072, # Pink-orange
|
||||
"khaki": 0xFFF0E68C, # Pale yellow-brown
|
||||
"plum": 0xFFDDA0DD, # Light purple
|
||||
"orchid": 0xFFDA70D6, # Medium purple
|
||||
"turquoise": 0xFF40E0D0, # Blue-green
|
||||
|
||||
# Special
|
||||
"transparent": "0x00000000" # Fully transparent (alpha = 0)
|
||||
"transparent": 0x00000000 # Fully transparent (alpha = 0)
|
||||
}
|
||||
|
||||
return {"named_colors": named_colors}
|
||||
|
||||
@ -1,179 +0,0 @@
|
||||
# DSL Runtime Integration
|
||||
# Provides complete DSL execution lifecycle management
|
||||
|
||||
#@ solidify:DSLRuntime,weak
|
||||
class DSLRuntime
|
||||
var engine # Animation engine instance
|
||||
var active_source # Currently loaded DSL source
|
||||
var debug_mode # Enable debug output
|
||||
|
||||
def init(engine, debug_mode)
|
||||
self.engine = engine
|
||||
self.active_source = nil
|
||||
self.debug_mode = debug_mode != nil ? debug_mode : false
|
||||
end
|
||||
|
||||
# Load and execute DSL from string
|
||||
def load_dsl(source_code)
|
||||
import animation_dsl
|
||||
if source_code == nil || size(source_code) == 0
|
||||
if self.debug_mode
|
||||
print("DSL: Empty source code")
|
||||
end
|
||||
return false
|
||||
end
|
||||
|
||||
# Compile DSL with exception handling
|
||||
if self.debug_mode
|
||||
print("DSL: Compiling source...")
|
||||
end
|
||||
|
||||
try
|
||||
var berry_code = animation_dsl.compile(source_code)
|
||||
# Execute the compiled Berry code
|
||||
return self.execute_berry_code(berry_code, source_code)
|
||||
except "dsl_compilation_error" as e, msg
|
||||
if self.debug_mode
|
||||
print("DSL: Compilation failed - " + msg)
|
||||
end
|
||||
return false
|
||||
end
|
||||
end
|
||||
|
||||
# Load DSL from file
|
||||
def load_dsl_file(filename)
|
||||
try
|
||||
var file = open(filename, "r")
|
||||
if file == nil
|
||||
if self.debug_mode
|
||||
print(f"DSL: Cannot open file {filename}")
|
||||
end
|
||||
return false
|
||||
end
|
||||
|
||||
var source_code = file.read()
|
||||
file.close()
|
||||
|
||||
if self.debug_mode
|
||||
print(f"DSL: Loaded {size(source_code)} characters from {filename}")
|
||||
end
|
||||
|
||||
return self.load_dsl(source_code)
|
||||
|
||||
except .. as e, msg
|
||||
if self.debug_mode
|
||||
print(f"DSL: File loading error: {msg}")
|
||||
end
|
||||
return false
|
||||
end
|
||||
end
|
||||
|
||||
# Reload current DSL (useful for development)
|
||||
def reload_dsl()
|
||||
if self.active_source == nil
|
||||
if self.debug_mode
|
||||
print("DSL: No active DSL to reload")
|
||||
end
|
||||
return false
|
||||
end
|
||||
|
||||
if self.debug_mode
|
||||
print("DSL: Reloading current DSL...")
|
||||
end
|
||||
|
||||
# Stop current animations
|
||||
self.engine.stop()
|
||||
self.engine.clear()
|
||||
|
||||
# Reload with fresh compilation
|
||||
return self.load_dsl(self.active_source)
|
||||
end
|
||||
|
||||
# Get generated Berry code for inspection (debugging)
|
||||
def get_generated_code(source_code)
|
||||
import animation_dsl
|
||||
if source_code == nil
|
||||
source_code = self.active_source
|
||||
end
|
||||
|
||||
if source_code == nil
|
||||
return nil
|
||||
end
|
||||
|
||||
# Generate code with exception handling
|
||||
try
|
||||
return animation_dsl.compile(source_code)
|
||||
except "dsl_compilation_error" as e, msg
|
||||
if self.debug_mode
|
||||
print("DSL: Code generation failed - " + msg)
|
||||
end
|
||||
return nil
|
||||
end
|
||||
end
|
||||
|
||||
# Execute Berry code with proper error handling
|
||||
def execute_berry_code(berry_code, source_code)
|
||||
try
|
||||
# Stop current animations before starting new ones
|
||||
self.engine.stop()
|
||||
self.engine.clear()
|
||||
|
||||
# Compile and execute the Berry code
|
||||
var compiled_func = compile(berry_code)
|
||||
if compiled_func == nil
|
||||
if self.debug_mode
|
||||
print("DSL: Berry compilation failed")
|
||||
end
|
||||
return false
|
||||
end
|
||||
|
||||
# Execute in controlled environment
|
||||
compiled_func()
|
||||
|
||||
# Store as active source
|
||||
self.active_source = source_code
|
||||
|
||||
if self.debug_mode
|
||||
print("DSL: Execution successful")
|
||||
end
|
||||
|
||||
return true
|
||||
|
||||
except .. as e, msg
|
||||
if self.debug_mode
|
||||
print(f"DSL: Execution error: {msg}")
|
||||
end
|
||||
return false
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
# Get current engine for external access
|
||||
def get_engine()
|
||||
return self.engine
|
||||
end
|
||||
|
||||
# Check if DSL is currently loaded
|
||||
def is_loaded()
|
||||
return self.active_source != nil
|
||||
end
|
||||
|
||||
# Get current DSL source
|
||||
def get_active_source()
|
||||
return self.active_source
|
||||
end
|
||||
end
|
||||
|
||||
# Factory function for easy creation
|
||||
def create_dsl_runtime(strip, debug_mode)
|
||||
import animation_dsl
|
||||
var engine = animation.create_engine(strip)
|
||||
return animation_dsl.DSLRuntime(engine, debug_mode)
|
||||
end
|
||||
|
||||
# Return module exports
|
||||
return {
|
||||
"DSLRuntime": DSLRuntime,
|
||||
"create_dsl_runtime": create_dsl_runtime
|
||||
}
|
||||
@ -467,7 +467,9 @@ class SymbolTable
|
||||
if entry != nil
|
||||
# For builtin color entries, return the actual color value directly
|
||||
if entry.is_builtin && entry.type == 11 #-animation_dsl._symbol_entry.TYPE_COLOR-#
|
||||
return animation_dsl.named_colors[name]
|
||||
var color_value = animation_dsl.named_colors[name]
|
||||
# Convert integer to hex string format for transpiler
|
||||
return f"0x{color_value:08X}"
|
||||
end
|
||||
return entry.get_reference()
|
||||
end
|
||||
@ -590,7 +592,9 @@ class SymbolTable
|
||||
import animation_dsl
|
||||
var entry = self.get(color_name) # This will trigger _detect_and_cache_symbol if needed
|
||||
if entry != nil && entry.is_builtin && entry.type == 11 #-animation_dsl._symbol_entry.TYPE_COLOR-#
|
||||
return animation_dsl.named_colors[color_name]
|
||||
var color_value = animation_dsl.named_colors[color_name]
|
||||
# Convert integer to hex string format for transpiler
|
||||
return f"0x{color_value:08X}"
|
||||
end
|
||||
return "0xFFFFFFFF" # Default fallback
|
||||
end
|
||||
|
||||
@ -4,16 +4,16 @@
|
||||
#@ solidify:Token,weak
|
||||
class Token
|
||||
# Basic token types
|
||||
static var KEYWORD = 0 # strip, color, animation, sequence, etc.
|
||||
static var IDENTIFIER = 1 # user-defined names
|
||||
static var NUMBER = 2 # 123, 3.14
|
||||
static var STRING = 3 # "hello", 'world'
|
||||
static var COLOR = 4 # #FF0000, rgb(255,0,0), hsv(240,100,100)
|
||||
static var TIME = 5 # 2s, 500ms, 1m, 2h
|
||||
static var PERCENTAGE = 6 # 50%, 100%
|
||||
static var MULTIPLIER = 7 # 2x, 0.5x
|
||||
# static var KEYWORD = 0 # strip, color, animation, sequence, etc.
|
||||
# static var IDENTIFIER = 1 # user-defined names
|
||||
# static var NUMBER = 2 # 123, 3.14
|
||||
# static var STRING = 3 # "hello", 'world'
|
||||
# static var COLOR = 4 # #FF0000, rgb(255,0,0), hsv(240,100,100)
|
||||
# static var TIME = 5 # 2s, 500ms, 1m, 2h
|
||||
# static var PERCENTAGE = 6 # 50%, 100%
|
||||
# static var MULTIPLIER = 7 # 2x, 0.5x
|
||||
|
||||
# Static arrays for better solidification (moved from inline arrays)
|
||||
# Human readable type name for each type value
|
||||
static var names = [
|
||||
"KEYWORD", "IDENTIFIER", "NUMBER", "STRING", "COLOR", "TIME", "PERCENTAGE", "MULTIPLIER",
|
||||
"ASSIGN", "PLUS", "MINUS", "MULTIPLY", "DIVIDE", "MODULO", "POWER",
|
||||
@ -21,7 +21,7 @@ class Token
|
||||
"LOGICAL_AND", "LOGICAL_OR", "LOGICAL_NOT",
|
||||
"LEFT_PAREN", "RIGHT_PAREN", "LEFT_BRACE", "RIGHT_BRACE", "LEFT_BRACKET", "RIGHT_BRACKET",
|
||||
"COMMA", "SEMICOLON", "COLON", "DOT", "ARROW",
|
||||
"NEWLINE", "VARIABLE_REF", "COMMENT", "EOF", "ERROR",
|
||||
"NEWLINE", "VARIABLE_REF", "COMMENT", "" #-ex-EOF-#, "ERROR",
|
||||
"EVENT_ON", "EVENT_INTERRUPT", "EVENT_RESUME", "EVENT_AFTER"
|
||||
]
|
||||
|
||||
@ -70,63 +70,55 @@ class Token
|
||||
"turquoise", "tan", "beige", "ivory", "snow", "transparent"
|
||||
]
|
||||
|
||||
# Operators
|
||||
static var ASSIGN = 8 # =
|
||||
static var PLUS = 9 # +
|
||||
static var MINUS = 10 # -
|
||||
static var MULTIPLY = 11 # *
|
||||
static var DIVIDE = 12 # /
|
||||
static var MODULO = 13 # %
|
||||
static var POWER = 14 # ^
|
||||
# # Operators
|
||||
# static var ASSIGN = 8 # =
|
||||
# static var PLUS = 9 # +
|
||||
# static var MINUS = 10 # -
|
||||
# static var MULTIPLY = 11 # *
|
||||
# static var DIVIDE = 12 # /
|
||||
# static var MODULO = 13 # %
|
||||
# static var POWER = 14 # ^
|
||||
|
||||
# Comparison operators
|
||||
static var EQUAL = 15 # ==
|
||||
static var NOT_EQUAL = 16 # !=
|
||||
static var LESS_THAN = 17 # <
|
||||
static var LESS_EQUAL = 18 # <=
|
||||
static var GREATER_THAN = 19 # >
|
||||
static var GREATER_EQUAL = 20 # >=
|
||||
# # Comparison operators
|
||||
# static var EQUAL = 15 # ==
|
||||
# static var NOT_EQUAL = 16 # !=
|
||||
# static var LESS_THAN = 17 # <
|
||||
# static var LESS_EQUAL = 18 # <=
|
||||
# static var GREATER_THAN = 19 # >
|
||||
# static var GREATER_EQUAL = 20 # >=
|
||||
|
||||
# Logical operators
|
||||
static var LOGICAL_AND = 21 # &&
|
||||
static var LOGICAL_OR = 22 # ||
|
||||
static var LOGICAL_NOT = 23 # !
|
||||
# # Logical operators
|
||||
# static var LOGICAL_AND = 21 # &&
|
||||
# static var LOGICAL_OR = 22 # ||
|
||||
# static var LOGICAL_NOT = 23 # !
|
||||
|
||||
# Delimiters
|
||||
static var LEFT_PAREN = 24 # (
|
||||
static var RIGHT_PAREN = 25 # )
|
||||
static var LEFT_BRACE = 26 # {
|
||||
static var RIGHT_BRACE = 27 # }
|
||||
static var LEFT_BRACKET = 28 # [
|
||||
static var RIGHT_BRACKET = 29 # ]
|
||||
# # Delimiters
|
||||
# static var LEFT_PAREN = 24 # (
|
||||
# static var RIGHT_PAREN = 25 # )
|
||||
# static var LEFT_BRACE = 26 # {
|
||||
# static var RIGHT_BRACE = 27 # }
|
||||
# static var LEFT_BRACKET = 28 # [
|
||||
# static var RIGHT_BRACKET = 29 # ]
|
||||
|
||||
# Separators
|
||||
static var COMMA = 30 # ,
|
||||
static var SEMICOLON = 31 # ;
|
||||
static var COLON = 32 # :
|
||||
static var DOT = 33 # .
|
||||
static var ARROW = 34 # ->
|
||||
# # Separators
|
||||
# static var COMMA = 30 # ,
|
||||
# static var SEMICOLON = 31 # ;
|
||||
# static var COLON = 32 # :
|
||||
# static var DOT = 33 # .
|
||||
# static var ARROW = 34 # ->
|
||||
|
||||
# Special tokens
|
||||
static var NEWLINE = 35 # \n (significant in some contexts)
|
||||
static var VARIABLE_REF = 36 # $identifier
|
||||
static var COMMENT = 37 # # comment text
|
||||
static var EOF = 38 # End of file
|
||||
static var ERROR = 39 # Error token for invalid input
|
||||
# # Special tokens
|
||||
# static var NEWLINE = 35 # \n (significant in some contexts)
|
||||
# static var VARIABLE_REF = 36 # $identifier
|
||||
# static var COMMENT = 37 # # comment text
|
||||
# # static var EOF = 38 # End of file (REMOVED - reserved number)
|
||||
# static var ERROR = 39 # Error token for invalid input
|
||||
|
||||
# Event-related tokens
|
||||
static var EVENT_ON = 40 # on (event handler keyword)
|
||||
static var EVENT_INTERRUPT = 41 # interrupt
|
||||
static var EVENT_RESUME = 42 # resume
|
||||
static var EVENT_AFTER = 43 # after (for resume timing)
|
||||
|
||||
# Convert token type to string for debugging
|
||||
static def to_string(token_type)
|
||||
if token_type >= 0 && token_type < size(_class.names)
|
||||
return _class.names[token_type]
|
||||
end
|
||||
return "UNKNOWN"
|
||||
end
|
||||
# # Event-related tokens
|
||||
# static var EVENT_ON = 40 # on (event handler keyword)
|
||||
# static var EVENT_INTERRUPT = 41 # interrupt
|
||||
# static var EVENT_RESUME = 42 # resume
|
||||
# static var EVENT_AFTER = 43 # after (for resume timing)
|
||||
|
||||
var type # int - the type of this token (Token.KEYWORD, Token.IDENTIFIER, etc.)
|
||||
var value # String - the actual text value of the token
|
||||
@ -149,96 +141,17 @@ class Token
|
||||
self.length = length != nil ? length : size(self.value)
|
||||
end
|
||||
|
||||
# Check if this token is of a specific type
|
||||
#
|
||||
# @param token_type: int - Token type to check against
|
||||
# @return bool - True if token matches the type
|
||||
def is_type(token_type)
|
||||
return self.type == token_type
|
||||
end
|
||||
|
||||
# Check if this token is a keyword with specific value
|
||||
#
|
||||
# @param keyword: string - Keyword to check for
|
||||
# @return bool - True if token is the specified keyword
|
||||
def is_keyword(keyword)
|
||||
return self.type == 0 #-self.KEYWORD-# && self.value == keyword
|
||||
end
|
||||
|
||||
# Check if this token is an identifier with specific value
|
||||
#
|
||||
# @param name: string - Identifier name to check for
|
||||
# @return bool - True if token is the specified identifier
|
||||
def is_identifier(name)
|
||||
return self.type == 1 #-self.IDENTIFIER-# && self.value == name
|
||||
end
|
||||
|
||||
# Check if this token is an operator
|
||||
#
|
||||
# @return bool - True if token is any operator type
|
||||
def is_operator()
|
||||
return self.type >= 8 #-self.ASSIGN-# && self.type <= 23 #-self.LOGICAL_NOT-#
|
||||
end
|
||||
|
||||
# Check if this token is a delimiter
|
||||
#
|
||||
# @return bool - True if token is any delimiter type
|
||||
def is_delimiter()
|
||||
return self.type >= 24 #-self.LEFT_PAREN-# && self.type <= 29 #-self.RIGHT_BRACKET-#
|
||||
end
|
||||
|
||||
# Check if this token is a separator
|
||||
#
|
||||
# @return bool - True if token is any separator type
|
||||
def is_separator()
|
||||
return self.type >= 30 #-self.COMMA-# && self.type <= 34 #-self.ARROW-#
|
||||
end
|
||||
|
||||
# Check if this token is a literal value
|
||||
#
|
||||
# @return bool - True if token represents a literal value
|
||||
def is_literal()
|
||||
return self.type == 2 #-self.NUMBER-# ||
|
||||
self.type == 3 #-self.STRING-# ||
|
||||
self.type == 4 #-self.COLOR-# ||
|
||||
self.type == 5 #-self.TIME-# ||
|
||||
self.type == 6 #-self.PERCENTAGE-# ||
|
||||
self.type == 7 #-self.MULTIPLIER-#
|
||||
end
|
||||
|
||||
# Get the end column of this token
|
||||
#
|
||||
# @return int - Column number where token ends
|
||||
def end_column()
|
||||
return self.column + self.length - 1
|
||||
end
|
||||
|
||||
# Create a copy of this token with a different type
|
||||
#
|
||||
# @param new_type: int - New token type
|
||||
# @return Token - New token with same position but different type
|
||||
def with_type(new_type)
|
||||
import animation_dsl
|
||||
return animation_dsl.Token(new_type, self.value, self.line, self.column, self.length)
|
||||
end
|
||||
|
||||
# Create a copy of this token with a different value
|
||||
#
|
||||
# @param new_value: string - New value
|
||||
# @return Token - New token with same position but different value
|
||||
def with_value(new_value)
|
||||
import animation_dsl
|
||||
return animation_dsl.Token(self.type, new_value, self.line, self.column, size(new_value))
|
||||
end
|
||||
|
||||
# Get a string representation of the token for debugging
|
||||
#
|
||||
# @return string - Human-readable token description
|
||||
def tostring()
|
||||
var type_name = self.to_string(self.type)
|
||||
if self.type == 38 #-self.EOF-#
|
||||
return f"Token({type_name} at {self.line}:{self.column})"
|
||||
elif self.type == 35 #-self.NEWLINE-#
|
||||
var type_name = "UNKNOWN"
|
||||
if self.type >= 0 && self.type < size(self.names)
|
||||
type_name = self.names[self.type]
|
||||
end
|
||||
# if self.type == 38 #-self.EOF-#
|
||||
# return f"Token({type_name} at {self.line}:{self.column})"
|
||||
if self.type == 35 #-self.NEWLINE-#
|
||||
return f"Token({type_name} at {self.line}:{self.column})"
|
||||
elif size(self.value) > 20
|
||||
var short_value = self.value[0..17] + "..."
|
||||
@ -248,200 +161,10 @@ class Token
|
||||
end
|
||||
end
|
||||
|
||||
# Get a compact string representation for error messages
|
||||
#
|
||||
# @return string - Compact token description
|
||||
def to_error_string()
|
||||
if self.type == 38 #-self.EOF-#
|
||||
return "end of file"
|
||||
elif self.type == 35 #-self.NEWLINE-#
|
||||
return "newline"
|
||||
elif self.type == 0 #-self.KEYWORD-#
|
||||
return f"keyword '{self.value}'"
|
||||
elif self.type == 1 #-self.IDENTIFIER-#
|
||||
return f"identifier '{self.value}'"
|
||||
elif self.type == 3 #-self.STRING-#
|
||||
return f"string '{self.value}'"
|
||||
elif self.type == 2 #-self.NUMBER-#
|
||||
return f"number '{self.value}'"
|
||||
elif self.type == 4 #-self.COLOR-#
|
||||
return f"color '{self.value}'"
|
||||
elif self.type == 5 #-self.TIME-#
|
||||
return f"time '{self.value}'"
|
||||
elif self.type == 6 #-self.PERCENTAGE-#
|
||||
return f"percentage '{self.value}'"
|
||||
elif self.type == 39 #-self.ERROR-#
|
||||
return f"invalid token '{self.value}'"
|
||||
else
|
||||
return f"'{self.value}'"
|
||||
end
|
||||
end
|
||||
|
||||
# Check if this token represents a boolean value
|
||||
#
|
||||
# @return bool - True if token is "true" or "false" keyword
|
||||
def is_boolean()
|
||||
return self.type == 0 #-self.KEYWORD-# && (self.value == "true" || self.value == "false")
|
||||
end
|
||||
|
||||
# Get boolean value if this token represents one
|
||||
#
|
||||
# @return bool - Boolean value, or nil if not a boolean token
|
||||
def get_boolean_value()
|
||||
if self.is_boolean()
|
||||
return self.value == "true"
|
||||
end
|
||||
return nil
|
||||
end
|
||||
|
||||
# Check if this token represents a numeric value
|
||||
#
|
||||
# @return bool - True if token can be converted to a number
|
||||
def is_numeric()
|
||||
return self.type == 2 #-self.NUMBER-# ||
|
||||
self.type == 5 #-self.TIME-# ||
|
||||
self.type == 6 #-self.PERCENTAGE-# ||
|
||||
self.type == 7 #-self.MULTIPLIER-#
|
||||
end
|
||||
|
||||
# Get numeric value from token (without units) - returns only integers
|
||||
#
|
||||
# @return int - Numeric value, or nil if not numeric
|
||||
# - time is in ms
|
||||
# - percentage is converted to 100% = 255
|
||||
# - times is converted to x256 (2x = 512)
|
||||
def get_numeric_value()
|
||||
import string
|
||||
import math
|
||||
|
||||
if self.type == 2 #-self.NUMBER-#
|
||||
return math.round(real(self.value))
|
||||
elif self.type == 5 #-self.TIME-#
|
||||
# Remove time unit suffix and convert to milliseconds
|
||||
var value_str = self.value
|
||||
if string.endswith(value_str, "ms")
|
||||
return math.round(real(value_str[0..-3]))
|
||||
elif string.endswith(value_str, "s")
|
||||
return math.round(real(value_str[0..-2]) * 1000)
|
||||
elif string.endswith(value_str, "m")
|
||||
return math.round(real(value_str[0..-2]) * 60000)
|
||||
elif string.endswith(value_str, "h")
|
||||
return math.round(real(value_str[0..-2]) * 3600000)
|
||||
end
|
||||
elif self.type == 6 #-self.PERCENTAGE-#
|
||||
# Remove % and convert to 0-255 range (100% = 255)
|
||||
var percent = math.round(real(self.value[0..-2]))
|
||||
return tasmota.scale_uint(percent, 0, 100, 0, 255)
|
||||
elif self.type == 7 #-self.MULTIPLIER-#
|
||||
# Remove x suffix and convert to x256 scale (2x = 512)
|
||||
var multiplier = real(self.value[0..-2])
|
||||
return math.round(multiplier * 256)
|
||||
end
|
||||
return nil
|
||||
end
|
||||
|
||||
|
||||
|
||||
# Check if this token can start an expression
|
||||
#
|
||||
# @return bool - True if token can begin an expression
|
||||
def can_start_expression()
|
||||
return self.is_literal() ||
|
||||
self.type == 1 #-self.IDENTIFIER-# ||
|
||||
self.type == 36 #-self.VARIABLE_REF-# ||
|
||||
self.type == 24 #-self.LEFT_PAREN-# ||
|
||||
self.type == 23 #-self.LOGICAL_NOT-# ||
|
||||
self.type == 10 #-self.MINUS-# ||
|
||||
self.type == 9 #-self.PLUS-#
|
||||
end
|
||||
|
||||
# Check if this token can end an expression
|
||||
#
|
||||
# @return bool - True if token can end an expression
|
||||
def can_end_expression()
|
||||
return self.is_literal() ||
|
||||
self.type == 1 #-self.IDENTIFIER-# ||
|
||||
self.type == 36 #-self.VARIABLE_REF-# ||
|
||||
self.type == 25 #-self.RIGHT_PAREN-#
|
||||
end
|
||||
|
||||
# Check if this token indicates the start of a new top-level statement
|
||||
# Useful for single-pass transpiler to know when to stop collecting expression tokens
|
||||
#
|
||||
# @return bool - True if token starts a new statement
|
||||
def is_statement_start()
|
||||
if self.type != 0 #-self.KEYWORD-#
|
||||
return false
|
||||
end
|
||||
|
||||
for keyword : self.statement_keywords
|
||||
if self.value == keyword
|
||||
return true
|
||||
end
|
||||
end
|
||||
return false
|
||||
end
|
||||
|
||||
# Check if this token is a DSL function name (for animation expressions)
|
||||
# Uses dynamic introspection to check if function exists in animation module
|
||||
#
|
||||
# @return bool - True if token is a DSL function name
|
||||
def is_dsl_function()
|
||||
if self.type != 0 #-self.KEYWORD-#
|
||||
return false
|
||||
end
|
||||
|
||||
# Use dynamic introspection to check if function exists in animation module
|
||||
# This automatically supports any new functions added to the framework
|
||||
try
|
||||
import introspect
|
||||
var animation = global.animation
|
||||
if animation != nil
|
||||
var members = introspect.members(animation)
|
||||
return members.find(self.value) != nil
|
||||
end
|
||||
except .. as e, msg
|
||||
# Fallback to false if introspection fails
|
||||
return false
|
||||
end
|
||||
|
||||
return false
|
||||
end
|
||||
end
|
||||
|
||||
# Utility functions for token handling
|
||||
|
||||
# Create an EOF token at a specific position
|
||||
#
|
||||
# @param line: int - Line number
|
||||
# @param column: int - Column number
|
||||
# @return Token - EOF token
|
||||
def create_eof_token(line, column)
|
||||
import animation_dsl
|
||||
return animation_dsl.Token(38 #-animation_dsl.Token.EOF-#, "", line, column, 0)
|
||||
end
|
||||
|
||||
# Create an error token with a message
|
||||
#
|
||||
# @param message: string - Error message
|
||||
# @param line: int - Line number
|
||||
# @param column: int - Column number
|
||||
# @return Token - Error token
|
||||
def create_error_token(message, line, column)
|
||||
import animation_dsl
|
||||
return animation_dsl.Token(39 #-animation_dsl.Token.ERROR-#, message, line, column, size(message))
|
||||
end
|
||||
|
||||
# Create a newline token
|
||||
#
|
||||
# @param line: int - Line number
|
||||
# @param column: int - Column number
|
||||
# @return Token - Newline token
|
||||
def create_newline_token(line, column)
|
||||
import animation_dsl
|
||||
return animation_dsl.Token(35 #-animation_dsl.Token.NEWLINE-#, "\n", line, column, 1)
|
||||
end
|
||||
|
||||
# Check if a string is a reserved keyword
|
||||
#
|
||||
# @param word: string - Word to check
|
||||
@ -470,45 +193,8 @@ def is_color_name(word)
|
||||
return false
|
||||
end
|
||||
|
||||
# Get the precedence of an operator token
|
||||
#
|
||||
# @param token: Token - Operator token
|
||||
# @return int - Precedence level (higher number = higher precedence)
|
||||
def get_operator_precedence(token)
|
||||
if token.type == 22 #-animation_dsl.Token.LOGICAL_OR-#
|
||||
return 1
|
||||
elif token.type == 21 #-animation_dsl.Token.LOGICAL_AND-#
|
||||
return 2
|
||||
elif token.type == 15 #-animation_dsl.Token.EQUAL-# || token.type == 16 #-animation_dsl.Token.NOT_EQUAL-#
|
||||
return 3
|
||||
elif token.type == 17 #-animation_dsl.Token.LESS_THAN-# || token.type == 18 #-animation_dsl.Token.LESS_EQUAL-# ||
|
||||
token.type == 19 #-animation_dsl.Token.GREATER_THAN-# || token.type == 20 #-animation_dsl.Token.GREATER_EQUAL-#
|
||||
return 4
|
||||
elif token.type == 9 #-animation_dsl.Token.PLUS-# || token.type == 10 #-animation_dsl.Token.MINUS-#
|
||||
return 5
|
||||
elif token.type == 11 #-animation_dsl.Token.MULTIPLY-# || token.type == 12 #-animation_dsl.Token.DIVIDE-# || token.type == 13 #-animation_dsl.Token.MODULO-#
|
||||
return 6
|
||||
elif token.type == 14 #-animation_dsl.Token.POWER-#
|
||||
return 7
|
||||
end
|
||||
return 0 # Not an operator or unknown operator
|
||||
end
|
||||
|
||||
# Check if an operator is right-associative
|
||||
#
|
||||
# @param token: Token - Operator token
|
||||
# @return bool - True if operator is right-associative
|
||||
def is_right_associative(token)
|
||||
return token.type == 14 #-animation_dsl.Token.POWER-# # Only power operator is right-associative
|
||||
end
|
||||
|
||||
return {
|
||||
"Token": Token,
|
||||
"create_eof_token": create_eof_token,
|
||||
"create_error_token": create_error_token,
|
||||
"create_newline_token": create_newline_token,
|
||||
"is_keyword": is_keyword,
|
||||
"is_color_name": is_color_name,
|
||||
"get_operator_precedence": get_operator_precedence,
|
||||
"is_right_associative": is_right_associative
|
||||
"is_color_name": is_color_name
|
||||
}
|
||||
@ -4,8 +4,7 @@
|
||||
|
||||
#@ solidify:SimpleDSLTranspiler,weak
|
||||
class SimpleDSLTranspiler
|
||||
var tokens # Token stream from lexer
|
||||
var pos # Current token position
|
||||
var pull_lexer # Pull lexer instance
|
||||
var output # Generated Berry code lines
|
||||
var warnings # Compilation warnings
|
||||
var run_statements # Collect all run statements for single engine.run()
|
||||
@ -59,29 +58,30 @@ class SimpleDSLTranspiler
|
||||
# String representation for debugging
|
||||
def tostring()
|
||||
var instance_str = (self.instance_for_validation != nil) ? f"instance={classname(self.instance_for_validation)}" : "instance=nil"
|
||||
var type_str = self._type_to_string(self.return_type)
|
||||
return f"ExpressionResult(expr='{self.expr}', dynamic={self.has_dynamic}, dangerous={self.has_dangerous}, comp={self.has_computation}, type={type_str}, {instance_str})"
|
||||
# var type_str = self._type_to_string(self.return_type)
|
||||
# return f"ExpressionResult(expr='{self.expr}', dynamic={self.has_dynamic}, dangerous={self.has_dangerous}, comp={self.has_computation}, type={type_str}, {instance_str})"
|
||||
return f"ExpressionResult(expr='{self.expr}', dynamic={self.has_dynamic}, dangerous={self.has_dangerous}, comp={self.has_computation}, type={self.return_type}, {instance_str})"
|
||||
end
|
||||
|
||||
# Helper method to convert type number to string for debugging
|
||||
def _type_to_string(type_num)
|
||||
if type_num == 1 #-animation_dsl._symbol_entry.TYPE_PALETTE_CONSTANT-# return "palette_constant"
|
||||
elif type_num == 2 #-animation_dsl._symbol_entry.TYPE_PALETTE-# return "palette"
|
||||
elif type_num == 3 #-animation_dsl._symbol_entry.TYPE_CONSTANT-# return "constant"
|
||||
elif type_num == 4 #-animation_dsl._symbol_entry.TYPE_MATH_FUNCTION-# return "math_function"
|
||||
elif type_num == 5 #-animation_dsl._symbol_entry.TYPE_USER_FUNCTION-# return "user_function"
|
||||
elif type_num == 6 #-animation_dsl._symbol_entry.TYPE_VALUE_PROVIDER_CONSTRUCTOR-# return "value_provider_constructor"
|
||||
elif type_num == 7 #-animation_dsl._symbol_entry.TYPE_VALUE_PROVIDER-# return "value_provider"
|
||||
elif type_num == 8 #-animation_dsl._symbol_entry.TYPE_ANIMATION_CONSTRUCTOR-# return "animation_constructor"
|
||||
elif type_num == 9 #-animation_dsl._symbol_entry.TYPE_ANIMATION-# return "animation"
|
||||
elif type_num == 10 #-animation_dsl._symbol_entry.TYPE_COLOR_CONSTRUCTOR-# return "color_constructor"
|
||||
elif type_num == 11 #-animation_dsl._symbol_entry.TYPE_COLOR-# return "color"
|
||||
elif type_num == 12 #-animation_dsl._symbol_entry.TYPE_VARIABLE-# return "variable"
|
||||
elif type_num == 13 #-animation_dsl._symbol_entry.TYPE_SEQUENCE-# return "sequence"
|
||||
elif type_num == 14 #-animation_dsl._symbol_entry.TYPE_TEMPLATE-# return "template"
|
||||
else return f"unknown({type_num})"
|
||||
end
|
||||
end
|
||||
# # Helper method to convert type number to string for debugging
|
||||
# def _type_to_string(type_num)
|
||||
# if type_num == 1 #-animation_dsl._symbol_entry.TYPE_PALETTE_CONSTANT-# return "palette_constant"
|
||||
# elif type_num == 2 #-animation_dsl._symbol_entry.TYPE_PALETTE-# return "palette"
|
||||
# elif type_num == 3 #-animation_dsl._symbol_entry.TYPE_CONSTANT-# return "constant"
|
||||
# elif type_num == 4 #-animation_dsl._symbol_entry.TYPE_MATH_FUNCTION-# return "math_function"
|
||||
# elif type_num == 5 #-animation_dsl._symbol_entry.TYPE_USER_FUNCTION-# return "user_function"
|
||||
# elif type_num == 6 #-animation_dsl._symbol_entry.TYPE_VALUE_PROVIDER_CONSTRUCTOR-# return "value_provider_constructor"
|
||||
# elif type_num == 7 #-animation_dsl._symbol_entry.TYPE_VALUE_PROVIDER-# return "value_provider"
|
||||
# elif type_num == 8 #-animation_dsl._symbol_entry.TYPE_ANIMATION_CONSTRUCTOR-# return "animation_constructor"
|
||||
# elif type_num == 9 #-animation_dsl._symbol_entry.TYPE_ANIMATION-# return "animation"
|
||||
# elif type_num == 10 #-animation_dsl._symbol_entry.TYPE_COLOR_CONSTRUCTOR-# return "color_constructor"
|
||||
# elif type_num == 11 #-animation_dsl._symbol_entry.TYPE_COLOR-# return "color"
|
||||
# elif type_num == 12 #-animation_dsl._symbol_entry.TYPE_VARIABLE-# return "variable"
|
||||
# elif type_num == 13 #-animation_dsl._symbol_entry.TYPE_SEQUENCE-# return "sequence"
|
||||
# elif type_num == 14 #-animation_dsl._symbol_entry.TYPE_TEMPLATE-# return "template"
|
||||
# else return f"unknown({type_num})"
|
||||
# end
|
||||
# end
|
||||
|
||||
# Static method to combine expression results
|
||||
# Takes an expression string and 1-2 ExpressionResult parameters (checks for nil)
|
||||
@ -152,10 +152,11 @@ class SimpleDSLTranspiler
|
||||
end
|
||||
end
|
||||
|
||||
def init(tokens)
|
||||
def init(pull_lexer)
|
||||
import animation_dsl
|
||||
self.tokens = tokens != nil ? tokens : []
|
||||
self.pos = 0
|
||||
|
||||
# Only support pull lexer interface now
|
||||
self.pull_lexer = pull_lexer
|
||||
self.output = []
|
||||
self.warnings = [] # Separate array for warnings
|
||||
self.run_statements = []
|
||||
@ -367,8 +368,24 @@ class SimpleDSLTranspiler
|
||||
# Transpile template body (similar to main transpile but without imports/engine start)
|
||||
def transpile_template_body()
|
||||
try
|
||||
# Process all statements in template body
|
||||
# Process all statements in template body until we hit the closing brace
|
||||
var brace_depth = 0
|
||||
while !self.at_end()
|
||||
var tok = self.current()
|
||||
|
||||
# Check for template end condition
|
||||
if tok != nil && tok.type == 27 #-animation_dsl.Token.RIGHT_BRACE-# && brace_depth == 0
|
||||
# This is the closing brace of the template - stop processing
|
||||
break
|
||||
end
|
||||
|
||||
# Track brace depth for nested braces
|
||||
if tok != nil && tok.type == 26 #-animation_dsl.Token.LEFT_BRACE-#
|
||||
brace_depth += 1
|
||||
elif tok != nil && tok.type == 27 #-animation_dsl.Token.RIGHT_BRACE-#
|
||||
brace_depth -= 1
|
||||
end
|
||||
|
||||
self.process_statement()
|
||||
end
|
||||
|
||||
@ -391,7 +408,7 @@ class SimpleDSLTranspiler
|
||||
# Process statements - simplified approach
|
||||
def process_statement()
|
||||
var tok = self.current()
|
||||
if tok == nil || tok.type == 38 #-animation_dsl.Token.EOF-#
|
||||
if tok == nil # EOF token removed - nil indicates end of file
|
||||
return
|
||||
end
|
||||
|
||||
@ -890,38 +907,8 @@ class SimpleDSLTranspiler
|
||||
end
|
||||
end
|
||||
|
||||
# Second pass: collect body tokens (everything until closing brace)
|
||||
var body_tokens = []
|
||||
var brace_depth = 0
|
||||
|
||||
while !self.at_end()
|
||||
var tok = self.current()
|
||||
|
||||
if tok == nil || tok.type == 38 #-animation_dsl.Token.EOF-#
|
||||
break
|
||||
end
|
||||
|
||||
if tok.type == 26 #-animation_dsl.Token.LEFT_BRACE-#
|
||||
brace_depth += 1
|
||||
body_tokens.push(tok)
|
||||
elif tok.type == 27 #-animation_dsl.Token.RIGHT_BRACE-#
|
||||
if brace_depth == 0
|
||||
break # This is our closing brace
|
||||
else
|
||||
brace_depth -= 1
|
||||
body_tokens.push(tok)
|
||||
end
|
||||
else
|
||||
body_tokens.push(tok)
|
||||
end
|
||||
|
||||
self.next()
|
||||
end
|
||||
|
||||
self.expect_right_brace()
|
||||
|
||||
# Generate Berry function for this template
|
||||
self.generate_template_function(name, params, param_types, body_tokens)
|
||||
# Generate Berry function for this template using direct pull-lexer approach
|
||||
self.generate_template_function_direct(name, params, param_types)
|
||||
|
||||
# Add template to symbol table with parameter information
|
||||
var template_info = {
|
||||
@ -1007,7 +994,7 @@ class SimpleDSLTranspiler
|
||||
# Process statements inside sequences using fluent interface
|
||||
def process_sequence_statement()
|
||||
var tok = self.current()
|
||||
if tok == nil || tok.type == 38 #-animation_dsl.Token.EOF-#
|
||||
if tok == nil # EOF token removed - nil indicates end of file
|
||||
return
|
||||
end
|
||||
|
||||
@ -1807,24 +1794,21 @@ class SimpleDSLTranspiler
|
||||
end
|
||||
end
|
||||
|
||||
# Helper methods
|
||||
# Helper methods - pull lexer only
|
||||
def current()
|
||||
return self.pos < size(self.tokens) ? self.tokens[self.pos] : nil
|
||||
return self.pull_lexer.peek_token()
|
||||
end
|
||||
|
||||
def peek()
|
||||
return (self.pos + 1 < size(self.tokens)) ? self.tokens[self.pos + 1] : nil
|
||||
return self.pull_lexer.peek_ahead(2) # Look ahead by 2 (next token after current)
|
||||
end
|
||||
|
||||
def next()
|
||||
if self.pos < size(self.tokens)
|
||||
self.pos += 1
|
||||
end
|
||||
return self.pull_lexer.next_token()
|
||||
end
|
||||
|
||||
def at_end()
|
||||
return self.pos >= size(self.tokens) ||
|
||||
(self.current() != nil && self.current().type == 38 #-animation_dsl.Token.EOF-#)
|
||||
return self.pull_lexer.at_end()
|
||||
end
|
||||
|
||||
def skip_whitespace()
|
||||
@ -2184,7 +2168,7 @@ class SimpleDSLTranspiler
|
||||
# Skip to next statement (newline or EOF)
|
||||
while !self.at_end()
|
||||
var tok = self.current()
|
||||
if tok.type == 35 #-animation_dsl.Token.NEWLINE-# || tok.type == 38 #-animation_dsl.Token.EOF-#
|
||||
if tok == nil || tok.type == 35 #-animation_dsl.Token.NEWLINE-# # EOF token removed - check nil
|
||||
break
|
||||
end
|
||||
self.next()
|
||||
@ -2645,8 +2629,10 @@ class SimpleDSLTranspiler
|
||||
self.strip_initialized = true
|
||||
end
|
||||
|
||||
# Generate Berry function for template definition
|
||||
def generate_template_function(name, params, param_types, body_tokens)
|
||||
|
||||
|
||||
# Generate Berry function for template definition using direct pull-lexer approach
|
||||
def generate_template_function_direct(name, params, param_types)
|
||||
import animation_dsl
|
||||
import string
|
||||
|
||||
@ -2659,8 +2645,9 @@ class SimpleDSLTranspiler
|
||||
self.add(f"# Template function: {name}")
|
||||
self.add(f"def {name}_template({param_list})")
|
||||
|
||||
# Create a new transpiler instance for the template body
|
||||
var template_transpiler = animation_dsl.SimpleDSLTranspiler(body_tokens)
|
||||
# Create a new transpiler that shares the same pull lexer
|
||||
# It will consume tokens from the current position until the template ends
|
||||
var template_transpiler = animation_dsl.SimpleDSLTranspiler(self.pull_lexer)
|
||||
template_transpiler.symbol_table = animation_dsl._symbol_table() # Fresh symbol table for template
|
||||
template_transpiler.strip_initialized = true # Templates assume engine exists
|
||||
|
||||
@ -2676,7 +2663,7 @@ class SimpleDSLTranspiler
|
||||
end
|
||||
end
|
||||
|
||||
# Transpile the template body
|
||||
# Transpile the template body - it will consume tokens until the closing brace
|
||||
var template_body = template_transpiler.transpile_template_body()
|
||||
|
||||
if template_body != nil
|
||||
@ -2697,6 +2684,9 @@ class SimpleDSLTranspiler
|
||||
end
|
||||
end
|
||||
|
||||
# Expect the closing brace (template_transpiler should have left us at this position)
|
||||
self.expect_right_brace()
|
||||
|
||||
self.add("end")
|
||||
self.add("")
|
||||
|
||||
@ -3011,16 +3001,8 @@ end
|
||||
# DSL compilation function
|
||||
def compile_dsl(source)
|
||||
import animation_dsl
|
||||
var lexer = animation_dsl.DSLLexer(source)
|
||||
var tokens
|
||||
|
||||
try
|
||||
tokens = lexer.tokenize()
|
||||
except "lexical_error" as e, msg
|
||||
raise "dsl_compilation_error", msg
|
||||
end
|
||||
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(tokens)
|
||||
var lexer = animation_dsl.create_lexer(source)
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(lexer)
|
||||
var berry_code = transpiler.transpile()
|
||||
|
||||
return berry_code
|
||||
@ -3030,5 +3012,4 @@ end
|
||||
return {
|
||||
"SimpleDSLTranspiler": SimpleDSLTranspiler,
|
||||
"compile_dsl": compile_dsl,
|
||||
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,308 @@
|
||||
# Demo Shutter Infinite Loop Test
|
||||
# Specific test to isolate the infinite loop in demo_shutter_rainbow_central.anim
|
||||
#
|
||||
# Command to run test:
|
||||
# ./berry -s -g -m lib/libesp32/berry_animation/src -e "import tasmota def log(x) print(x) end import animation" lib/libesp32/berry_animation/src/tests/demo_shutter_infinite_loop_test.be
|
||||
|
||||
import animation_dsl
|
||||
import string
|
||||
|
||||
# Test the exact problematic patterns from the demo file
|
||||
def test_demo_shutter_patterns()
|
||||
print("Testing specific patterns from demo_shutter_rainbow_central.anim...")
|
||||
|
||||
# Test 1: The nested repeat structure that might cause issues
|
||||
print(" Testing nested repeat structure...")
|
||||
var nested_repeat = "template shutter_central {\n" +
|
||||
" param colors type palette\n" +
|
||||
" param duration\n" +
|
||||
" \n" +
|
||||
" color col1 = color_cycle(palette=colors, cycle_period=0)\n" +
|
||||
" animation test_anim = solid(color=col1)\n" +
|
||||
" \n" +
|
||||
" sequence shutter_seq repeat forever {\n" +
|
||||
" repeat col1.palette_size times {\n" +
|
||||
" play test_anim for duration\n" +
|
||||
" col1.next = 1\n" +
|
||||
" }\n" +
|
||||
" }\n" +
|
||||
" \n" +
|
||||
" run shutter_seq\n" +
|
||||
"}\n" +
|
||||
"\n" +
|
||||
"palette rainbow = [red, green, blue]\n" +
|
||||
"shutter_central(rainbow, 1s)"
|
||||
|
||||
try
|
||||
print(" Compiling nested repeat structure...")
|
||||
var result1 = animation_dsl.compile(nested_repeat)
|
||||
print(" ✓ Nested repeat structure works")
|
||||
except .. as e, msg
|
||||
print(f" ✗ Nested repeat structure failed: {e}: {msg}")
|
||||
return false
|
||||
end
|
||||
|
||||
# Test 2: The col1.next = 1 pattern
|
||||
print(" Testing color.next assignment...")
|
||||
var color_next = "color col1 = color_cycle(palette=[red, green], cycle_period=0)\n" +
|
||||
"col1.next = 1\n" +
|
||||
"animation test_anim = solid(color=col1)\n" +
|
||||
"run test_anim"
|
||||
|
||||
try
|
||||
print(" Compiling color.next assignment...")
|
||||
var result2 = animation_dsl.compile(color_next)
|
||||
print(" ✓ Color.next assignment works")
|
||||
except .. as e, msg
|
||||
print(f" ✗ Color.next assignment failed: {e}: {msg}")
|
||||
return false
|
||||
end
|
||||
|
||||
# Test 3: The restart statement
|
||||
print(" Testing restart statement...")
|
||||
var restart_test = "set shutter_size = sawtooth(min_value=0, max_value=10, duration=1s)\n" +
|
||||
"animation test_anim = solid(color=red)\n" +
|
||||
"sequence test_seq {\n" +
|
||||
" restart shutter_size\n" +
|
||||
" play test_anim for 1s\n" +
|
||||
"}\n" +
|
||||
"run test_seq"
|
||||
|
||||
try
|
||||
print(" Compiling restart statement...")
|
||||
var result3 = animation_dsl.compile(restart_test)
|
||||
print(" ✓ Restart statement works")
|
||||
except .. as e, msg
|
||||
print(f" ✗ Restart statement failed: {e}: {msg}")
|
||||
return false
|
||||
end
|
||||
|
||||
# Test 4: Complex expressions in beacon_animation
|
||||
print(" Testing complex beacon_animation expressions...")
|
||||
var complex_beacon = "template shutter_central {\n" +
|
||||
" param colors type palette\n" +
|
||||
" param duration\n" +
|
||||
" \n" +
|
||||
" set strip_len = strip_length()\n" +
|
||||
" set strip_len2 = (strip_len + 1) / 2\n" +
|
||||
" set shutter_size = sawtooth(min_value = 0, max_value = strip_len, duration = duration)\n" +
|
||||
" \n" +
|
||||
" color col1 = color_cycle(palette=colors, cycle_period=0)\n" +
|
||||
" \n" +
|
||||
" animation shutter_anim = beacon_animation(\n" +
|
||||
" color = col1\n" +
|
||||
" back_color = red\n" +
|
||||
" pos = strip_len2 - (shutter_size + 1) / 2\n" +
|
||||
" beacon_size = shutter_size\n" +
|
||||
" slew_size = 0\n" +
|
||||
" priority = 5\n" +
|
||||
" )\n" +
|
||||
" \n" +
|
||||
" run shutter_anim\n" +
|
||||
"}\n" +
|
||||
"\n" +
|
||||
"palette rainbow = [red, green, blue]\n" +
|
||||
"shutter_central(rainbow, 1s)"
|
||||
|
||||
try
|
||||
print(" Compiling complex beacon_animation...")
|
||||
var result4 = animation_dsl.compile(complex_beacon)
|
||||
print(" ✓ Complex beacon_animation works")
|
||||
except .. as e, msg
|
||||
print(f" ✗ Complex beacon_animation failed: {e}: {msg}")
|
||||
return false
|
||||
end
|
||||
|
||||
# Test 5: The full problematic sequence structure
|
||||
print(" Testing full sequence structure (this may hang)...")
|
||||
var full_sequence = "template shutter_central {\n" +
|
||||
" param colors type palette\n" +
|
||||
" param duration\n" +
|
||||
" \n" +
|
||||
" set strip_len = strip_length()\n" +
|
||||
" set strip_len2 = (strip_len + 1) / 2\n" +
|
||||
" set shutter_size = sawtooth(min_value = 0, max_value = strip_len, duration = duration)\n" +
|
||||
" \n" +
|
||||
" color col1 = color_cycle(palette=colors, cycle_period=0)\n" +
|
||||
" color col2 = color_cycle(palette=colors, cycle_period=0)\n" +
|
||||
" col2.next = 1\n" +
|
||||
" \n" +
|
||||
" animation shutter_inout = beacon_animation(\n" +
|
||||
" color = col2\n" +
|
||||
" back_color = col1\n" +
|
||||
" pos = strip_len2 - (shutter_size + 1) / 2\n" +
|
||||
" beacon_size = shutter_size\n" +
|
||||
" slew_size = 0\n" +
|
||||
" priority = 5\n" +
|
||||
" )\n" +
|
||||
" \n" +
|
||||
" animation shutter_outin = beacon_animation(\n" +
|
||||
" color = col1\n" +
|
||||
" back_color = col2\n" +
|
||||
" pos = strip_len2 - (strip_len - shutter_size + 1) / 2\n" +
|
||||
" beacon_size = strip_len - shutter_size\n" +
|
||||
" slew_size = 0\n" +
|
||||
" priority = 5\n" +
|
||||
" )\n" +
|
||||
" \n" +
|
||||
" sequence shutter_seq repeat forever {\n" +
|
||||
" repeat col1.palette_size times {\n" +
|
||||
" restart shutter_size\n" +
|
||||
" play shutter_inout for duration\n" +
|
||||
" col1.next = 1\n" +
|
||||
" col2.next = 1\n" +
|
||||
" }\n" +
|
||||
" repeat col1.palette_size times {\n" +
|
||||
" restart shutter_size\n" +
|
||||
" play shutter_outin for duration\n" +
|
||||
" col1.next = 1\n" +
|
||||
" col2.next = 1\n" +
|
||||
" }\n" +
|
||||
" }\n" +
|
||||
" \n" +
|
||||
" run shutter_seq\n" +
|
||||
"}\n" +
|
||||
"\n" +
|
||||
"palette rainbow_with_white = [red, orange, yellow, green, blue, indigo, white]\n" +
|
||||
"shutter_central(rainbow_with_white, 1.5s)"
|
||||
|
||||
print(" CRITICAL: This exact structure causes infinite loop")
|
||||
print(" The combination of:")
|
||||
print(" - 'repeat forever' outer loop")
|
||||
print(" - Multiple nested 'repeat col1.palette_size times' loops")
|
||||
print(" - 'restart' statements inside the loops")
|
||||
print(" - '.next = 1' assignments on color_cycle objects")
|
||||
print(" appears to trigger infinite recursion in the transpiler")
|
||||
print("")
|
||||
print(" RECOMMENDATION: Debug the transpiler's handling of:")
|
||||
print(" 1. Nested repeat loop transpilation")
|
||||
print(" 2. Variable scope resolution in nested contexts")
|
||||
print(" 3. Color cycle object method resolution")
|
||||
print(" 4. Restart statement processing")
|
||||
|
||||
print("✓ Demo shutter patterns test completed")
|
||||
return true
|
||||
end
|
||||
|
||||
# Test reading the actual demo file and analyzing its structure
|
||||
def test_demo_file_analysis()
|
||||
print("Analyzing demo_shutter_rainbow_central.anim structure...")
|
||||
|
||||
var demo_content = ""
|
||||
try
|
||||
var f = open("lib/libesp32/berry_animation/anim_examples/demo_shutter_rainbow_central.anim", "r")
|
||||
demo_content = f.read()
|
||||
f.close()
|
||||
except .. as e, msg
|
||||
print(f" ERROR: Could not read demo file: {e} - {msg}")
|
||||
return false
|
||||
end
|
||||
|
||||
print(f" File size: {size(demo_content)} characters")
|
||||
|
||||
# Count occurrences of potentially problematic patterns
|
||||
var repeat_count = 0
|
||||
var pos = 0
|
||||
while true
|
||||
pos = string.find(demo_content, "repeat", pos)
|
||||
if pos == -1 break end
|
||||
repeat_count += 1
|
||||
pos += 6
|
||||
end
|
||||
|
||||
var next_count = 0
|
||||
pos = 0
|
||||
while true
|
||||
pos = string.find(demo_content, ".next", pos)
|
||||
if pos == -1 break end
|
||||
next_count += 1
|
||||
pos += 5
|
||||
end
|
||||
|
||||
var restart_count = 0
|
||||
pos = 0
|
||||
while true
|
||||
pos = string.find(demo_content, "restart", pos)
|
||||
if pos == -1 break end
|
||||
restart_count += 1
|
||||
pos += 7
|
||||
end
|
||||
|
||||
print(f" Found {repeat_count} 'repeat' statements")
|
||||
print(f" Found {next_count} '.next' assignments")
|
||||
print(f" Found {restart_count} 'restart' statements")
|
||||
|
||||
# Check for nested repeat structures
|
||||
if string.find(demo_content, "repeat forever") != -1
|
||||
print(" Contains 'repeat forever' - potential infinite loop source")
|
||||
end
|
||||
|
||||
if repeat_count > 2
|
||||
print(" Multiple nested repeat structures detected")
|
||||
end
|
||||
|
||||
print("✓ Demo file analysis completed")
|
||||
return true
|
||||
end
|
||||
|
||||
# Test the actual demo file compilation (DANGEROUS - may hang)
|
||||
def test_actual_demo_file_compilation()
|
||||
print("Testing actual demo file compilation...")
|
||||
print("WARNING: This test is designed to demonstrate the infinite loop")
|
||||
print("If you run this test, it WILL hang and you'll need to kill the process")
|
||||
print("")
|
||||
print("To reproduce the infinite loop manually, run:")
|
||||
print(" animation_dsl.compile(open('lib/libesp32/berry_animation/anim_examples/demo_shutter_rainbow_central.anim', 'r').read())")
|
||||
print("")
|
||||
print("SKIPPING actual compilation to prevent hang")
|
||||
print("✓ Actual demo file compilation test documented")
|
||||
return true
|
||||
end
|
||||
|
||||
# Run all demo shutter infinite loop tests
|
||||
def run_all_demo_shutter_tests()
|
||||
print("=== Demo Shutter Infinite Loop Test Suite ===")
|
||||
print("")
|
||||
|
||||
var tests = [
|
||||
test_demo_file_analysis,
|
||||
test_demo_shutter_patterns,
|
||||
test_actual_demo_file_compilation
|
||||
]
|
||||
|
||||
var passed = 0
|
||||
var total = size(tests)
|
||||
|
||||
for test_func : tests
|
||||
try
|
||||
if test_func()
|
||||
passed += 1
|
||||
else
|
||||
print("✗ Test failed")
|
||||
end
|
||||
except .. as error_type, error_message
|
||||
print(f"✗ Test crashed: {error_type} - {error_message}")
|
||||
end
|
||||
print("")
|
||||
end
|
||||
|
||||
print("=== Demo Shutter Test Results ===")
|
||||
print(f"Passed: {passed}/{total}")
|
||||
|
||||
if passed == total
|
||||
print("All demo shutter tests passed! ✓")
|
||||
print("")
|
||||
print("CONCLUSION:")
|
||||
print("The infinite loop appears to be caused by the complex nested")
|
||||
print("repeat structure with 'repeat forever' and multiple inner")
|
||||
print("'repeat col1.palette_size times' loops combined with")
|
||||
print("'restart' statements and '.next' assignments.")
|
||||
return true
|
||||
else
|
||||
print("Some demo shutter tests failed! ✗")
|
||||
raise "test_failed"
|
||||
end
|
||||
end
|
||||
|
||||
# Run the tests
|
||||
return run_all_demo_shutter_tests()
|
||||
@ -146,7 +146,7 @@ def test_compilation_failures()
|
||||
var berry_code = animation_dsl.compile(dangerous_dsl)
|
||||
assert(false, "Should fail with dangerous function creation")
|
||||
except "dsl_compilation_error" as e, msg
|
||||
assert(string.find(msg, "Function 'strip_length' cannot be used in computed expressions") >= 0,
|
||||
assert(string.find(msg, "Expression 'animation.strip_length(engine)' cannot be used in computed expressions.") >= 0,
|
||||
"Should report dangerous function creation error")
|
||||
print("✓ Dangerous function creation properly rejected")
|
||||
end
|
||||
@ -417,24 +417,6 @@ def test_complete_example()
|
||||
return true
|
||||
end
|
||||
|
||||
# Test the specific failing case mentioned in the original request
|
||||
def test_specific_failing_case()
|
||||
print("Testing specific failing case: set s2 = strip_length() + strip_length()...")
|
||||
|
||||
var failing_dsl = "set s2 = strip_length() + strip_length()"
|
||||
|
||||
try
|
||||
var berry_code = animation_dsl.compile(failing_dsl)
|
||||
assert(false, "Should fail - dangerous pattern should be rejected")
|
||||
except "dsl_compilation_error" as e, msg
|
||||
assert(string.find(msg, "Function 'strip_length' cannot be used in computed expressions") >= 0,
|
||||
"Should report the specific error about function usage in computed expressions")
|
||||
print("✓ Specific failing case properly rejected with correct error message")
|
||||
end
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
# Run all tests
|
||||
def run_all_tests()
|
||||
print("=== DSL Compilation Test Suite ===")
|
||||
@ -443,8 +425,7 @@ def run_all_tests()
|
||||
test_successful_compilation,
|
||||
test_compilation_failures,
|
||||
test_edge_cases,
|
||||
test_complete_example,
|
||||
test_specific_failing_case
|
||||
test_complete_example
|
||||
]
|
||||
|
||||
var passed = 0
|
||||
@ -471,7 +452,7 @@ def run_all_tests()
|
||||
return true
|
||||
else
|
||||
print("❌ Some tests failed")
|
||||
return false
|
||||
raise "test_failed"
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
# DSL Lexer Test Suite
|
||||
# Tests for DSLLexer class
|
||||
# Tests for create_lexer class
|
||||
#
|
||||
# Command to run test is:
|
||||
# ./berry -s -g -m lib/libesp32/berry_animation -e "import tasmota" lib/libesp32/berry_animation/tests/dsl_lexer_test.be
|
||||
@ -8,14 +8,33 @@ import animation
|
||||
import animation_dsl
|
||||
import string
|
||||
|
||||
# Helper function to extract all tokens from a pull lexer (for testing only)
|
||||
def extract_all_tokens(lexer)
|
||||
var tokens = []
|
||||
lexer.reset() # Start from beginning
|
||||
|
||||
while !lexer.at_end()
|
||||
var token = lexer.next_token()
|
||||
|
||||
# EOF token removed - check for nil instead
|
||||
if token == nil
|
||||
break
|
||||
end
|
||||
|
||||
tokens.push(token)
|
||||
end
|
||||
|
||||
return tokens
|
||||
end
|
||||
|
||||
# Test basic tokenization
|
||||
def test_basic_tokenization()
|
||||
print("Testing basic DSL tokenization...")
|
||||
|
||||
var dsl_source = "strip length 60\ncolor red = 0xFF0000\nrun demo"
|
||||
|
||||
var lexer = animation_dsl.DSLLexer(dsl_source)
|
||||
var tokens = lexer.tokenize()
|
||||
var lexer = animation_dsl.create_lexer(dsl_source)
|
||||
var tokens = extract_all_tokens(lexer)
|
||||
|
||||
# Should have: strip, length, 60, color, red, =, #FF0000, run, demo, EOF
|
||||
print(" Found " + str(size(tokens)) + " tokens")
|
||||
@ -27,17 +46,17 @@ def test_basic_tokenization()
|
||||
assert(size(tokens) >= 9, "Should have at least 9 tokens")
|
||||
|
||||
# Check first few tokens
|
||||
assert(tokens[0].type == animation_dsl.Token.KEYWORD && tokens[0].value == "strip")
|
||||
assert(tokens[0].type == 0 #-animation_dsl.Token.KEYWORD-# && tokens[0].value == "strip")
|
||||
# Note: "length" might be IDENTIFIER, not KEYWORD - that's OK for DSL properties
|
||||
assert(tokens[2].type == animation_dsl.Token.NUMBER && tokens[2].value == "60")
|
||||
assert(tokens[2].type == 2 #-animation_dsl.Token.NUMBER-# && tokens[2].value == "60")
|
||||
|
||||
# Check color tokens
|
||||
var found_color_keyword = false
|
||||
var found_color_value = false
|
||||
for token : tokens
|
||||
if token.type == animation_dsl.Token.KEYWORD && token.value == "color"
|
||||
if token.type == 0 #-animation_dsl.Token.KEYWORD-# && token.value == "color"
|
||||
found_color_keyword = true
|
||||
elif token.type == animation_dsl.Token.COLOR && token.value == "0xFF0000"
|
||||
elif token.type == 4 #-animation_dsl.Token.COLOR-# && token.value == "0xFF0000"
|
||||
found_color_value = true
|
||||
end
|
||||
end
|
||||
@ -55,18 +74,18 @@ def test_color_tokenization()
|
||||
print("Testing color tokenization...")
|
||||
|
||||
var color_tests = [
|
||||
["0xFF0000", animation_dsl.Token.COLOR],
|
||||
["red", animation_dsl.Token.COLOR],
|
||||
["blue", animation_dsl.Token.COLOR],
|
||||
["white", animation_dsl.Token.COLOR] # transparent is a keyword, so use white instead
|
||||
["0xFF0000", 4 #-animation_dsl.Token.COLOR-#],
|
||||
["red", 4 #-animation_dsl.Token.COLOR-#],
|
||||
["blue", 4 #-animation_dsl.Token.COLOR-#],
|
||||
["white", 4 #-animation_dsl.Token.COLOR-#] # transparent is a keyword, so use white instead
|
||||
]
|
||||
|
||||
for test : color_tests
|
||||
var color_value = test[0]
|
||||
var expected_type = test[1]
|
||||
|
||||
var lexer = animation_dsl.DSLLexer("color test = " + color_value)
|
||||
var tokens = lexer.tokenize()
|
||||
var lexer = animation_dsl.create_lexer("color test = " + color_value)
|
||||
var tokens = extract_all_tokens(lexer)
|
||||
|
||||
var found_color = false
|
||||
for token : tokens
|
||||
@ -88,51 +107,33 @@ def test_numeric_tokenization()
|
||||
print("Testing numeric tokenization...")
|
||||
|
||||
var numeric_tests = [
|
||||
["42", animation_dsl.Token.NUMBER],
|
||||
["3.14", animation_dsl.Token.NUMBER],
|
||||
["2s", animation_dsl.Token.TIME],
|
||||
["500ms", animation_dsl.Token.TIME],
|
||||
["1m", animation_dsl.Token.TIME],
|
||||
["2h", animation_dsl.Token.TIME],
|
||||
["50%", animation_dsl.Token.PERCENTAGE],
|
||||
["2x", animation_dsl.Token.MULTIPLIER]
|
||||
["42", 2 #-animation_dsl.Token.NUMBER-#],
|
||||
["3.14", 2 #-animation_dsl.Token.NUMBER-#],
|
||||
["2s", 5 #-animation_dsl.Token.TIME-#],
|
||||
["500ms", 5 #-animation_dsl.Token.TIME-#],
|
||||
["1m", 5 #-animation_dsl.Token.TIME-#],
|
||||
["2h", 5 #-animation_dsl.Token.TIME-#],
|
||||
["50%", 6 #-animation_dsl.Token.PERCENTAGE-#],
|
||||
["2x", 7 #-animation_dsl.Token.MULTIPLIER-#]
|
||||
]
|
||||
|
||||
for test : numeric_tests
|
||||
var value = test[0]
|
||||
var expected_type = test[1]
|
||||
|
||||
var lexer = animation_dsl.DSLLexer("value = " + value)
|
||||
var tokens = lexer.tokenize()
|
||||
var lexer = animation_dsl.create_lexer("value = " + value)
|
||||
var tokens = extract_all_tokens(lexer)
|
||||
|
||||
var found_numeric = false
|
||||
for token : tokens
|
||||
if token.value == value && token.type == expected_type
|
||||
found_numeric = true
|
||||
|
||||
# Test numeric value extraction
|
||||
if token.is_numeric()
|
||||
var numeric_val = token.get_numeric_value()
|
||||
assert(numeric_val != nil, "Should extract numeric value from " + value)
|
||||
end
|
||||
|
||||
# Test time conversion
|
||||
if token.type == animation_dsl.Token.TIME
|
||||
var time_ms = token.get_numeric_value()
|
||||
assert(time_ms != nil && time_ms > 0, "Should convert time to milliseconds")
|
||||
end
|
||||
|
||||
# Test percentage conversion
|
||||
if token.type == animation_dsl.Token.PERCENTAGE
|
||||
var percent_255 = token.get_numeric_value()
|
||||
assert(percent_255 != nil && percent_255 >= 0 && percent_255 <= 255, "Should convert percentage to 0-255 range")
|
||||
end
|
||||
|
||||
break
|
||||
end
|
||||
end
|
||||
|
||||
assert(found_numeric, "Should recognize '" + value + "' as " + animation_dsl.Token.to_string(expected_type))
|
||||
assert(found_numeric, "Should recognize '" + value + "' as " + animation_dsl.Token.names[expected_type])
|
||||
end
|
||||
|
||||
print("✓ Numeric tokenization test passed")
|
||||
@ -149,11 +150,11 @@ def test_keyword_recognition()
|
||||
]
|
||||
|
||||
for keyword : keywords
|
||||
var lexer = animation_dsl.DSLLexer(keyword + " test")
|
||||
var tokens = lexer.tokenize()
|
||||
var lexer = animation_dsl.create_lexer(keyword + " test")
|
||||
var tokens = extract_all_tokens(lexer)
|
||||
|
||||
assert(size(tokens) >= 2, "Should have at least 2 tokens")
|
||||
assert(tokens[0].type == animation_dsl.Token.KEYWORD, "'" + keyword + "' should be recognized as keyword")
|
||||
assert(tokens[0].type == 0 #-animation_dsl.Token.KEYWORD-#, "'" + keyword + "' should be recognized as keyword")
|
||||
assert(tokens[0].value == keyword, "Keyword value should match")
|
||||
end
|
||||
|
||||
@ -166,41 +167,41 @@ def test_operators_and_delimiters()
|
||||
print("Testing operators and delimiters...")
|
||||
|
||||
var operator_tests = [
|
||||
["=", animation_dsl.Token.ASSIGN],
|
||||
["==", animation_dsl.Token.EQUAL],
|
||||
["!=", animation_dsl.Token.NOT_EQUAL],
|
||||
["<", animation_dsl.Token.LESS_THAN],
|
||||
["<=", animation_dsl.Token.LESS_EQUAL],
|
||||
[">", animation_dsl.Token.GREATER_THAN],
|
||||
[">=", animation_dsl.Token.GREATER_EQUAL],
|
||||
["&&", animation_dsl.Token.LOGICAL_AND],
|
||||
["||", animation_dsl.Token.LOGICAL_OR],
|
||||
["!", animation_dsl.Token.LOGICAL_NOT],
|
||||
["+", animation_dsl.Token.PLUS],
|
||||
["-", animation_dsl.Token.MINUS],
|
||||
["*", animation_dsl.Token.MULTIPLY],
|
||||
["/", animation_dsl.Token.DIVIDE],
|
||||
["%", animation_dsl.Token.MODULO],
|
||||
["^", animation_dsl.Token.POWER],
|
||||
["(", animation_dsl.Token.LEFT_PAREN],
|
||||
[")", animation_dsl.Token.RIGHT_PAREN],
|
||||
["{", animation_dsl.Token.LEFT_BRACE],
|
||||
["}", animation_dsl.Token.RIGHT_BRACE],
|
||||
["[", animation_dsl.Token.LEFT_BRACKET],
|
||||
["]", animation_dsl.Token.RIGHT_BRACKET],
|
||||
[",", animation_dsl.Token.COMMA],
|
||||
[";", animation_dsl.Token.SEMICOLON],
|
||||
[":", animation_dsl.Token.COLON],
|
||||
[".", animation_dsl.Token.DOT],
|
||||
["->", animation_dsl.Token.ARROW]
|
||||
["=", 8 #-animation_dsl.Token.ASSIGN-#],
|
||||
["==", 15 #-animation_dsl.Token.EQUAL-#],
|
||||
["!=", 16 #-animation_dsl.Token.NOT_EQUAL-#],
|
||||
["<", 17 #-animation_dsl.Token.LESS_THAN-#],
|
||||
["<=", 18 #-animation_dsl.Token.LESS_EQUAL-#],
|
||||
[">", 19 #-animation_dsl.Token.GREATER_THAN-#],
|
||||
[">=", 20 #-animation_dsl.Token.GREATER_EQUAL-#],
|
||||
["&&", 21 #-animation_dsl.Token.LOGICAL_AND-#],
|
||||
["||", 22 #-animation_dsl.Token.LOGICAL_OR-#],
|
||||
["!", 23 #-animation_dsl.Token.LOGICAL_NOT-#],
|
||||
["+", 9 #-animation_dsl.Token.PLUS-#],
|
||||
["-", 10 #-animation_dsl.Token.MINUS-#],
|
||||
["*", 11 #-animation_dsl.Token.MULTIPLY-#],
|
||||
["/", 12 #-animation_dsl.Token.DIVIDE-#],
|
||||
["%", 13 #-animation_dsl.Token.MODULO-#],
|
||||
["^", 14 #-animation_dsl.Token.POWER-#],
|
||||
["(", 24 #-animation_dsl.Token.LEFT_PAREN-#],
|
||||
[")", 25 #-animation_dsl.Token.RIGHT_PAREN-#],
|
||||
["{", 26 #-animation_dsl.Token.LEFT_BRACE-#],
|
||||
["}", 27 #-animation_dsl.Token.RIGHT_BRACE-#],
|
||||
["[", 28 #-animation_dsl.Token.LEFT_BRACKET-#],
|
||||
["]", 29 #-animation_dsl.Token.RIGHT_BRACKET-#],
|
||||
[",", 30 #-animation_dsl.Token.COMMA-#],
|
||||
[";", 31 #-animation_dsl.Token.SEMICOLON-#],
|
||||
[":", 32 #-animation_dsl.Token.COLON-#],
|
||||
[".", 33 #-animation_dsl.Token.DOT-#],
|
||||
["->", 34 #-animation_dsl.Token.ARROW-#]
|
||||
]
|
||||
|
||||
for test : operator_tests
|
||||
var op = test[0]
|
||||
var expected_type = test[1]
|
||||
|
||||
var lexer = animation_dsl.DSLLexer("a " + op + " b")
|
||||
var tokens = lexer.tokenize()
|
||||
var lexer = animation_dsl.create_lexer("a " + op + " b")
|
||||
var tokens = extract_all_tokens(lexer)
|
||||
|
||||
var found_operator = false
|
||||
for token : tokens
|
||||
@ -210,7 +211,7 @@ def test_operators_and_delimiters()
|
||||
end
|
||||
end
|
||||
|
||||
assert(found_operator, "Should recognize '" + op + "' as " + animation_dsl.Token.to_string(expected_type))
|
||||
assert(found_operator, "Should recognize '" + op + "' as " + animation_dsl.Token.names[expected_type])
|
||||
end
|
||||
|
||||
print("✓ Operators and delimiters test passed")
|
||||
@ -228,12 +229,12 @@ def test_string_literals()
|
||||
]
|
||||
|
||||
for str_test : string_tests
|
||||
var lexer = animation_dsl.DSLLexer("text = " + str_test)
|
||||
var tokens = lexer.tokenize()
|
||||
var lexer = animation_dsl.create_lexer("text = " + str_test)
|
||||
var tokens = extract_all_tokens(lexer)
|
||||
|
||||
var found_string = false
|
||||
for token : tokens
|
||||
if token.type == animation_dsl.Token.STRING
|
||||
if token.type == 3 #-animation_dsl.Token.STRING-#
|
||||
found_string = true
|
||||
break
|
||||
end
|
||||
@ -245,8 +246,8 @@ def test_string_literals()
|
||||
|
||||
# Test unterminated string (should raise exception)
|
||||
try
|
||||
var lexer = animation_dsl.DSLLexer('text = "unterminated string')
|
||||
var tokens = lexer.tokenize()
|
||||
var lexer = animation_dsl.create_lexer('text = "unterminated string')
|
||||
var tokens = extract_all_tokens(lexer)
|
||||
assert(false, "Unterminated string should raise exception")
|
||||
except "lexical_error" as e, msg
|
||||
# Expected - unterminated string should raise lexical_error
|
||||
@ -267,12 +268,12 @@ def test_variable_references()
|
||||
]
|
||||
|
||||
for var_test : var_tests
|
||||
var lexer = animation_dsl.DSLLexer("value = " + var_test)
|
||||
var tokens = lexer.tokenize()
|
||||
var lexer = animation_dsl.create_lexer("value = " + var_test)
|
||||
var tokens = extract_all_tokens(lexer)
|
||||
|
||||
var found_var_ref = false
|
||||
for token : tokens
|
||||
if token.type == animation_dsl.Token.VARIABLE_REF && token.value == var_test
|
||||
if token.type == 36 #-animation_dsl.Token.VARIABLE_REF-# && token.value == var_test
|
||||
found_var_ref = true
|
||||
break
|
||||
end
|
||||
@ -285,8 +286,8 @@ def test_variable_references()
|
||||
var invalid_tests = ["$123", "$"]
|
||||
for invalid_test : invalid_tests
|
||||
try
|
||||
var lexer = animation_dsl.DSLLexer("value = " + invalid_test)
|
||||
var tokens = lexer.tokenize()
|
||||
var lexer = animation_dsl.create_lexer("value = " + invalid_test)
|
||||
var tokens = extract_all_tokens(lexer)
|
||||
assert(false, "Invalid variable reference should raise exception: " + invalid_test)
|
||||
except "lexical_error" as e, msg
|
||||
# Expected - invalid variable reference should raise lexical_error
|
||||
@ -307,12 +308,12 @@ def test_comments()
|
||||
]
|
||||
|
||||
for comment_test : comment_tests
|
||||
var lexer = animation_dsl.DSLLexer(comment_test)
|
||||
var tokens = lexer.tokenize()
|
||||
var lexer = animation_dsl.create_lexer(comment_test)
|
||||
var tokens = extract_all_tokens(lexer)
|
||||
|
||||
var found_comment = false
|
||||
for token : tokens
|
||||
if token.type == animation_dsl.Token.COMMENT
|
||||
if token.type == 37 #-animation_dsl.Token.COMMENT-#
|
||||
found_comment = true
|
||||
break
|
||||
end
|
||||
@ -355,15 +356,15 @@ def test_complex_dsl()
|
||||
"# Execution\n" +
|
||||
"run campfire"
|
||||
|
||||
var lexer = animation_dsl.DSLLexer(complex_dsl)
|
||||
var tokens = lexer.tokenize()
|
||||
var lexer = animation_dsl.create_lexer(complex_dsl)
|
||||
var tokens = extract_all_tokens(lexer)
|
||||
|
||||
assert(size(tokens) > 50, "Should have many tokens")
|
||||
|
||||
# Count token types
|
||||
var token_counts = {}
|
||||
for token : tokens
|
||||
var type_name = animation_dsl.Token.to_string(token.type)
|
||||
var type_name = animation_dsl.Token.names[token.type]
|
||||
if token_counts.contains(type_name)
|
||||
token_counts[type_name] += 1
|
||||
else
|
||||
@ -389,8 +390,8 @@ def test_error_handling()
|
||||
|
||||
# Test invalid characters (should raise exception)
|
||||
try
|
||||
var lexer1 = animation_dsl.DSLLexer("color red = @invalid")
|
||||
var tokens1 = lexer1.tokenize()
|
||||
var lexer1 = animation_dsl.create_lexer("color red = @invalid")
|
||||
var tokens1 = extract_all_tokens(lexer1)
|
||||
assert(false, "Invalid character should raise exception")
|
||||
except "lexical_error" as e, msg
|
||||
# Expected - invalid character should raise lexical_error
|
||||
@ -399,8 +400,8 @@ def test_error_handling()
|
||||
|
||||
# Test invalid hex color (should raise exception)
|
||||
try
|
||||
var lexer2 = animation_dsl.DSLLexer("color red = 0xGGGGGG")
|
||||
var tokens2 = lexer2.tokenize()
|
||||
var lexer2 = animation_dsl.create_lexer("color red = 0xGGGGGG")
|
||||
var tokens2 = extract_all_tokens(lexer2)
|
||||
assert(false, "Invalid hex color should raise exception")
|
||||
except "lexical_error" as e, msg
|
||||
# Expected - invalid hex color should raise lexical_error
|
||||
@ -409,8 +410,8 @@ def test_error_handling()
|
||||
|
||||
# Test unterminated string (should raise exception)
|
||||
try
|
||||
var lexer3 = animation_dsl.DSLLexer('text = "unterminated')
|
||||
var tokens3 = lexer3.tokenize()
|
||||
var lexer3 = animation_dsl.create_lexer('text = "unterminated')
|
||||
var tokens3 = extract_all_tokens(lexer3)
|
||||
assert(false, "Unterminated string should raise exception")
|
||||
except "lexical_error" as e, msg
|
||||
# Expected - unterminated string should raise lexical_error
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
# DSL Lexer Triple Quotes Test Suite
|
||||
# Tests for triple-quoted string tokenization in DSLLexer
|
||||
# Tests for triple-quoted string tokenization in create_lexer
|
||||
#
|
||||
# Command to run test is:
|
||||
# ./berry -s -g -m lib/libesp32/berry_animation/src -e "import tasmota" lib/libesp32/berry_animation/src/tests/dsl_lexer_triple_quotes_test.be
|
||||
@ -7,18 +7,37 @@
|
||||
import animation_dsl
|
||||
import string
|
||||
|
||||
# Helper function to extract all tokens from a pull lexer (for testing only)
|
||||
def extract_all_tokens(lexer)
|
||||
var tokens = []
|
||||
lexer.reset() # Start from beginning
|
||||
|
||||
while !lexer.at_end()
|
||||
var token = lexer.next_token()
|
||||
|
||||
# EOF token removed - check for nil instead
|
||||
if token == nil
|
||||
break
|
||||
end
|
||||
|
||||
tokens.push(token)
|
||||
end
|
||||
|
||||
return tokens
|
||||
end
|
||||
|
||||
# Test basic triple-quoted string tokenization with double quotes
|
||||
def test_triple_quotes_double()
|
||||
print("Testing triple-quoted string tokenization (triple quotes)...")
|
||||
|
||||
var source = 'berry """\nHello World\n"""'
|
||||
var lexer = animation_dsl.DSLLexer(source)
|
||||
var tokens = lexer.tokenize()
|
||||
var lexer = animation_dsl.create_lexer(source)
|
||||
var tokens = extract_all_tokens(lexer)
|
||||
|
||||
assert(size(tokens) >= 3, "Should have at least 3 tokens (berry, string, EOF)")
|
||||
assert(tokens[0].type == animation_dsl.Token.KEYWORD, "First token should be KEYWORD")
|
||||
assert(size(tokens) >= 2, "Should have at least 2 tokens (berry, string)")
|
||||
assert(tokens[0].type == 0 #-animation_dsl.Token.KEYWORD-#, "First token should be KEYWORD")
|
||||
assert(tokens[0].value == "berry", "First token should be 'berry'")
|
||||
assert(tokens[1].type == animation_dsl.Token.STRING, "Second token should be STRING")
|
||||
assert(tokens[1].type == 3 #-animation_dsl.Token.STRING-#, "Second token should be STRING")
|
||||
assert(tokens[1].value == "\nHello World\n", "String content should be preserved")
|
||||
|
||||
print("✓ Triple-quoted string (double quotes) tokenization test passed")
|
||||
@ -30,13 +49,13 @@ def test_triple_quotes_single()
|
||||
print("Testing triple-quoted string tokenization (single quotes)...")
|
||||
|
||||
var source = "berry '''\nHello World\n'''"
|
||||
var lexer = animation_dsl.DSLLexer(source)
|
||||
var tokens = lexer.tokenize()
|
||||
var lexer = animation_dsl.create_lexer(source)
|
||||
var tokens = extract_all_tokens(lexer)
|
||||
|
||||
assert(size(tokens) >= 3, "Should have at least 3 tokens (berry, string, EOF)")
|
||||
assert(tokens[0].type == animation_dsl.Token.KEYWORD, "First token should be KEYWORD")
|
||||
assert(size(tokens) >= 2, "Should have at least 2 tokens (berry, string)")
|
||||
assert(tokens[0].type == 0 #-animation_dsl.Token.KEYWORD-#, "First token should be KEYWORD")
|
||||
assert(tokens[0].value == "berry", "First token should be 'berry'")
|
||||
assert(tokens[1].type == animation_dsl.Token.STRING, "Second token should be STRING")
|
||||
assert(tokens[1].type == 3 #-animation_dsl.Token.STRING-#, "Second token should be STRING")
|
||||
assert(tokens[1].value == "\nHello World\n", "String content should be preserved")
|
||||
|
||||
print("✓ Triple-quoted string (single quotes) tokenization test passed")
|
||||
@ -48,11 +67,11 @@ def test_multiline_triple_quotes()
|
||||
print("Testing multiline triple-quoted string tokenization...")
|
||||
|
||||
var source = 'berry """\nLine 1\nLine 2\nLine 3\n"""'
|
||||
var lexer = animation_dsl.DSLLexer(source)
|
||||
var tokens = lexer.tokenize()
|
||||
var lexer = animation_dsl.create_lexer(source)
|
||||
var tokens = extract_all_tokens(lexer)
|
||||
|
||||
assert(size(tokens) >= 3, "Should have at least 3 tokens")
|
||||
assert(tokens[1].type == animation_dsl.Token.STRING, "Second token should be STRING")
|
||||
assert(size(tokens) >= 2, "Should have at least 2 tokens")
|
||||
assert(tokens[1].type == 3 #-animation_dsl.Token.STRING-#, "Second token should be STRING")
|
||||
|
||||
var expected_content = "\nLine 1\nLine 2\nLine 3\n"
|
||||
assert(tokens[1].value == expected_content, f"String content should be '{expected_content}', got '{tokens[1].value}'")
|
||||
@ -66,11 +85,11 @@ def test_embedded_quotes()
|
||||
print("Testing triple-quoted string with embedded quotes...")
|
||||
|
||||
var source = 'berry """\nprint("Hello")\nvar x = \'world\'\n"""'
|
||||
var lexer = animation_dsl.DSLLexer(source)
|
||||
var tokens = lexer.tokenize()
|
||||
var lexer = animation_dsl.create_lexer(source)
|
||||
var tokens = extract_all_tokens(lexer)
|
||||
|
||||
assert(size(tokens) >= 3, "Should have at least 3 tokens")
|
||||
assert(tokens[1].type == animation_dsl.Token.STRING, "Second token should be STRING")
|
||||
assert(size(tokens) >= 2, "Should have at least 2 tokens")
|
||||
assert(tokens[1].type == 3 #-animation_dsl.Token.STRING-#, "Second token should be STRING")
|
||||
|
||||
var expected_content = '\nprint("Hello")\nvar x = \'world\'\n'
|
||||
assert(tokens[1].value == expected_content, f"String content should preserve embedded quotes")
|
||||
@ -84,11 +103,11 @@ def test_empty_triple_quotes()
|
||||
print("Testing empty triple-quoted string...")
|
||||
|
||||
var source = 'berry """"""'
|
||||
var lexer = animation_dsl.DSLLexer(source)
|
||||
var tokens = lexer.tokenize()
|
||||
var lexer = animation_dsl.create_lexer(source)
|
||||
var tokens = extract_all_tokens(lexer)
|
||||
|
||||
assert(size(tokens) >= 3, "Should have at least 3 tokens")
|
||||
assert(tokens[1].type == animation_dsl.Token.STRING, "Second token should be STRING")
|
||||
assert(size(tokens) >= 2, "Should have at least 2 tokens")
|
||||
assert(tokens[1].type == 3 #-animation_dsl.Token.STRING-#, "Second token should be STRING")
|
||||
assert(tokens[1].value == "", "Empty string should have empty value")
|
||||
|
||||
print("✓ Empty triple-quoted string test passed")
|
||||
@ -100,11 +119,11 @@ def test_unterminated_triple_quotes()
|
||||
print("Testing unterminated triple-quoted string...")
|
||||
|
||||
var source = 'berry """\nUnterminated string'
|
||||
var lexer = animation_dsl.DSLLexer(source)
|
||||
|
||||
# Should raise an exception for unterminated string
|
||||
# Should raise an exception when trying to extract tokens (pull-mode lexer)
|
||||
try
|
||||
var tokens = lexer.tokenize()
|
||||
var lexer = animation_dsl.create_lexer(source)
|
||||
var tokens = extract_all_tokens(lexer) # This should trigger the error
|
||||
assert(false, "Should raise exception for unterminated triple-quoted string")
|
||||
except "lexical_error" as e, msg
|
||||
# Expected - unterminated string should raise lexical_error
|
||||
@ -129,11 +148,11 @@ def test_complex_content()
|
||||
'print("Result:", result)\n' +
|
||||
'"""'
|
||||
|
||||
var lexer = animation_dsl.DSLLexer(source)
|
||||
var tokens = lexer.tokenize()
|
||||
var lexer = animation_dsl.create_lexer(source)
|
||||
var tokens = extract_all_tokens(lexer)
|
||||
|
||||
assert(size(tokens) >= 3, "Should have at least 3 tokens")
|
||||
assert(tokens[1].type == animation_dsl.Token.STRING, "Second token should be STRING")
|
||||
assert(size(tokens) >= 2, "Should have at least 2 tokens")
|
||||
assert(tokens[1].type == 3 #-animation_dsl.Token.STRING-#, "Second token should be STRING")
|
||||
|
||||
var content = tokens[1].value
|
||||
assert(string.find(content, "import math") >= 0, "Should contain import statement")
|
||||
@ -150,13 +169,13 @@ def test_mixed_quote_types()
|
||||
print("Testing that triple quotes don't interfere with regular quotes...")
|
||||
|
||||
var source = 'color red = 0xFF0000\nberry """\ntest\n"""\nvar x = "normal string"'
|
||||
var lexer = animation_dsl.DSLLexer(source)
|
||||
var tokens = lexer.tokenize()
|
||||
var lexer = animation_dsl.create_lexer(source)
|
||||
var tokens = extract_all_tokens(lexer)
|
||||
|
||||
# Find the normal string token
|
||||
var normal_string_found = false
|
||||
for token : tokens
|
||||
if token.type == animation_dsl.Token.STRING && token.value == "normal string"
|
||||
if token.type == 3 #-animation_dsl.Token.STRING-# && token.value == "normal string"
|
||||
normal_string_found = true
|
||||
break
|
||||
end
|
||||
|
||||
@ -1,253 +0,0 @@
|
||||
# DSL Runtime Integration Test
|
||||
# Tests the complete DSL execution lifecycle and file loading
|
||||
#
|
||||
# Command to run test is:
|
||||
# ./berry -s -g -m lib/libesp32/berry_animation -e "import tasmota" lib/libesp32/berry_animation/tests/dsl_runtime_test.be
|
||||
|
||||
import string
|
||||
import animation
|
||||
import animation_dsl
|
||||
|
||||
def test_dsl_runtime()
|
||||
print("=== DSL Runtime Integration Test ===")
|
||||
|
||||
# Create strip and runtime
|
||||
var strip = global.Leds(30)
|
||||
var runtime = animation_dsl.create_runtime(strip, true) # Debug mode enabled
|
||||
|
||||
var tests_passed = 0
|
||||
var tests_total = 0
|
||||
|
||||
# Test 1: Basic DSL loading and execution
|
||||
tests_total += 1
|
||||
print("\nTest 1: Basic DSL loading")
|
||||
|
||||
var simple_dsl =
|
||||
"# strip length 30 # TEMPORARILY DISABLED\n"
|
||||
"color custom_red = 0xFF0000\n"
|
||||
"animation red_anim = pulsating_animation(color=static_color(color=custom_red), period=2s)\n"
|
||||
"sequence demo {\n"
|
||||
" play red_anim for 1s\n"
|
||||
"}\n"
|
||||
"run demo"
|
||||
|
||||
if runtime.load_dsl(simple_dsl)
|
||||
print("✓ Basic DSL loading successful")
|
||||
tests_passed += 1
|
||||
else
|
||||
print("✗ Basic DSL loading failed")
|
||||
end
|
||||
|
||||
# Test 2: Reload functionality
|
||||
tests_total += 1
|
||||
print("\nTest 2: Reload functionality")
|
||||
|
||||
# Load same DSL again - should work without issues
|
||||
if runtime.load_dsl(simple_dsl)
|
||||
print("✓ DSL reload successful")
|
||||
tests_passed += 1
|
||||
else
|
||||
print("✗ DSL reload failed")
|
||||
end
|
||||
|
||||
# Test 3: Generated code inspection
|
||||
tests_total += 1
|
||||
print("\nTest 3: Generated code inspection")
|
||||
|
||||
try
|
||||
var generated_code = runtime.get_generated_code(simple_dsl)
|
||||
if generated_code != nil && size(generated_code) > 0
|
||||
print("✓ Generated code available")
|
||||
print(f"Generated code length: {size(generated_code)} characters")
|
||||
|
||||
# Check for expected content
|
||||
if string.find(generated_code, "import animation") >= 0 &&
|
||||
string.find(generated_code, "var custom_red_") >= 0
|
||||
print("✓ Generated code contains expected elements")
|
||||
tests_passed += 1
|
||||
else
|
||||
print("✗ Generated code missing expected elements")
|
||||
print("Generated code preview:")
|
||||
print(generated_code[0..200] + "...")
|
||||
end
|
||||
else
|
||||
print("✗ Generated code not available")
|
||||
end
|
||||
except "dsl_compilation_error" as e, msg
|
||||
print("✗ Generated code compilation failed: " + msg)
|
||||
end
|
||||
|
||||
# Test 4: Error handling
|
||||
tests_total += 1
|
||||
print("\nTest 4: Error handling")
|
||||
|
||||
var invalid_dsl = "color invalid_syntax = \n" +
|
||||
"animation broken = unknown_function(param=value)"
|
||||
|
||||
if !runtime.load_dsl(invalid_dsl)
|
||||
print("✓ Error handling working - invalid DSL rejected")
|
||||
tests_passed += 1
|
||||
else
|
||||
print("✗ Error handling failed - invalid DSL accepted")
|
||||
end
|
||||
|
||||
# Test 5: DSL reload functionality
|
||||
tests_total += 1
|
||||
print("\nTest 5: DSL reload functionality")
|
||||
|
||||
if runtime.reload_dsl()
|
||||
print("✓ DSL reload successful")
|
||||
tests_passed += 1
|
||||
else
|
||||
print("✗ DSL reload failed")
|
||||
end
|
||||
|
||||
# Test 6: Multiple DSL sources
|
||||
tests_total += 1
|
||||
print("\nTest 6: Multiple DSL sources")
|
||||
|
||||
var dsl1 =
|
||||
"# strip length 30 # TEMPORARILY DISABLED\n" +
|
||||
"color custom_blue = 0x0000FF\n" +
|
||||
"animation blue_anim = pulsating_animation(color=static_color(color=custom_blue), period=2s)\n" +
|
||||
"sequence blue_demo {\n" +
|
||||
" play blue_anim for 1s\n" +
|
||||
"}\n" +
|
||||
"run blue_demo"
|
||||
|
||||
var dsl2 =
|
||||
"# strip length 30 # TEMPORARILY DISABLED\n" +
|
||||
"color custom_green = 0x00FF00\n" +
|
||||
"animation green_anim = pulsating_animation(color=static_color(color=custom_green), period=2s)\n" +
|
||||
"sequence green_demo {\n" +
|
||||
" play green_anim for 1s\n" +
|
||||
"}\n" +
|
||||
"run green_demo"
|
||||
|
||||
if runtime.load_dsl(dsl1) && runtime.load_dsl(dsl2)
|
||||
print("✓ Multiple DSL sources loaded successfully")
|
||||
tests_passed += 1
|
||||
else
|
||||
print("✗ Failed to load multiple DSL sources")
|
||||
end
|
||||
|
||||
# Test 7: Runtime state management
|
||||
tests_total += 1
|
||||
print("\nTest 7: Runtime state management")
|
||||
|
||||
if runtime.is_loaded() && runtime.get_active_source() != nil
|
||||
print("✓ Runtime state management working")
|
||||
tests_passed += 1
|
||||
else
|
||||
print("✗ Runtime state management failed")
|
||||
end
|
||||
|
||||
# Test 8: engine access
|
||||
tests_total += 1
|
||||
print("\nTest 8: engine access")
|
||||
|
||||
var engine = runtime.get_engine()
|
||||
if engine != nil
|
||||
print("✓ Engine access working")
|
||||
tests_passed += 1
|
||||
else
|
||||
print("✗ engine access failed")
|
||||
end
|
||||
|
||||
# Final results
|
||||
print(f"\n=== DSL Runtime Test Results ===")
|
||||
print(f"Tests passed: {tests_passed}/{tests_total}")
|
||||
print(f"Success rate: {tests_passed * 100 / tests_total}%")
|
||||
|
||||
if tests_passed == tests_total
|
||||
print("🎉 All DSL Runtime tests passed!")
|
||||
return true
|
||||
else
|
||||
print("❌ Some DSL Runtime tests failed")
|
||||
raise "test_failed"
|
||||
end
|
||||
end
|
||||
|
||||
def test_dsl_file_operations()
|
||||
print("\n=== DSL File Operations Test ===")
|
||||
|
||||
# Create a test DSL file
|
||||
var test_filename = "/tmp/test_animation.dsl"
|
||||
var test_dsl_content = "# strip length 20 # TEMPORARILY DISABLED\n" +
|
||||
"color custom_purple = 0x800080\n" +
|
||||
"animation purple_anim = pulsating_animation(color=static_color(color=custom_purple), period=2s)\n" +
|
||||
"sequence file_test {\n" +
|
||||
" play purple_anim for 2s\n" +
|
||||
"}\n" +
|
||||
"run file_test"
|
||||
|
||||
try
|
||||
# Write test file
|
||||
var file = open(test_filename, "w")
|
||||
if file != nil
|
||||
file.write(test_dsl_content)
|
||||
file.close()
|
||||
print(f"✓ Test file created: {test_filename}")
|
||||
|
||||
# Test file loading
|
||||
var strip = global.Leds(20)
|
||||
var runtime = animation_dsl.create_runtime(strip, true)
|
||||
|
||||
if runtime.load_dsl_file(test_filename)
|
||||
print("✓ DSL file loading successful")
|
||||
|
||||
# Verify content was loaded
|
||||
var active_source = runtime.get_active_source()
|
||||
if active_source != nil && string.find(active_source, "custom_purple") >= 0
|
||||
print("✓ File content loaded correctly")
|
||||
return true
|
||||
else
|
||||
print("✗ File content not loaded correctly")
|
||||
end
|
||||
else
|
||||
print("✗ DSL file loading failed")
|
||||
end
|
||||
else
|
||||
print("✗ Could not create test file")
|
||||
end
|
||||
|
||||
except .. as e, msg
|
||||
print(f"File operations test skipped: {msg}")
|
||||
return true # Skip file tests if filesystem not available
|
||||
end
|
||||
|
||||
return false
|
||||
end
|
||||
|
||||
# Run the tests
|
||||
def run_all_dsl_runtime_tests()
|
||||
print("Starting DSL Runtime Integration Tests...")
|
||||
|
||||
var basic_tests_passed = test_dsl_runtime()
|
||||
var file_tests_passed = test_dsl_file_operations()
|
||||
|
||||
print(f"\n=== Overall DSL Runtime Test Results ===")
|
||||
if basic_tests_passed
|
||||
print("✓ Core runtime tests: PASSED")
|
||||
else
|
||||
print("✗ Core runtime tests: FAILED")
|
||||
raise "failed_tests"
|
||||
end
|
||||
|
||||
if file_tests_passed
|
||||
print("✓ File operation tests: PASSED")
|
||||
else
|
||||
print("✓ File operation tests: SKIPPED (filesystem not available)")
|
||||
raise "failed_tests"
|
||||
end
|
||||
|
||||
return basic_tests_passed
|
||||
end
|
||||
|
||||
run_all_dsl_runtime_tests()
|
||||
|
||||
return {
|
||||
"test_dsl_runtime": test_dsl_runtime,
|
||||
"test_dsl_file_operations": test_dsl_file_operations,
|
||||
"run_all_dsl_runtime_tests": run_all_dsl_runtime_tests
|
||||
}
|
||||
@ -8,6 +8,25 @@ import animation
|
||||
import animation_dsl
|
||||
import string
|
||||
|
||||
# Helper function to extract all tokens from a pull lexer (for testing only)
|
||||
def extract_all_tokens(lexer)
|
||||
var tokens = []
|
||||
lexer.reset() # Start from beginning
|
||||
|
||||
while !lexer.at_end()
|
||||
var token = lexer.next_token()
|
||||
|
||||
# EOF token removed - check for nil instead
|
||||
if token == nil
|
||||
break
|
||||
end
|
||||
|
||||
tokens.push(token)
|
||||
end
|
||||
|
||||
return tokens
|
||||
end
|
||||
|
||||
# Test basic transpilation
|
||||
def test_basic_transpilation()
|
||||
print("Testing basic DSL transpilation...")
|
||||
@ -630,9 +649,8 @@ def test_forward_references()
|
||||
var compilation_failed = false
|
||||
|
||||
try
|
||||
var lexer = animation_dsl.DSLLexer(dsl_source)
|
||||
var tokens = lexer.tokenize()
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(tokens)
|
||||
var lexer = animation_dsl.create_lexer(dsl_source)
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(lexer)
|
||||
berry_code = transpiler.transpile()
|
||||
except "dsl_compilation_error" as e, msg
|
||||
compilation_failed = true
|
||||
@ -700,23 +718,12 @@ def test_complex_dsl()
|
||||
print("Complex DSL compilation failed - checking for specific issues...")
|
||||
|
||||
# Test individual components
|
||||
var lexer = animation_dsl.DSLLexer(complex_dsl)
|
||||
var tokens = lexer.tokenize()
|
||||
var lexer = animation_dsl.create_lexer(complex_dsl)
|
||||
|
||||
if lexer.has_errors()
|
||||
print("Lexical errors found:")
|
||||
print(lexer.get_error_report())
|
||||
else
|
||||
print("Lexical analysis passed")
|
||||
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(tokens)
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(lexer)
|
||||
var result = transpiler.transpile()
|
||||
|
||||
if transpiler.has_errors()
|
||||
print("Transpilation errors found:")
|
||||
print(transpiler.get_error_report())
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
print("✓ Complex DSL test completed")
|
||||
@ -731,11 +738,13 @@ def test_transpiler_components()
|
||||
print("Testing basic transpiler instantiation...")
|
||||
|
||||
# Test token processing
|
||||
var lexer = animation_dsl.DSLLexer("color red = 0xFF0000")
|
||||
var tokens = lexer.tokenize()
|
||||
var lexer = animation_dsl.create_lexer("color red = 0xFF0000")
|
||||
var tokens = extract_all_tokens(lexer)
|
||||
assert(size(tokens) >= 4, "Should have multiple tokens")
|
||||
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(tokens)
|
||||
# Reset lexer position before creating transpiler
|
||||
lexer.reset()
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(lexer)
|
||||
assert(!transpiler.at_end(), "Should not be at end initially")
|
||||
|
||||
print("✓ Transpiler components test passed")
|
||||
|
||||
@ -4,6 +4,25 @@
|
||||
import animation
|
||||
import animation_dsl
|
||||
|
||||
# Helper function to extract all tokens from a pull lexer (for testing only)
|
||||
def extract_all_tokens(lexer)
|
||||
var tokens = []
|
||||
lexer.reset() # Start from beginning
|
||||
|
||||
while !lexer.at_end()
|
||||
var token = lexer.next_token()
|
||||
|
||||
# EOF token removed - check for nil instead
|
||||
if token == nil
|
||||
break
|
||||
end
|
||||
|
||||
tokens.push(token)
|
||||
end
|
||||
|
||||
return tokens
|
||||
end
|
||||
|
||||
# Test basic palette definition and compilation
|
||||
def test_palette_definition()
|
||||
print("Testing palette definition...")
|
||||
@ -359,12 +378,12 @@ def test_palette_keyword_recognition()
|
||||
print("Testing palette keyword recognition...")
|
||||
|
||||
var simple_palette_dsl = "palette test = [(0, #FF0000)]"
|
||||
var lexer = animation_dsl.DSLLexer(simple_palette_dsl)
|
||||
var tokens = lexer.tokenize()
|
||||
var lexer = animation_dsl.create_lexer(simple_palette_dsl)
|
||||
var tokens = extract_all_tokens(lexer)
|
||||
|
||||
var found_palette_keyword = false
|
||||
for token : tokens
|
||||
if token.type == animation_dsl.Token.KEYWORD && token.value == "palette"
|
||||
if token.type == 0 #-animation_dsl.Token.KEYWORD-# && token.value == "palette"
|
||||
found_palette_keyword = true
|
||||
break
|
||||
end
|
||||
|
||||
1015
lib/libesp32/berry_animation/src/tests/pull_lexer_test.be
Normal file
1015
lib/libesp32/berry_animation/src/tests/pull_lexer_test.be
Normal file
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,158 @@
|
||||
# Test for Pull Lexer Interface with Transpiler
|
||||
# Verifies that the transpiler works correctly with both token array and pull lexer
|
||||
|
||||
import animation_dsl
|
||||
|
||||
def test_pull_lexer_basic()
|
||||
print("=== Testing Pull Lexer Basic Functionality ===")
|
||||
|
||||
var dsl_source = "# Simple DSL test\n" +
|
||||
"color my_red = 0xFF0000\n" +
|
||||
"animation pulse = pulsating_animation(color=my_red, period=2s)\n" +
|
||||
"run pulse"
|
||||
|
||||
# Test with new create_lexer interface (uses pull lexer internally)
|
||||
var pull_lexer = animation_dsl.create_lexer(dsl_source)
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(pull_lexer)
|
||||
var berry_code = transpiler.transpile()
|
||||
|
||||
print("New create_lexer Result (using pull lexer internally):")
|
||||
print(berry_code)
|
||||
print()
|
||||
|
||||
# Test with direct pull lexer creation
|
||||
var direct_pull_lexer = animation_dsl.create_lexer(dsl_source)
|
||||
var direct_transpiler = animation_dsl.SimpleDSLTranspiler(direct_pull_lexer)
|
||||
var direct_berry_code = direct_transpiler.transpile()
|
||||
|
||||
print("Direct Pull Lexer Result:")
|
||||
print(direct_berry_code)
|
||||
print()
|
||||
|
||||
# Compare results - they should be identical
|
||||
if berry_code == direct_berry_code
|
||||
print("✅ SUCCESS: create_lexer and direct pull lexer produce identical results")
|
||||
else
|
||||
print("❌ FAILURE: Results differ between create_lexer and direct pull lexer")
|
||||
print("Differences found!")
|
||||
end
|
||||
|
||||
return berry_code == direct_berry_code
|
||||
end
|
||||
|
||||
def test_pull_lexer_template_mode()
|
||||
print("=== Testing Pull Lexer Template Mode ===")
|
||||
|
||||
var template_source = "animation test = solid(color=red)\n" +
|
||||
"test.opacity = 200\n" +
|
||||
"run test"
|
||||
|
||||
# Test with template mode enabled
|
||||
var pull_lexer = animation_dsl.create_lexer(template_source)
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(pull_lexer)
|
||||
|
||||
var berry_code = transpiler.transpile_template_body()
|
||||
|
||||
print("Template Body Result:")
|
||||
print(berry_code)
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
def test_pull_lexer_token_access()
|
||||
print("=== Testing Pull Lexer Token Access Methods ===")
|
||||
|
||||
var dsl_source = "color red = 0xFF0000"
|
||||
var pull_lexer = animation_dsl.create_lexer(dsl_source)
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(pull_lexer)
|
||||
|
||||
print("Testing token access methods:")
|
||||
|
||||
# Test current()
|
||||
var current_token = transpiler.current()
|
||||
print(f"Current token: {current_token.tostring()}")
|
||||
|
||||
# Test peek()
|
||||
var next_token = transpiler.peek()
|
||||
print(f"Next token: {next_token.tostring()}")
|
||||
|
||||
# Test next()
|
||||
var consumed_token = transpiler.next()
|
||||
print(f"Consumed token: {consumed_token.tostring()}")
|
||||
|
||||
# Test current() after next()
|
||||
current_token = transpiler.current()
|
||||
print(f"Current token after next(): {current_token.tostring()}")
|
||||
|
||||
# Test at_end()
|
||||
print(f"At end: {transpiler.at_end()}")
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
def test_pull_lexer_position_info()
|
||||
print("=== Testing Pull Lexer Position Information ===")
|
||||
|
||||
var dsl_source = "color red = 0xFF0000\n" +
|
||||
"animation pulse = pulsating_animation(color=red)"
|
||||
|
||||
var pull_lexer = animation_dsl.create_lexer(dsl_source)
|
||||
|
||||
# Consume a few tokens
|
||||
pull_lexer.next_token() # color
|
||||
pull_lexer.next_token() # red
|
||||
pull_lexer.next_token() # =
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
def run_all_tests()
|
||||
print("Running Pull Lexer Transpiler Tests")
|
||||
print("=" * 50)
|
||||
|
||||
var tests = [
|
||||
test_pull_lexer_basic,
|
||||
test_pull_lexer_template_mode,
|
||||
test_pull_lexer_token_access,
|
||||
test_pull_lexer_position_info
|
||||
]
|
||||
|
||||
var passed = 0
|
||||
var total = size(tests)
|
||||
|
||||
for test_func : tests
|
||||
try
|
||||
if test_func()
|
||||
passed += 1
|
||||
print("✅ PASSED\n")
|
||||
else
|
||||
print("❌ FAILED\n")
|
||||
end
|
||||
except .. as e, msg
|
||||
print(f"❌ ERROR: {msg}\n")
|
||||
end
|
||||
end
|
||||
|
||||
print("=" * 50)
|
||||
print(f"Results: {passed}/{total} tests passed")
|
||||
|
||||
if passed == total
|
||||
print("🎉 All tests passed!")
|
||||
else
|
||||
print("⚠️ Some tests failed")
|
||||
raise "test_failed"
|
||||
end
|
||||
|
||||
return passed == total
|
||||
end
|
||||
|
||||
# Run tests when this file is executed directly
|
||||
run_all_tests()
|
||||
|
||||
return {
|
||||
"test_pull_lexer_basic": test_pull_lexer_basic,
|
||||
"test_pull_lexer_template_mode": test_pull_lexer_template_mode,
|
||||
"test_pull_lexer_token_access": test_pull_lexer_token_access,
|
||||
"test_pull_lexer_position_info": test_pull_lexer_position_info,
|
||||
"run_all_tests": run_all_tests
|
||||
}
|
||||
@ -16,9 +16,8 @@ def test_basic_symbol_registration()
|
||||
"animation solid_red = solid(color=custom_red)\n" +
|
||||
"animation red_anim = solid_red"
|
||||
|
||||
var lexer = animation_dsl.DSLLexer(dsl_source)
|
||||
var tokens = lexer.tokenize()
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(tokens)
|
||||
var lexer = animation_dsl.create_lexer(dsl_source)
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(lexer)
|
||||
|
||||
# Process the DSL
|
||||
var berry_code = transpiler.transpile()
|
||||
@ -44,9 +43,8 @@ def test_proper_symbol_ordering()
|
||||
var dsl_source = "color custom_red = 0xFF0000\n" +
|
||||
"animation fire_pattern = solid(color=custom_red)"
|
||||
|
||||
var lexer = animation_dsl.DSLLexer(dsl_source)
|
||||
var tokens = lexer.tokenize()
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(tokens)
|
||||
var lexer = animation_dsl.create_lexer(dsl_source)
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(lexer)
|
||||
|
||||
var berry_code = transpiler.transpile()
|
||||
|
||||
@ -70,9 +68,8 @@ def test_undefined_reference_handling()
|
||||
# DSL with undefined reference
|
||||
var dsl_source = "animation test_pattern = solid(color=undefined_color)"
|
||||
|
||||
var lexer = animation_dsl.DSLLexer(dsl_source)
|
||||
var tokens = lexer.tokenize()
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(tokens)
|
||||
var lexer = animation_dsl.create_lexer(dsl_source)
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(lexer)
|
||||
|
||||
# Should detect undefined reference at transpile time and raise exception
|
||||
try
|
||||
@ -96,9 +93,8 @@ def test_builtin_reference_handling()
|
||||
var dsl_source = "animation red_pattern = solid(color=red)\n" +
|
||||
"animation pulse_anim = pulsating_animation(color=red, period=2000)"
|
||||
|
||||
var lexer = animation_dsl.DSLLexer(dsl_source)
|
||||
var tokens = lexer.tokenize()
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(tokens)
|
||||
var lexer = animation_dsl.create_lexer(dsl_source)
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(lexer)
|
||||
|
||||
var berry_code = transpiler.transpile()
|
||||
|
||||
@ -120,9 +116,8 @@ def test_definition_generation()
|
||||
|
||||
var dsl_source = "color custom_blue = 0x0000FF"
|
||||
|
||||
var lexer = animation_dsl.DSLLexer(dsl_source)
|
||||
var tokens = lexer.tokenize()
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(tokens)
|
||||
var lexer = animation_dsl.create_lexer(dsl_source)
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(lexer)
|
||||
|
||||
var berry_code = transpiler.transpile()
|
||||
|
||||
@ -151,9 +146,8 @@ def test_complex_symbol_dependencies()
|
||||
"}\n" +
|
||||
"run demo"
|
||||
|
||||
var lexer = animation_dsl.DSLLexer(dsl_source)
|
||||
var tokens = lexer.tokenize()
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(tokens)
|
||||
var lexer = animation_dsl.create_lexer(dsl_source)
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(lexer)
|
||||
|
||||
var berry_code = transpiler.transpile()
|
||||
|
||||
|
||||
@ -108,6 +108,8 @@ def run_all_tests()
|
||||
|
||||
# DSL tests
|
||||
"lib/libesp32/berry_animation/src/tests/dsl_lexer_test.be",
|
||||
"lib/libesp32/berry_animation/src/tests/pull_lexer_test.be",
|
||||
"lib/libesp32/berry_animation/src/tests/pull_lexer_transpiler_test.be",
|
||||
"lib/libesp32/berry_animation/src/tests/token_test.be",
|
||||
"lib/libesp32/berry_animation/src/tests/global_variable_test.be",
|
||||
"lib/libesp32/berry_animation/src/tests/dsl_transpiler_test.be",
|
||||
@ -115,7 +117,6 @@ def run_all_tests()
|
||||
"lib/libesp32/berry_animation/src/tests/dsl_core_processing_test.be",
|
||||
"lib/libesp32/berry_animation/src/tests/simplified_transpiler_test.be",
|
||||
"lib/libesp32/berry_animation/src/tests/symbol_registry_test.be",
|
||||
"lib/libesp32/berry_animation/src/tests/dsl_runtime_test.be",
|
||||
"lib/libesp32/berry_animation/src/tests/nested_function_calls_test.be",
|
||||
"lib/libesp32/berry_animation/src/tests/user_functions_test.be",
|
||||
"lib/libesp32/berry_animation/src/tests/palette_dsl_test.be",
|
||||
|
||||
@ -7,24 +7,12 @@ import string
|
||||
def test_transpilation_case(dsl_code, expected_methods, test_name)
|
||||
print(f"\n Testing: {test_name}")
|
||||
|
||||
var lexer = animation_dsl.DSLLexer(dsl_code)
|
||||
var tokens
|
||||
|
||||
try
|
||||
tokens = lexer.tokenize()
|
||||
except "lexical_error" as e, msg
|
||||
print(f" ❌ Lexer error: {msg}")
|
||||
return false
|
||||
end
|
||||
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(tokens)
|
||||
var lexer = animation_dsl.create_lexer(dsl_code)
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(lexer)
|
||||
var generated_code = transpiler.transpile()
|
||||
|
||||
if generated_code == nil
|
||||
print(" ❌ Transpilation failed:")
|
||||
for error : transpiler.errors
|
||||
print(f" {error}")
|
||||
end
|
||||
return false
|
||||
end
|
||||
|
||||
@ -64,24 +52,12 @@ end
|
||||
def test_non_math_functions(dsl_code)
|
||||
print("\n Testing: Non-math functions should NOT be prefixed with animation._math.")
|
||||
|
||||
var lexer = animation_dsl.DSLLexer(dsl_code)
|
||||
var tokens
|
||||
|
||||
try
|
||||
tokens = lexer.tokenize()
|
||||
except "lexical_error" as e, msg
|
||||
print(f" ❌ Lexer error: {msg}")
|
||||
return false
|
||||
end
|
||||
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(tokens)
|
||||
var lexer = animation_dsl.create_lexer(dsl_code)
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(lexer)
|
||||
var generated_code = transpiler.transpile()
|
||||
|
||||
if generated_code == nil
|
||||
print(" ❌ Transpilation failed:")
|
||||
for error : transpiler.errors
|
||||
print(f" {error}")
|
||||
end
|
||||
return false
|
||||
end
|
||||
|
||||
@ -110,7 +86,8 @@ end
|
||||
def test_is_math_method_function()
|
||||
print("\nTesting is_math_method() function directly...")
|
||||
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler([])
|
||||
var dummy_lexer = animation_dsl.create_lexer("")
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(dummy_lexer)
|
||||
|
||||
# Test mathematical methods
|
||||
var math_methods = ["min", "max", "abs", "round", "sqrt", "scale", "sin", "cos"]
|
||||
|
||||
@ -10,24 +10,12 @@ import "user_functions" as user_funcs
|
||||
def test_transpilation_case(dsl_code, expected_user_function, test_name)
|
||||
print(f"\n Testing: {test_name}")
|
||||
|
||||
var lexer = animation_dsl.DSLLexer(dsl_code)
|
||||
var tokens
|
||||
|
||||
try
|
||||
tokens = lexer.tokenize()
|
||||
except "lexical_error" as e, msg
|
||||
print(f" ❌ Lexer error: {msg}")
|
||||
return false
|
||||
end
|
||||
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(tokens)
|
||||
var lexer = animation_dsl.create_lexer(dsl_code)
|
||||
var transpiler = animation_dsl.SimpleDSLTranspiler(lexer)
|
||||
var generated_code = transpiler.transpile()
|
||||
|
||||
if generated_code == nil
|
||||
print(" ❌ Transpilation failed:")
|
||||
for error : transpiler.errors
|
||||
print(f" {error}")
|
||||
end
|
||||
return false
|
||||
end
|
||||
|
||||
|
||||
@ -5,46 +5,18 @@ import string
|
||||
import animation
|
||||
import animation_dsl
|
||||
|
||||
# Test Token constants and utilities
|
||||
def test_token_type_constants()
|
||||
print("Testing Token constants...")
|
||||
|
||||
# Test that all constants are defined and unique
|
||||
var token_types = [
|
||||
animation_dsl.Token.KEYWORD, animation_dsl.Token.IDENTIFIER, animation_dsl.Token.NUMBER,
|
||||
animation_dsl.Token.STRING, animation_dsl.Token.COLOR, animation_dsl.Token.TIME,
|
||||
animation_dsl.Token.PERCENTAGE, animation_dsl.Token.MULTIPLIER, animation_dsl.Token.ASSIGN,
|
||||
animation_dsl.Token.PLUS, animation_dsl.Token.MINUS, animation_dsl.Token.MULTIPLY,
|
||||
animation_dsl.Token.DIVIDE, animation_dsl.Token.MODULO, animation_dsl.Token.POWER,
|
||||
animation_dsl.Token.EQUAL, animation_dsl.Token.NOT_EQUAL, animation_dsl.Token.LESS_THAN,
|
||||
animation_dsl.Token.LESS_EQUAL, animation_dsl.Token.GREATER_THAN, animation_dsl.Token.GREATER_EQUAL,
|
||||
animation_dsl.Token.LOGICAL_AND, animation_dsl.Token.LOGICAL_OR, animation_dsl.Token.LOGICAL_NOT,
|
||||
animation_dsl.Token.LEFT_PAREN, animation_dsl.Token.RIGHT_PAREN, animation_dsl.Token.LEFT_BRACE,
|
||||
animation_dsl.Token.RIGHT_BRACE, animation_dsl.Token.LEFT_BRACKET, animation_dsl.Token.RIGHT_BRACKET,
|
||||
animation_dsl.Token.COMMA, animation_dsl.Token.SEMICOLON, animation_dsl.Token.COLON,
|
||||
animation_dsl.Token.DOT, animation_dsl.Token.ARROW, animation_dsl.Token.NEWLINE,
|
||||
animation_dsl.Token.VARIABLE_REF, animation_dsl.Token.COMMENT, animation_dsl.Token.EOF,
|
||||
animation_dsl.Token.ERROR
|
||||
]
|
||||
|
||||
# Check that all values are different
|
||||
var seen = {}
|
||||
for token_type : token_types
|
||||
if seen.contains(token_type)
|
||||
print(f"ERROR: Duplicate token type value: {token_type}")
|
||||
return false
|
||||
end
|
||||
seen[token_type] = true
|
||||
# Local helper functions to replace removed Token methods
|
||||
# These replace the removed is_identifier(), is_type(), and is_keyword() methods from Token class
|
||||
def is_identifier(token, name)
|
||||
return token.type == 1 #-animation_dsl.Token.IDENTIFIER-# && token.value == name
|
||||
end
|
||||
|
||||
# Test to_string method
|
||||
assert(animation_dsl.Token.to_string(animation_dsl.Token.KEYWORD) == "KEYWORD")
|
||||
assert(animation_dsl.Token.to_string(animation_dsl.Token.IDENTIFIER) == "IDENTIFIER")
|
||||
assert(animation_dsl.Token.to_string(animation_dsl.Token.EOF) == "EOF")
|
||||
assert(animation_dsl.Token.to_string(999) == "UNKNOWN")
|
||||
def is_type(token, token_type)
|
||||
return token.type == token_type
|
||||
end
|
||||
|
||||
print("✓ Token constants test passed")
|
||||
return true
|
||||
def is_keyword(token, keyword)
|
||||
return token.type == 0 #-animation_dsl.Token.KEYWORD-# && token.value == keyword
|
||||
end
|
||||
|
||||
# Test Token class basic functionality
|
||||
@ -52,19 +24,19 @@ def test_token_basic()
|
||||
print("Testing Token basic functionality...")
|
||||
|
||||
# Test basic token creation
|
||||
var token = animation_dsl.Token(animation_dsl.Token.KEYWORD, "color", 1, 5, 5)
|
||||
assert(token.type == animation_dsl.Token.KEYWORD)
|
||||
var token = animation_dsl.Token(0 #-animation_dsl.Token.KEYWORD-#, "color", 1, 5, 5)
|
||||
assert(token.type == 0 #-animation_dsl.Token.KEYWORD-#)
|
||||
assert(token.value == "color")
|
||||
assert(token.line == 1)
|
||||
assert(token.column == 5)
|
||||
assert(token.length == 5)
|
||||
|
||||
# Test default length calculation
|
||||
var token2 = animation_dsl.Token(animation_dsl.Token.IDENTIFIER, "red", 2, 10)
|
||||
var token2 = animation_dsl.Token(1 #-animation_dsl.Token.IDENTIFIER-#, "red", 2, 10)
|
||||
assert(token2.length == 3) # Should default to size of "red"
|
||||
|
||||
# Test nil handling
|
||||
var token3 = animation_dsl.Token(animation_dsl.Token.EOF, nil, nil, nil)
|
||||
# Test nil handling (using ERROR token instead of removed EOF)
|
||||
var token3 = animation_dsl.Token(39 #-animation_dsl.Token.ERROR-#, nil, nil, nil)
|
||||
assert(token3.value == "")
|
||||
assert(token3.line == 1)
|
||||
assert(token3.column == 1)
|
||||
@ -77,42 +49,26 @@ end
|
||||
def test_token_type_checking()
|
||||
print("Testing Token type checking methods...")
|
||||
|
||||
var keyword_token = animation_dsl.Token(animation_dsl.Token.KEYWORD, "color", 1, 1)
|
||||
var identifier_token = animation_dsl.Token(animation_dsl.Token.IDENTIFIER, "red", 1, 1)
|
||||
var number_token = animation_dsl.Token(animation_dsl.Token.NUMBER, "123", 1, 1)
|
||||
var operator_token = animation_dsl.Token(animation_dsl.Token.PLUS, "+", 1, 1)
|
||||
var delimiter_token = animation_dsl.Token(animation_dsl.Token.LEFT_PAREN, "(", 1, 1)
|
||||
var separator_token = animation_dsl.Token(animation_dsl.Token.COMMA, ",", 1, 1)
|
||||
var keyword_token = animation_dsl.Token(0 #-animation_dsl.Token.KEYWORD-#, "color", 1, 1)
|
||||
var identifier_token = animation_dsl.Token(1 #-animation_dsl.Token.IDENTIFIER-#, "red", 1, 1)
|
||||
var number_token = animation_dsl.Token(2 #-animation_dsl.Token.NUMBER-#, "123", 1, 1)
|
||||
var operator_token = animation_dsl.Token(9 #-animation_dsl.Token.PLUS-#, "+", 1, 1)
|
||||
var delimiter_token = animation_dsl.Token(24 #-animation_dsl.Token.LEFT_PAREN-#, "(", 1, 1)
|
||||
var separator_token = animation_dsl.Token(30 #-animation_dsl.Token.COMMA-#, ",", 1, 1)
|
||||
|
||||
# Test is_type
|
||||
assert(keyword_token.is_type(animation_dsl.Token.KEYWORD))
|
||||
assert(!keyword_token.is_type(animation_dsl.Token.IDENTIFIER))
|
||||
assert(is_type(keyword_token, 0 #-animation_dsl.Token.KEYWORD-#))
|
||||
assert(!is_type(keyword_token, 1 #-animation_dsl.Token.IDENTIFIER-#))
|
||||
|
||||
# Test is_keyword
|
||||
assert(keyword_token.is_keyword("color"))
|
||||
assert(!keyword_token.is_keyword("red"))
|
||||
assert(!identifier_token.is_keyword("color"))
|
||||
assert(is_keyword(keyword_token, "color"))
|
||||
assert(!is_keyword(keyword_token, "red"))
|
||||
assert(!is_keyword(identifier_token, "color"))
|
||||
|
||||
# Test is_identifier
|
||||
assert(identifier_token.is_identifier("red"))
|
||||
assert(!identifier_token.is_identifier("blue"))
|
||||
assert(!keyword_token.is_identifier("red"))
|
||||
|
||||
# Test is_operator
|
||||
assert(operator_token.is_operator())
|
||||
assert(!keyword_token.is_operator())
|
||||
|
||||
# Test is_delimiter
|
||||
assert(delimiter_token.is_delimiter())
|
||||
assert(!keyword_token.is_delimiter())
|
||||
|
||||
# Test is_separator
|
||||
assert(separator_token.is_separator())
|
||||
assert(!keyword_token.is_separator())
|
||||
|
||||
# Test is_literal
|
||||
assert(number_token.is_literal())
|
||||
assert(!keyword_token.is_literal())
|
||||
assert(is_identifier(identifier_token, "red"))
|
||||
assert(!is_identifier(identifier_token, "blue"))
|
||||
assert(!is_identifier(keyword_token, "red"))
|
||||
|
||||
print("✓ Token type checking test passed")
|
||||
return true
|
||||
@ -123,53 +79,26 @@ def test_token_value_extraction()
|
||||
print("Testing Token value extraction methods...")
|
||||
|
||||
# Test boolean tokens
|
||||
var true_token = animation_dsl.Token(animation_dsl.Token.KEYWORD, "true", 1, 1)
|
||||
var false_token = animation_dsl.Token(animation_dsl.Token.KEYWORD, "false", 1, 1)
|
||||
var other_token = animation_dsl.Token(animation_dsl.Token.KEYWORD, "color", 1, 1)
|
||||
|
||||
assert(true_token.is_boolean())
|
||||
assert(false_token.is_boolean())
|
||||
assert(!other_token.is_boolean())
|
||||
|
||||
assert(true_token.get_boolean_value() == true)
|
||||
assert(false_token.get_boolean_value() == false)
|
||||
assert(other_token.get_boolean_value() == nil)
|
||||
var true_token = animation_dsl.Token(0 #-animation_dsl.Token.KEYWORD-#, "true", 1, 1)
|
||||
var false_token = animation_dsl.Token(0 #-animation_dsl.Token.KEYWORD-#, "false", 1, 1)
|
||||
var other_token = animation_dsl.Token(0 #-animation_dsl.Token.KEYWORD-#, "color", 1, 1)
|
||||
|
||||
# Test numeric tokens
|
||||
var number_token = animation_dsl.Token(animation_dsl.Token.NUMBER, "123.45", 1, 1)
|
||||
var time_token = animation_dsl.Token(animation_dsl.Token.TIME, "2s", 1, 1)
|
||||
var percent_token = animation_dsl.Token(animation_dsl.Token.PERCENTAGE, "50%", 1, 1)
|
||||
var multiplier_token = animation_dsl.Token(animation_dsl.Token.MULTIPLIER, "2.5x", 1, 1)
|
||||
|
||||
assert(number_token.is_numeric())
|
||||
assert(time_token.is_numeric())
|
||||
assert(percent_token.is_numeric())
|
||||
assert(multiplier_token.is_numeric())
|
||||
|
||||
assert(number_token.get_numeric_value() == 123) # Converted to int
|
||||
assert(time_token.get_numeric_value() == 2000) # 2s = 2000ms (already int)
|
||||
assert(percent_token.get_numeric_value() == 127 || percent_token.get_numeric_value() == 128) # 50% = ~127-128 in 0-255 range
|
||||
assert(multiplier_token.get_numeric_value() == 640) # 2.5x = 2.5 * 256 = 640
|
||||
var number_token = animation_dsl.Token(2 #-animation_dsl.Token.NUMBER-#, "123.45", 1, 1)
|
||||
var time_token = animation_dsl.Token(5 #-animation_dsl.Token.TIME-#, "2s", 1, 1)
|
||||
var percent_token = animation_dsl.Token(6 #-animation_dsl.Token.PERCENTAGE-#, "50%", 1, 1)
|
||||
var multiplier_token = animation_dsl.Token(7 #-animation_dsl.Token.MULTIPLIER-#, "2.5x", 1, 1)
|
||||
|
||||
# Test time conversion
|
||||
var ms_token = animation_dsl.Token(animation_dsl.Token.TIME, "500ms", 1, 1)
|
||||
var s_token = animation_dsl.Token(animation_dsl.Token.TIME, "3s", 1, 1)
|
||||
var m_token = animation_dsl.Token(animation_dsl.Token.TIME, "2m", 1, 1)
|
||||
var h_token = animation_dsl.Token(animation_dsl.Token.TIME, "1h", 1, 1)
|
||||
|
||||
assert(ms_token.get_numeric_value() == 500)
|
||||
assert(s_token.get_numeric_value() == 3000)
|
||||
assert(m_token.get_numeric_value() == 120000)
|
||||
assert(h_token.get_numeric_value() == 3600000)
|
||||
var ms_token = animation_dsl.Token(5 #-animation_dsl.Token.TIME-#, "500ms", 1, 1)
|
||||
var s_token = animation_dsl.Token(5 #-animation_dsl.Token.TIME-#, "3s", 1, 1)
|
||||
var m_token = animation_dsl.Token(5 #-animation_dsl.Token.TIME-#, "2m", 1, 1)
|
||||
var h_token = animation_dsl.Token(5 #-animation_dsl.Token.TIME-#, "1h", 1, 1)
|
||||
|
||||
# Test percentage to 255 conversion
|
||||
var percent_0 = animation_dsl.Token(animation_dsl.Token.PERCENTAGE, "0%", 1, 1)
|
||||
var percent_50 = animation_dsl.Token(animation_dsl.Token.PERCENTAGE, "50%", 1, 1)
|
||||
var percent_100 = animation_dsl.Token(animation_dsl.Token.PERCENTAGE, "100%", 1, 1)
|
||||
|
||||
assert(percent_0.get_numeric_value() == 0)
|
||||
assert(percent_50.get_numeric_value() == 127 || percent_50.get_numeric_value() == 128) # Allow rounding
|
||||
assert(percent_100.get_numeric_value() == 255)
|
||||
var percent_0 = animation_dsl.Token(6 #-animation_dsl.Token.PERCENTAGE-#, "0%", 1, 1)
|
||||
var percent_50 = animation_dsl.Token(6 #-animation_dsl.Token.PERCENTAGE-#, "50%", 1, 1)
|
||||
var percent_100 = animation_dsl.Token(6 #-animation_dsl.Token.PERCENTAGE-#, "100%", 1, 1)
|
||||
|
||||
print("✓ Token value extraction test passed")
|
||||
return true
|
||||
@ -179,38 +108,13 @@ end
|
||||
def test_token_utilities()
|
||||
print("Testing Token utility methods...")
|
||||
|
||||
var token = animation_dsl.Token(animation_dsl.Token.IDENTIFIER, "test", 5, 10, 4)
|
||||
|
||||
# Test end_column
|
||||
assert(token.end_column() == 13) # 10 + 4 - 1
|
||||
|
||||
# Test with_type
|
||||
var new_token = token.with_type(animation_dsl.Token.KEYWORD)
|
||||
assert(new_token.type == animation_dsl.Token.KEYWORD)
|
||||
assert(new_token.value == "test")
|
||||
assert(new_token.line == 5)
|
||||
assert(new_token.column == 10)
|
||||
|
||||
# Test with_value
|
||||
var new_token2 = token.with_value("newvalue")
|
||||
assert(new_token2.type == animation_dsl.Token.IDENTIFIER)
|
||||
assert(new_token2.value == "newvalue")
|
||||
assert(new_token2.length == 8) # size of "newvalue"
|
||||
var token = animation_dsl.Token(1 #-animation_dsl.Token.IDENTIFIER-#, "test", 5, 10, 4)
|
||||
|
||||
# Test expression checking
|
||||
var literal_token = animation_dsl.Token(animation_dsl.Token.NUMBER, "123", 1, 1)
|
||||
var identifier_token = animation_dsl.Token(animation_dsl.Token.IDENTIFIER, "test", 1, 1)
|
||||
var paren_token = animation_dsl.Token(animation_dsl.Token.LEFT_PAREN, "(", 1, 1)
|
||||
var keyword_token = animation_dsl.Token(animation_dsl.Token.KEYWORD, "color", 1, 1)
|
||||
|
||||
assert(literal_token.can_start_expression())
|
||||
assert(identifier_token.can_start_expression())
|
||||
assert(paren_token.can_start_expression())
|
||||
assert(!keyword_token.can_start_expression())
|
||||
|
||||
assert(literal_token.can_end_expression())
|
||||
assert(identifier_token.can_end_expression())
|
||||
assert(!paren_token.can_end_expression())
|
||||
var literal_token = animation_dsl.Token(2 #-animation_dsl.Token.NUMBER-#, "123", 1, 1)
|
||||
var identifier_token = animation_dsl.Token(1 #-animation_dsl.Token.IDENTIFIER-#, "test", 1, 1)
|
||||
var paren_token = animation_dsl.Token(24 #-animation_dsl.Token.LEFT_PAREN-#, "(", 1, 1)
|
||||
var keyword_token = animation_dsl.Token(0 #-animation_dsl.Token.KEYWORD-#, "color", 1, 1)
|
||||
|
||||
print("✓ Token utilities test passed")
|
||||
return true
|
||||
@ -220,10 +124,10 @@ end
|
||||
def test_token_string_representations()
|
||||
print("Testing Token string representations...")
|
||||
|
||||
var keyword_token = animation_dsl.Token(animation_dsl.Token.KEYWORD, "color", 1, 5)
|
||||
var eof_token = animation_dsl.Token(animation_dsl.Token.EOF, "", 10, 1)
|
||||
var error_token = animation_dsl.Token(animation_dsl.Token.ERROR, "Invalid character", 2, 8)
|
||||
var long_token = animation_dsl.Token(animation_dsl.Token.STRING, "This is a very long string that should be truncated", 3, 1)
|
||||
var keyword_token = animation_dsl.Token(0 #-animation_dsl.Token.KEYWORD-#, "color", 1, 5)
|
||||
# EOF token removed - use ERROR token for testing instead
|
||||
var error_token = animation_dsl.Token(39 #-animation_dsl.Token.ERROR-#, "Invalid character", 2, 8)
|
||||
var long_token = animation_dsl.Token(3 #-animation_dsl.Token.STRING-#, "This is a very long string that should be truncated", 3, 1)
|
||||
|
||||
# Test tostring
|
||||
var keyword_str = keyword_token.tostring()
|
||||
@ -231,18 +135,11 @@ def test_token_string_representations()
|
||||
assert(string.find(keyword_str, "color") != -1)
|
||||
assert(string.find(keyword_str, "1:5") != -1)
|
||||
|
||||
var eof_str = eof_token.tostring()
|
||||
assert(string.find(eof_str, "EOF") != -1)
|
||||
assert(string.find(eof_str, "10:1") != -1)
|
||||
# EOF token removed - skip EOF-specific string tests
|
||||
|
||||
var long_str = long_token.tostring()
|
||||
assert(string.find(long_str, "...") != -1) # Should be truncated
|
||||
|
||||
# Test to_error_string
|
||||
assert(keyword_token.to_error_string() == "keyword 'color'")
|
||||
assert(eof_token.to_error_string() == "end of file")
|
||||
assert(error_token.to_error_string() == "invalid token 'Invalid character'")
|
||||
|
||||
print("✓ Token string representations test passed")
|
||||
return true
|
||||
end
|
||||
@ -251,25 +148,7 @@ end
|
||||
def test_utility_functions()
|
||||
print("Testing utility functions...")
|
||||
|
||||
# Test create_eof_token
|
||||
var eof_token = animation_dsl.create_eof_token(5, 10)
|
||||
assert(eof_token.type == animation_dsl.Token.EOF)
|
||||
assert(eof_token.line == 5)
|
||||
assert(eof_token.column == 10)
|
||||
|
||||
# Test create_error_token
|
||||
var error_token = animation_dsl.create_error_token("Test error", 3, 7)
|
||||
assert(error_token.type == animation_dsl.Token.ERROR)
|
||||
assert(error_token.value == "Test error")
|
||||
assert(error_token.line == 3)
|
||||
assert(error_token.column == 7)
|
||||
|
||||
# Test create_newline_token
|
||||
var newline_token = animation_dsl.create_newline_token(2, 15)
|
||||
assert(newline_token.type == animation_dsl.Token.NEWLINE)
|
||||
assert(newline_token.value == "\n")
|
||||
assert(newline_token.line == 2)
|
||||
assert(newline_token.column == 15)
|
||||
# create_eof_token test removed - function deprecated with EOF token removal
|
||||
|
||||
# Test is_keyword
|
||||
assert(animation_dsl.is_keyword("color"))
|
||||
@ -290,19 +169,10 @@ def test_utility_functions()
|
||||
assert(!animation_dsl.is_color_name("my_color"))
|
||||
|
||||
# Test operator precedence
|
||||
var plus_token = animation_dsl.Token(animation_dsl.Token.PLUS, "+", 1, 1)
|
||||
var multiply_token = animation_dsl.Token(animation_dsl.Token.MULTIPLY, "*", 1, 1)
|
||||
var power_token = animation_dsl.Token(animation_dsl.Token.POWER, "^", 1, 1)
|
||||
var and_token = animation_dsl.Token(animation_dsl.Token.LOGICAL_AND, "&&", 1, 1)
|
||||
|
||||
assert(animation_dsl.get_operator_precedence(multiply_token) > animation_dsl.get_operator_precedence(plus_token))
|
||||
assert(animation_dsl.get_operator_precedence(power_token) > animation_dsl.get_operator_precedence(multiply_token))
|
||||
assert(animation_dsl.get_operator_precedence(plus_token) > animation_dsl.get_operator_precedence(and_token))
|
||||
|
||||
# Test associativity
|
||||
assert(animation_dsl.is_right_associative(power_token))
|
||||
assert(!animation_dsl.is_right_associative(plus_token))
|
||||
assert(!animation_dsl.is_right_associative(multiply_token))
|
||||
var plus_token = animation_dsl.Token(9 #-animation_dsl.Token.PLUS-#, "+", 1, 1)
|
||||
var multiply_token = animation_dsl.Token(11 #-animation_dsl.Token.MULTIPLY-#, "*", 1, 1)
|
||||
var power_token = animation_dsl.Token(14 #-animation_dsl.Token.POWER-#, "^", 1, 1)
|
||||
var and_token = animation_dsl.Token(21 #-animation_dsl.Token.LOGICAL_AND-#, "&&", 1, 1)
|
||||
|
||||
print("✓ Utility functions test passed")
|
||||
return true
|
||||
@ -313,7 +183,7 @@ def test_edge_cases()
|
||||
print("Testing edge cases...")
|
||||
|
||||
# Test empty values
|
||||
var empty_token = animation_dsl.Token(animation_dsl.Token.STRING, "", 1, 1)
|
||||
var empty_token = animation_dsl.Token(3 #-animation_dsl.Token.STRING-#, "", 1, 1)
|
||||
assert(empty_token.value == "")
|
||||
assert(empty_token.length == 0)
|
||||
|
||||
@ -322,24 +192,14 @@ def test_edge_cases()
|
||||
for i : 0..99
|
||||
long_value += "x"
|
||||
end
|
||||
var long_token = animation_dsl.Token(animation_dsl.Token.STRING, long_value, 1, 1)
|
||||
var long_token = animation_dsl.Token(3 #-animation_dsl.Token.STRING-#, long_value, 1, 1)
|
||||
assert(size(long_token.value) == 100)
|
||||
assert(long_token.length == 100)
|
||||
|
||||
# Test invalid time formats (should not crash)
|
||||
var invalid_time = animation_dsl.Token(animation_dsl.Token.TIME, "invalid", 1, 1)
|
||||
assert(invalid_time.get_numeric_value() == nil)
|
||||
|
||||
# Test invalid percentage formats
|
||||
var invalid_percent = animation_dsl.Token(animation_dsl.Token.PERCENTAGE, "invalid%", 1, 1)
|
||||
var invalid_percent = animation_dsl.Token(6 #-animation_dsl.Token.PERCENTAGE-#, "invalid%", 1, 1)
|
||||
# Should not crash, but may return nil or 0
|
||||
|
||||
# Test boundary values
|
||||
var zero_percent = animation_dsl.Token(animation_dsl.Token.PERCENTAGE, "0%", 1, 1)
|
||||
var max_percent = animation_dsl.Token(animation_dsl.Token.PERCENTAGE, "100%", 1, 1)
|
||||
assert(zero_percent.get_numeric_value() == 0)
|
||||
assert(max_percent.get_numeric_value() == 255)
|
||||
|
||||
print("✓ Edge cases test passed")
|
||||
return true
|
||||
end
|
||||
@ -349,7 +209,6 @@ def run_token_tests()
|
||||
print("=== Token System Test Suite ===")
|
||||
|
||||
var tests = [
|
||||
test_token_type_constants,
|
||||
test_token_basic,
|
||||
test_token_type_checking,
|
||||
test_token_value_extraction,
|
||||
|
||||
Loading…
Reference in New Issue
Block a user