lib/lua/lexer.ex (34 additions, 0 deletions)

@@ -334,6 +334,12 @@ defmodule Lua.Lexer do
     do_tokenize(rest, [token | acc], pos)
   end
 
+  # \z escape: skip all following whitespace
+  defp scan_string(<<?\\, ?z, rest::binary>>, str_acc, acc, pos, start_pos, quote) do
+    {remaining, new_pos} = skip_whitespace_in_string(rest, advance_column(pos, 2))
+    scan_string(remaining, str_acc, acc, new_pos, start_pos, quote)
+  end
+
   defp scan_string(<<?\\, esc, rest::binary>>, str_acc, acc, pos, start_pos, quote) do
     # Escape sequence
     case escape_char(esc) do
@@ -371,6 +377,34 @@ defmodule Lua.Lexer do
   defp escape_char(?'), do: {:ok, ?'}
   defp escape_char(_), do: :error
 
+  # Helper for \z escape: skip all whitespace characters
+  defp skip_whitespace_in_string(<<?\s, rest::binary>>, pos) do
+    skip_whitespace_in_string(rest, advance_column(pos, 1))
+  end
+
+  defp skip_whitespace_in_string(<<?\t, rest::binary>>, pos) do
+    skip_whitespace_in_string(rest, advance_column(pos, 1))
+  end
+
+  defp skip_whitespace_in_string(<<?\n, rest::binary>>, pos) do
+    new_pos = %{line: pos.line + 1, column: 1, byte_offset: pos.byte_offset + 1}
+    skip_whitespace_in_string(rest, new_pos)
+  end
+
+  defp skip_whitespace_in_string(<<?\r, ?\n, rest::binary>>, pos) do
+    new_pos = %{line: pos.line + 1, column: 1, byte_offset: pos.byte_offset + 2}
+    skip_whitespace_in_string(rest, new_pos)
+  end
+
+  defp skip_whitespace_in_string(<<?\r, rest::binary>>, pos) do
+    new_pos = %{line: pos.line + 1, column: 1, byte_offset: pos.byte_offset + 1}
+    skip_whitespace_in_string(rest, new_pos)
+  end
+
+  defp skip_whitespace_in_string(rest, pos) do
+    {rest, pos}
+  end
+
   # Scan long bracket for level: [[ or [=[ or [==[ etc.
   defp scan_long_bracket(rest, equals) do
     case rest do
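For context: Lua 5.2 added the \z escape for short string literals; it tells the lexer to skip the run of whitespace (including line breaks) that follows, so a long literal can be wrapped across source lines without the breaks ending up in the value. A minimal usage sketch against this lexer, mirroring the test expectations below:

    # \z and every whitespace character after it are consumed,
    # so the two fragments join with nothing in between.
    {:ok, [{:string, "helloworld", _pos}, {:eof, _}]} =
      Lua.Lexer.tokenize("\"hello\\z\n    world\"")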
test/lua/lexer_test.exs (15 additions, 1 deletion)

@@ -213,6 +213,21 @@ defmodule Lua.LexerTest do
       assert {:error, {:unclosed_long_string, _}} = Lexer.tokenize("[[hello")
       assert {:error, {:unclosed_long_string, _}} = Lexer.tokenize("[=[test")
     end
+
+    test "handles \\z escape sequence (skip whitespace)" do
+      # \z skips all following whitespace including newlines
+      assert {:ok, [{:string, "abcdef", _}, {:eof, _}]} = Lexer.tokenize("\"abc\\z \n def\"")
+
+      # Multiple spaces and tabs - \z skips them all
+      assert {:ok, [{:string, "helloworld", _}, {:eof, _}]} =
+               Lexer.tokenize("\"hello\\z \t world\"")
+
+      # Multiple newlines
+      assert {:ok, [{:string, "abc", _}, {:eof, _}]} = Lexer.tokenize("\"abc\\z \n\n\n\"")
+
+      # With CRLF
+      assert {:ok, [{:string, "test", _}, {:eof, _}]} = Lexer.tokenize("\"test\\z\r\n\"")
+    end
   end
 
   describe "operators" do
@@ -553,7 +568,6 @@ defmodule Lua.LexerTest do
test "handles invalid escape sequences in strings" do
# Invalid escape sequences should be included as-is
assert {:ok, [{:string, "\\x", _}, {:eof, _}]} = Lexer.tokenize(~s("\\x"))
assert {:ok, [{:string, "\\z", _}, {:eof, _}]} = Lexer.tokenize(~s("\\z"))
assert {:ok, [{:string, "\\1", _}, {:eof, _}]} = Lexer.tokenize(~s("\\1"))
end

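That removed assertion is the diff's single deletion: \z used to fall through to the invalid-escape branch and be kept verbatim, but the lexer now interprets it. Note that \z consumes whitespace only; by the catch-all clause of skip_whitespace_in_string, scanning resumes at the first non-whitespace character:

    # A letter immediately follows \z, so nothing is skipped
    # (a sketch, inferred from the catch-all clause above).
    {:ok, [{:string, "ab", _}, {:eof, _}]} = Lua.Lexer.tokenize("\"a\\zb\"")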
test/lua_test.exs (18 additions, 0 deletions)

@@ -23,6 +23,24 @@ defmodule LuaTest do
assert {["nested"], _lua} =
lua |> Lua.set!([:a, :b, :c], "nested") |> Lua.eval!("return a.b.c")
end

test "table constructors with semicolons", %{lua: lua} do
# Can retrieve values from tables with explicit fields using semicolons
code = """
t = {1, 2; n=2}
return t[1], t[2], t.n
"""

assert {[1, 2, 2], _lua} = Lua.eval!(lua, code)

# Mixed commas and semicolons
code = """
t = {1; 2, 3}
return t[1], t[2], t[3]
"""

assert {[1, 2, 3], _lua} = Lua.eval!(lua, code)
end
end

describe "inspect" do
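Both new tests rest on a detail of Lua's grammar: inside a table constructor, commas and semicolons are interchangeable field separators, and a trailing separator is permitted:

    fieldsep ::= ',' | ';'

By convention, a semicolon is sometimes used to mark where the array part ends and the keyed part begins, as in t = {1, 2; n=2}.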