WIP STUFF

parent de8c3d620a
commit afc0738a31

10 changed files with 247 additions and 168 deletions

.gitignore (vendored) | 2

@@ -30,4 +30,4 @@ lytlang-*.tar
*.swp

# Ignore compiled Erlang files
/src/lytlang.erl
/src/*.erl

@@ -1,8 +1,6 @@
# Lytlang

Lytlang's goal is to enable you to write Elixir code more tersely and is itself
written in Elixir. Its syntax is a terrible merging of Elm, CoffeeScript,
Elixir, a sprinkling of Rust, and some made-up crap. Here's a rough preview:
Lytlang is an opinionated way to write more readable Elixir code.

```
!module Leetcode

lib/lytlang.ex | 102

@@ -1,31 +1,83 @@
defmodule Lytlang do
  @moduledoc """
  Documentation for Lytlang.
  """
  def eval(string) do
    {value, _} =
      string
      |> from_lytlang()
      |> IO.inspect()
      |> Code.eval_quoted()
      |> IO.inspect()

  @doc """
  Hello world.

  ## Examples

      iex> Lytlang.hello()
      :world

  """
  def hello do
    :world
    value
  end

  @doc """
  Echo.

  ## Examples

      iex> Lytlang.echo("world")
      "world"

  """
  def echo(s) do
    s
  def from_lytlang(string, binding \\ [], opts \\ []) do
    string |> String.to_charlist() |> tokenize() |> parse() |> transform(binding, opts)
  end

  def tokenize(string) do
    {:ok, tokens, _} =
      :lytlang_lexer.string(string)
      |> IO.inspect()

    tokens
  end

  def parse(tokens) do
    {:ok, tree} =
      :lytlang_parser.parse(tokens)
      |> IO.inspect()

    tree
  end

  def transform(ast, binding \\ [], opts \\ [])

  def transform([] = _expr_list, _binding, _opts), do: []

  def transform([expr | rest] = _expr_list, binding, opts) do
    [transform(expr, binding, opts) | transform(rest, binding, opts)]
  end

  def transform({:binary_op, _line, op, left, right}, binding, _) do
    {op, binding, [transform(left), transform(right)]}
  end

  def transform({:unary_op, line, op, left}, _, _) do
    {:op, line, op, transform(left)}
  end

  def transform({:integer, _, n} = _expr, _, _), do: n
end

"""
-module(elixir).
-export([eval/1, from_elixir/1, from_erlang/1]).

eval(String) ->
  {value, Value, _} = erl_eval:expr(from_elixir(String), []),
  Value.

% Temporary to aid debugging
from_elixir(String) ->
  transform(parse(String)).

% Temporary to aid debugging
from_erlang(String) ->
  {ok, Tokens, _} = erl_scan:string(String),
  {ok, [Form]} = erl_parse:parse_exprs(Tokens),
  Form.

parse(String) ->
  {ok, Tokens, _} = elixir_lexer:string(String),
  {ok, ParseTree} = elixir_parser:parse(Tokens),
  ParseTree.

transform({ binary_op, Line, Op, Left, Right }) ->
  {op, Line, Op, transform(Left), transform(Right)};

transform({ unary_op, Line, Op, Right }) ->
  {op, Line, Op, transform(Right)};

transform({ integer, _, _ } = Expr) -> Expr.
"""
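For orientation, a rough IEx-style sketch of how the new `Lytlang.eval/1` pipeline is exercised; the inputs and results mirror the arithmetic test cases at the bottom of this commit, and the intermediate `IO.inspect` output is omitted:

```
# Hypothetical IEx session against this WIP pipeline: eval/1 lexes,
# parses, transforms to quoted Elixir, then runs Code.eval_quoted/1.
iex> Lytlang.eval("2 + 3 * 8")
26
iex> Lytlang.eval("(2 + 3) * 8")
40
```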

@@ -1,10 +0,0 @@
defmodule Lytlang.Parser do
  @doc """
  Attempts to tokenize an input string to start_tag, end_tag, and char
  """
  @spec parse(binary) :: list
  def parse(input) do
    {:ok, tokens, _} = input |> to_char_list |> :lytlang.string()
    tokens
  end
end

@@ -1,95 +0,0 @@
defmodule Lytlang.Transpiler do
  @moduledoc """
  Documentation for Lytlang.
  """

  @spec transpile_file(String.t()) :: String.t()
  def transpile_file(file_path) do
    file_path
    |> File.read!()
    |> transpile_block()
  end

  @doc """
  Transpiler!

  ## Examples

      iex> Lytlang.Transpiler.transpile_block("!mod EmptyModule")
      "defmodule EmptyModule do\\nend"

      iex> Lytlang.Transpiler.transpile_block("!mod EmptyModule\\n\\tfn hello\\n\\t\\t:world")
      "defmodule EmptyModule\\n\\tdef hello() do\\n\\t\\t:world\\n\\tend\\nend"

  """
  @spec transpile_block(String.t(), Keyword.t()) :: String.t()
  def transpile_block(s, _opts \\ []) do
    s
    |> split_lines()
    |> Enum.map(&tokenize/1)
    |> IO.inspect(label: "Tokens")
    |> build_ast()
    |> IO.inspect(label: "AST")
    |> ast_to_elixir()
  end

  def initial_ast_state() do
    %{
      ast: {:__global, []}
    }
  end

  def build_ast(token_lines, state \\ nil)

  def build_ast([], state), do: state.ast |> Enum.reverse()

  def build_ast([line_tokens | rest], state) do
    state =
      if !state do
        initial_ast_state()
      else
        state
      end

    ast = state.ast

    state =
      case line_tokens do
        ["!mod", module_name | rest] ->
          %{state | ast: [{:module, module_name, []} | ast]}

        ["fn", fn_name | rest] ->
          %{state | ast: [{:function, fn_name, []} | ast]}

        expr ->
          expr |> IO.inspect(label: "lyt expression")
          %{state | ast: [expr |> Enum.join(" ") | ast]}
      end

    build_ast(rest, state)
  end

  def ast_to_elixir(s, state \\ %{elixir_code: []})

  def ast_to_elixir([], state),
    do: state.elixir_code |> Enum.filter(&(String.trim(&1) != "")) |> Enum.join("\n")

  def ast_to_elixir([leaf | rest], state) do
    child = fn {head, tail}, state -> [head | ast_to_elixir(rest, state)] ++ [tail] end

    case leaf do
      :global -> child.({"", ""}, state)
      {:module, mod, []} -> child.({"defmodule #{mod} do", "end"}, state)
      {:fn, fname, []} -> child.({"def #{fname} do", "end"}, state)
      expr when is_binary(expr) -> [expr]
    end
  end

  def split_lines(s) do
    String.split(s, "\n", trim: true)
  end

  def tokenize(s) do
    Regex.split(~r/\s/, s)
  end
end
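For context on what is being removed here, a hedged sketch of the string-splitting helpers the old transpiler relied on (return values inferred from `split_lines/1` and `tokenize/1` above):

```
# Hypothetical IEx session against the deleted Lytlang.Transpiler helpers.
iex> Lytlang.Transpiler.split_lines("!mod EmptyModule\nfn hello")
["!mod EmptyModule", "fn hello"]
iex> Lytlang.Transpiler.tokenize("!mod EmptyModule")
["!mod", "EmptyModule"]
```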

@@ -1,16 +0,0 @@
Definitions.

I = \t
MODULE_DECL = module\s[a-zA-Z][a-zA-Z0-9_]*
FILE_LEVEL_MODULE_DECL = !module\s([a-zA-Z][a-zA-Z0-9_]*)
NUMBER = [0-9]+

Rules.

%% number
{NUMBER} : {token, { number, TokenLine, list_to_integer(TokenChars) } }.
{MODULE_DECL} : {token, { module_decl, TokenLine, TokenChars } }.
{FILE_LEVEL_MODULE_DECL} : {token, { file_level_module_decl, TokenLine, TokenChars } }.
[\s\n\r\t]+ : skip_token.

Erlang code.
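The deleted `Lytlang.Parser` above drove this removed lexer through `:lytlang.string/1`; a hedged sketch of the token it produced for a file-level module declaration (shape inferred from the FILE_LEVEL_MODULE_DECL rule; TokenChars is a charlist):

```
# Hypothetical IEx session against the old, now-removed :lytlang lexer.
iex> :lytlang.string('!module Foo')
{:ok, [{:file_level_module_decl, 1, '!module Foo'}], 1}
```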

src/lytlang_lexer.xrl (new file) | 44

@@ -0,0 +1,44 @@
% Lexer syntax for the Elixir language done with leex
% Copyright (C) 2011 Jose Valim

Definitions.

D = [0-9]
U = [A-Z]
L = [a-z]
WS = [\s]

Rules.

{D}+\.{D}+ : { token, { float, TokenLine, list_to_float(TokenChars) } }.
{D}+ : { token, { integer, TokenLine, list_to_integer(TokenChars) } }.
({L}|_)({U}{L}{D}|_)* : { token, var(TokenChars, TokenLine) }.

\+ : { token, { '+', TokenLine } }.
- : { token, { '-', TokenLine } }.
\* : { token, { '*', TokenLine } }.
/ : { token, { '/', TokenLine } }.
\( : { token, { '(', TokenLine } }.
\) : { token, { ')', TokenLine } }.
= : { token, { '=', TokenLine } }.
-> : { token, { '->', TokenLine } }.
; : { token, { ';', TokenLine } }.

{Comment} : skip_token.
{WS}+ : skip_token.
% ({Comment}|{Whitespace})*(\n({Comment}|{Whitespace})*)+ : { token, { eol, TokenLine } }.

Erlang code.

var(Chars, Line) ->
  Atom = list_to_atom(Chars),
  case reserved_word(Atom) of
    true -> {Atom, Line};
    false -> {var, Line, Atom}
  end.

reserved_word('nil') -> true;
reserved_word('true') -> true;
reserved_word('false') -> true;
reserved_word('module') -> true;
reserved_word(_) -> false.
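A hedged sketch of the token stream these rules are meant to produce for a simple arithmetic expression (assuming the generated `:lytlang_lexer` compiles as written; leex takes a charlist and returns `{:ok, tokens, end_line}`):

```
# Hypothetical IEx session; token shapes assumed from the rules above.
iex> :lytlang_lexer.string('2 + 3 * 8')
{:ok, [{:integer, 1, 2}, {:+, 1}, {:integer, 1, 3}, {:*, 1}, {:integer, 1, 8}], 1}
```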

src/lytlang_parser.yrl (new file) | 90

@@ -0,0 +1,90 @@
Nonterminals
  grammar
  expr_list
  expr
  assign_expr
  add_expr
  mult_expr
  unary_expr
  fun_expr
  body
  stabber
  max_expr
  number
  unary_op
  add_op
  mult_op
  .

Terminals
  var float integer eol
  '+' '-' '*' '/' '(' ')' '=' '->'
  .

Rootsymbol grammar.

grammar -> expr_list : '$1'.
grammar -> '$empty' : [].

expr_list -> eol : [].
expr_list -> expr : ['$1'].
expr_list -> expr eol : ['$1'].
expr_list -> eol expr_list : '$2'.
expr_list -> expr eol expr_list : ['$1'|'$3'].

expr -> assign_expr : '$1'.

assign_expr -> add_expr '=' assign_expr :
  { match, ?line('$2'), '$1', '$3' }.

assign_expr -> add_expr : '$1'.

%% Arithmetic operations
add_expr -> add_expr add_op mult_expr :
  { binary_op, ?line('$1'), ?op('$2'), '$1', '$3' }.

add_expr -> mult_expr : '$1'.

mult_expr -> mult_expr mult_op unary_expr :
  { binary_op, ?line('$1'), ?op('$2'), '$1', '$3' }.

mult_expr -> unary_expr : '$1'.

unary_expr -> unary_op max_expr :
  { unary_op, ?line('$1'), ?op('$1'), '$2' }.

unary_expr -> max_expr : '$1'.

fun_expr -> stabber eol body :
  { 'fn', ?line('$1')
  , { clauses, [ { clause, ?line('$1'), [], [], '$3' } ] }
  }.

fun_expr -> max_expr : '$1'.

%% Minimum expressions
max_expr -> number : '$1'.
max_expr -> '(' expr ')' : '$2'.

%% Numbers
number -> float : '$1'.
number -> integer : '$1'.

%% Unary operator
unary_op -> '+' : '$1'.
unary_op -> '-' : '$1'.

%% Addition operators
add_op -> '+' : '$1'.
add_op -> '-' : '$1'.

%% Multiplication operators
mult_op -> '*' : '$1'.
mult_op -> '/' : '$1'.

Erlang code.

-define(op(Node), element(1, Node)).
-define(line(Node), element(2, Node)).
-define(char(Node), element(3, Node)).
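Following on from the lexer sketch above, a hedged sketch of the parse tree this grammar should build for the same tokens, and the quoted Elixir that the `transform/3` clauses in lib/lytlang.ex would derive from it (shapes assumed from the binary_op rules and the `?op`/`?line` macros):

```
# Hypothetical IEx session; the grammar wraps a single expression in a
# list via expr_list -> expr : ['$1'].
iex> {:ok, tokens, _} = :lytlang_lexer.string('2 + 3 * 8')
iex> :lytlang_parser.parse(tokens)
{:ok, [{:binary_op, 1, :+, {:integer, 1, 2}, {:binary_op, 1, :*, {:integer, 1, 3}, {:integer, 1, 8}}}]}

# Lytlang.transform/1 maps that node to a quoted call for Code.eval_quoted/1.
iex> Lytlang.transform({:binary_op, 1, :+, {:integer, 1, 2}, {:binary_op, 1, :*, {:integer, 1, 3}, {:integer, 1, 8}}})
{:+, [], [2, {:*, [], [3, 8]}]}
```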

@@ -1,24 +1,28 @@
mod SimpleApp
entry
:project
deps:
{:cowboy, "~> 2.6"}
{:plug, "~> 1.8"}
{:plug_cowboy, "~> 2.0"}
:app
[extra_applications: [:logger]]
!project SimpleApp simple_app
# should generate a SimpleApp.MixProject_ module
version: "0.1.0"
elixir: "~> 1.8"
lytl: "~> 0.1"
start_permanent: Mix.env() == prod
extra_applications: [logger]
deps: []

import Plug.Conn
!module
# uses project's name as module if not module name specified
import Plug.Conn

fn init default_options
IO.puts "initializing plug"
default_options
init = opts -> IO.puts "initializing plug"; opts

fn call conn:Plug.Conn.t() _options = []
call = conn _options = [] ->
IO.puts "calling plug"

conn
|> put_resp_content_type "text/plain"
|> send_resp 200 "Hello world"

long_fn =
arg1
arg2
arg3
->

# in lytx run `Plug.Adapters.Cowboy.http SimpleApp []`
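The `!project SimpleApp simple_app` header near the top of this example, together with some of its keyword lines, presumably expands to an ordinary Mix project module (the in-file comment names it SimpleApp.MixProject_). A loose, hypothetical sketch of that target output, with the bare atoms written as valid Elixir:

```
# Hypothetical generated output; the module name and fields come from the
# example above, the overall shape from a standard mix.exs.
defmodule SimpleApp.MixProject_ do
  use Mix.Project

  def project do
    [
      app: :simple_app,
      version: "0.1.0",
      elixir: "~> 1.8",
      start_permanent: Mix.env() == :prod,
      deps: []
    ]
  end

  def application do
    [extra_applications: [:logger]]
  end
end
```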

@@ -1,9 +1,21 @@
defmodule LytlangTest do
  use ExUnit.Case
  doctest Lytlang
  doctest Lytlang.Transpiler

  test "greets the world" do
    assert Lytlang.hello() == :world
  test "arithmetic" do
    [
      {"2 + 3 + 8", 13},
      {"2 * 3 + 8", 14},
      {"2 + 3 * 8", 26},
      {"(2 + 3) * 8", 40},
      {"2 * (3 + 8)", 22},
      {"2 + (3 * 8)", 26},
      {"8 - 3", 5},
      {"8 / 4", 2.0},
      {"2 + 3", 5}
    ]
    |> Enum.map(fn {string, val} ->
      assert Lytlang.eval(string) == val
    end)
  end
end