Create g4 and tokenizer.

Add lexer and parser .g4 grammars.
Add a tokenizer for RESERVED tokens.
mii
2021-12-19 21:53:52 +09:00
parent 883d4f9e0e
commit 0b0a5b8dcb
19 changed files with 2088 additions and 8 deletions

.vscode/launch.json vendored Normal file (+14)

@ -0,0 +1,14 @@
{
"version": "1.0.0",
"configurations": [
{
"name": "Debug ANTLR4 grammar",
"type": "antlr-debug",
"request": "launch",
"input": "grammar/sample.lysa",
"grammar": "grammar/CatalysaParser.g4",
"printParseTree": true,
"visualParseTree": true
}
]
}

Cargo.lock generated (+36)

@ -5,3 +5,39 @@ version = 3
[[package]]
name = "catalysa"
version = "0.1.0"
[[package]]
name = "catalysa-parser"
version = "0.1.0"
dependencies = [
"nom",
]
[[package]]
name = "memchr"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
[[package]]
name = "minimal-lexical"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]]
name = "nom"
version = "7.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b1d11e1ef389c76fe5b81bcaf2ea32cf88b62bc494e19f493d0b30e7a930109"
dependencies = [
"memchr",
"minimal-lexical",
"version_check",
]
[[package]]
name = "version_check"
version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe"

Cargo.toml

@ -1,8 +1,5 @@
-[package]
-name = "catalysa"
-version = "0.1.0"
-edition = "2018"
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-[dependencies]
+[workspace]
+members = [
+    "catalysa",
+    "catalysa-parser"
+]
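
With the root manifest converted to a workspace, each member crate can be built and tested on its own with Cargo's standard package selector:

    cargo build -p catalysa-parser
    cargo test -p catalysa-parser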

catalysa-parser/Cargo.toml Normal file

@ -0,0 +1,9 @@
[package]
name = "catalysa-parser"
version = "0.1.0"
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
nom = "7"

catalysa-parser/src/lib.rs Normal file

@ -0,0 +1,2 @@
pub mod tokenizer;
pub mod token;

catalysa-parser/src/token.rs Normal file

@ -0,0 +1,14 @@
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum TokenKind {
    RESERVED,
    NUMBER,
    IDENT,
    TEXT,
}

#[derive(Clone, Debug)]
pub struct Token {
    pub kind: TokenKind,
    /// Numeric value (meaningful for NUMBER tokens).
    pub num: usize,
    /// Source text of the token.
    pub str: String,
}
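
As a hypothetical sketch (not part of this commit) of how these types would carry the a+a sample from input.txt — the num: 0 placeholder for non-NUMBER tokens is an assumption:

    use catalysa_parser::token::{Token, TokenKind};

    fn main() {
        // "a" "+" "a": two identifiers around one reserved operator.
        let tokens = vec![
            Token { kind: TokenKind::IDENT,    num: 0, str: "a".to_string() },
            Token { kind: TokenKind::RESERVED, num: 0, str: "+".to_string() },
            Token { kind: TokenKind::IDENT,    num: 0, str: "a".to_string() },
        ];
        assert_eq!(tokens[1].kind, TokenKind::RESERVED);
    }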

catalysa-parser/src/tokenizer.rs Normal file

@ -0,0 +1,75 @@
use nom::{IResult, branch::alt, bytes::complete::tag};
use crate::token::Token;

pub struct Tokenizer {
    /// Remaining source code to tokenize.
    pub code: String
}

impl Tokenizer {
    pub fn tokenize(&mut self) -> Vec<Token> {
        let tokens: Vec<Token> = vec![];
        // TODO: consume `self.code` by repeatedly matching `reserved`
        // (and future number/identifier rules), pushing one Token per
        // match until the input is empty.
        tokens
    }
    fn reserved(input: &str) -> IResult<&str, &str> {
        // `alt` tries alternatives in order, so longer literals must come
        // before their prefixes ("==" before "=", "->" before "-", "for"
        // before "f"). It is nested because `alt` accepts at most 21
        // alternatives per tuple.
        alt((
            alt((
                tag("+="),
                tag("-="),
                tag("*="),
                tag("/="),
                tag("&&"),
                tag("||"),
                tag("&"),
                tag("=="),
                tag("="),
                tag("!="),
                tag(">="),
                tag("<="),
                tag(">"),
                tag("<"),
                tag(";"),
                tag(":"),
                tag(","),
                tag("."),
                tag("\""),
            )),
            alt((
                tag("->"),
                tag("+"),
                tag("-"),
                tag("*"),
                tag("/"),
                tag("("),
                tag(")"),
                tag("{"),
                tag("}"),
                tag("for"),
                tag("f"),
                tag("while"),
                tag("if"),
                tag("else"),
                tag("let"),
                tag("return")
            ))
        ))(input)
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn reserved_test() {
        assert_eq!(Tokenizer::reserved("+"), Ok(("", "+")));
        assert_eq!(Tokenizer::reserved("=="), Ok(("", "==")));
        assert_eq!(Tokenizer::reserved("="), Ok(("", "=")));
        assert_eq!(Tokenizer::reserved("->"), Ok(("", "->")));
        assert_eq!(Tokenizer::reserved("for"), Ok(("", "for")));
    }
}
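
The committed tokenize body is still a stub. One possible shape for the loop — a sketch, assuming RESERVED is the only token matched so far and whitespace simply separates tokens — is:

    impl Tokenizer {
        // Hypothetical completion of `tokenize`; not the committed code.
        // Assumes `use crate::token::{Token, TokenKind};` at the top.
        pub fn tokenize(&mut self) -> Vec<Token> {
            let mut tokens = vec![];
            let mut rest = self.code.trim_start();
            while !rest.is_empty() {
                match Tokenizer::reserved(rest) {
                    Ok((next, s)) => {
                        tokens.push(Token {
                            kind: TokenKind::RESERVED,
                            num: 0, // placeholder; only NUMBER tokens carry a value
                            str: s.to_string(),
                        });
                        rest = next.trim_start();
                    }
                    // TODO: try NUMBER / IDENT / TEXT before giving up.
                    Err(_) => break,
                }
            }
            tokens
        }
    }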

catalysa/Cargo.toml Normal file (+8)

@ -0,0 +1,8 @@
[package]
name = "catalysa"
version = "0.1.0"
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]

File diff suppressed because one or more lines are too long

CatalysaLexer.java generated

@ -0,0 +1,187 @@
// Generated from h:\Git\catalysa\grammar\CatalysaLexer.g4 by ANTLR 4.8
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.*;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class CatalysaLexer extends Lexer {
static { RuntimeMetaData.checkVersion("4.8", RuntimeMetaData.VERSION); }
protected static final DFA[] _decisionToDFA;
protected static final PredictionContextCache _sharedContextCache =
new PredictionContextCache();
public static final int
WS=1, ADD=2, SUB=3, MUL=4, DIV=5, CONJ=6, OR=7, AND=8, EQ=9, EQEQ=10,
NE=11, BE=12, LE=13, BT=14, LT=15, SEMICOLON=16, COLON=17, COMMA=18, DOT=19,
QUOTE=20, ADD_ASSIGNMENT=21, SUB_ASSIGNMENT=22, MUL_ASSIGNMENT=23, DIV_ASSIGNMENT=24,
LPAREN=25, RPAREN=26, LCURL=27, RCURL=28, ARROW=29, F=30, FOR=31, WHILE=32,
IF=33, ELSE=34, LET=35, RETURN=36, NUM=37, TEXT=38, IDENT=39;
public static String[] channelNames = {
"DEFAULT_TOKEN_CHANNEL", "HIDDEN"
};
public static String[] modeNames = {
"DEFAULT_MODE"
};
private static String[] makeRuleNames() {
return new String[] {
"WS", "ADD", "SUB", "MUL", "DIV", "CONJ", "OR", "AND", "EQ", "EQEQ",
"NE", "BE", "LE", "BT", "LT", "SEMICOLON", "COLON", "COMMA", "DOT", "QUOTE",
"ADD_ASSIGNMENT", "SUB_ASSIGNMENT", "MUL_ASSIGNMENT", "DIV_ASSIGNMENT",
"LPAREN", "RPAREN", "LCURL", "RCURL", "ARROW", "F", "FOR", "WHILE", "IF",
"ELSE", "LET", "RETURN", "NUM", "TEXT", "IDENT"
};
}
public static final String[] ruleNames = makeRuleNames();
private static String[] makeLiteralNames() {
return new String[] {
null, null, "'+'", "'-'", "'*'", "'/'", "'&&'", "'||'", "'&'", "'='",
"'=='", "'!='", "'>='", "'<='", "'>'", "'<'", "';'", "':'", "','", "'.'",
"'\"'", "'+='", "'-='", "'*='", "'/='", "'('", "')'", "'{'", "'}'", "'->'",
"'f'", "'for'", "'while'", "'if'", "'else'", "'let'", "'return'"
};
}
private static final String[] _LITERAL_NAMES = makeLiteralNames();
private static String[] makeSymbolicNames() {
return new String[] {
null, "WS", "ADD", "SUB", "MUL", "DIV", "CONJ", "OR", "AND", "EQ", "EQEQ",
"NE", "BE", "LE", "BT", "LT", "SEMICOLON", "COLON", "COMMA", "DOT", "QUOTE",
"ADD_ASSIGNMENT", "SUB_ASSIGNMENT", "MUL_ASSIGNMENT", "DIV_ASSIGNMENT",
"LPAREN", "RPAREN", "LCURL", "RCURL", "ARROW", "F", "FOR", "WHILE", "IF",
"ELSE", "LET", "RETURN", "NUM", "TEXT", "IDENT"
};
}
private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames();
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
/**
* @deprecated Use {@link #VOCABULARY} instead.
*/
@Deprecated
public static final String[] tokenNames;
static {
tokenNames = new String[_SYMBOLIC_NAMES.length];
for (int i = 0; i < tokenNames.length; i++) {
tokenNames[i] = VOCABULARY.getLiteralName(i);
if (tokenNames[i] == null) {
tokenNames[i] = VOCABULARY.getSymbolicName(i);
}
if (tokenNames[i] == null) {
tokenNames[i] = "<INVALID>";
}
}
}
@Override
@Deprecated
public String[] getTokenNames() {
return tokenNames;
}
@Override
public Vocabulary getVocabulary() {
return VOCABULARY;
}
public CatalysaLexer(CharStream input) {
super(input);
_interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
}
@Override
public String getGrammarFileName() { return "CatalysaLexer.g4"; }
@Override
public String[] getRuleNames() { return ruleNames; }
@Override
public String getSerializedATN() { return _serializedATN; }
@Override
public String[] getChannelNames() { return channelNames; }
@Override
public String[] getModeNames() { return modeNames; }
@Override
public ATN getATN() { return _ATN; }
public static final String _serializedATN =
"\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2)\u00cc\b\1\4\2\t"+
"\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+
"\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+
"\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+
"\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t \4!"+
"\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\3\2\3\2\3\2\3\2\3\3"+
"\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\7\3\7\3\7\3\b\3\b\3\b\3\t\3\t\3\n\3\n\3"+
"\13\3\13\3\13\3\f\3\f\3\f\3\r\3\r\3\r\3\16\3\16\3\16\3\17\3\17\3\20\3"+
"\20\3\21\3\21\3\22\3\22\3\23\3\23\3\24\3\24\3\25\3\25\3\26\3\26\3\26\3"+
"\27\3\27\3\27\3\30\3\30\3\30\3\31\3\31\3\31\3\32\3\32\3\33\3\33\3\34\3"+
"\34\3\35\3\35\3\36\3\36\3\36\3\37\3\37\3 \3 \3 \3 \3!\3!\3!\3!\3!\3!\3"+
"\"\3\"\3\"\3#\3#\3#\3#\3#\3$\3$\3$\3$\3%\3%\3%\3%\3%\3%\3%\3&\3&\7&\u00ba"+
"\n&\f&\16&\u00bd\13&\3\'\3\'\7\'\u00c1\n\'\f\'\16\'\u00c4\13\'\3\'\3\'"+
"\3(\6(\u00c9\n(\r(\16(\u00ca\2\2)\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23"+
"\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\30/\31"+
"\61\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G%I&K\'M(O)\3\2\7\5\2\13\f"+
"\17\17\"\"\3\2\63;\3\2\62;\7\2//\62;C\\aac|\5\2C\\aac|\2\u00ce\2\3\3\2"+
"\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17"+
"\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2"+
"\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3"+
"\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61\3"+
"\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3\2\2\2\2;\3\2\2\2\2"+
"=\3\2\2\2\2?\3\2\2\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2\2\2\2G\3\2\2\2\2I\3"+
"\2\2\2\2K\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\3Q\3\2\2\2\5U\3\2\2\2\7W\3\2\2"+
"\2\tY\3\2\2\2\13[\3\2\2\2\r]\3\2\2\2\17`\3\2\2\2\21c\3\2\2\2\23e\3\2\2"+
"\2\25g\3\2\2\2\27j\3\2\2\2\31m\3\2\2\2\33p\3\2\2\2\35s\3\2\2\2\37u\3\2"+
"\2\2!w\3\2\2\2#y\3\2\2\2%{\3\2\2\2\'}\3\2\2\2)\177\3\2\2\2+\u0081\3\2"+
"\2\2-\u0084\3\2\2\2/\u0087\3\2\2\2\61\u008a\3\2\2\2\63\u008d\3\2\2\2\65"+
"\u008f\3\2\2\2\67\u0091\3\2\2\29\u0093\3\2\2\2;\u0095\3\2\2\2=\u0098\3"+
"\2\2\2?\u009a\3\2\2\2A\u009e\3\2\2\2C\u00a4\3\2\2\2E\u00a7\3\2\2\2G\u00ac"+
"\3\2\2\2I\u00b0\3\2\2\2K\u00b7\3\2\2\2M\u00be\3\2\2\2O\u00c8\3\2\2\2Q"+
"R\t\2\2\2RS\3\2\2\2ST\b\2\2\2T\4\3\2\2\2UV\7-\2\2V\6\3\2\2\2WX\7/\2\2"+
"X\b\3\2\2\2YZ\7,\2\2Z\n\3\2\2\2[\\\7\61\2\2\\\f\3\2\2\2]^\7(\2\2^_\7("+
"\2\2_\16\3\2\2\2`a\7~\2\2ab\7~\2\2b\20\3\2\2\2cd\7(\2\2d\22\3\2\2\2ef"+
"\7?\2\2f\24\3\2\2\2gh\7?\2\2hi\7?\2\2i\26\3\2\2\2jk\7#\2\2kl\7?\2\2l\30"+
"\3\2\2\2mn\7@\2\2no\7?\2\2o\32\3\2\2\2pq\7>\2\2qr\7?\2\2r\34\3\2\2\2s"+
"t\7@\2\2t\36\3\2\2\2uv\7>\2\2v \3\2\2\2wx\7=\2\2x\"\3\2\2\2yz\7<\2\2z"+
"$\3\2\2\2{|\7.\2\2|&\3\2\2\2}~\7\60\2\2~(\3\2\2\2\177\u0080\7$\2\2\u0080"+
"*\3\2\2\2\u0081\u0082\7-\2\2\u0082\u0083\7?\2\2\u0083,\3\2\2\2\u0084\u0085"+
"\7/\2\2\u0085\u0086\7?\2\2\u0086.\3\2\2\2\u0087\u0088\7,\2\2\u0088\u0089"+
"\7?\2\2\u0089\60\3\2\2\2\u008a\u008b\7\61\2\2\u008b\u008c\7?\2\2\u008c"+
"\62\3\2\2\2\u008d\u008e\7*\2\2\u008e\64\3\2\2\2\u008f\u0090\7+\2\2\u0090"+
"\66\3\2\2\2\u0091\u0092\7}\2\2\u00928\3\2\2\2\u0093\u0094\7\177\2\2\u0094"+
":\3\2\2\2\u0095\u0096\7/\2\2\u0096\u0097\7@\2\2\u0097<\3\2\2\2\u0098\u0099"+
"\7h\2\2\u0099>\3\2\2\2\u009a\u009b\7h\2\2\u009b\u009c\7q\2\2\u009c\u009d"+
"\7t\2\2\u009d@\3\2\2\2\u009e\u009f\7y\2\2\u009f\u00a0\7j\2\2\u00a0\u00a1"+
"\7k\2\2\u00a1\u00a2\7n\2\2\u00a2\u00a3\7g\2\2\u00a3B\3\2\2\2\u00a4\u00a5"+
"\7k\2\2\u00a5\u00a6\7h\2\2\u00a6D\3\2\2\2\u00a7\u00a8\7g\2\2\u00a8\u00a9"+
"\7n\2\2\u00a9\u00aa\7u\2\2\u00aa\u00ab\7g\2\2\u00abF\3\2\2\2\u00ac\u00ad"+
"\7n\2\2\u00ad\u00ae\7g\2\2\u00ae\u00af\7v\2\2\u00afH\3\2\2\2\u00b0\u00b1"+
"\7t\2\2\u00b1\u00b2\7g\2\2\u00b2\u00b3\7v\2\2\u00b3\u00b4\7w\2\2\u00b4"+
"\u00b5\7t\2\2\u00b5\u00b6\7p\2\2\u00b6J\3\2\2\2\u00b7\u00bb\t\3\2\2\u00b8"+
"\u00ba\t\4\2\2\u00b9\u00b8\3\2\2\2\u00ba\u00bd\3\2\2\2\u00bb\u00b9\3\2"+
"\2\2\u00bb\u00bc\3\2\2\2\u00bcL\3\2\2\2\u00bd\u00bb\3\2\2\2\u00be\u00c2"+
"\5)\25\2\u00bf\u00c1\t\5\2\2\u00c0\u00bf\3\2\2\2\u00c1\u00c4\3\2\2\2\u00c2"+
"\u00c0\3\2\2\2\u00c2\u00c3\3\2\2\2\u00c3\u00c5\3\2\2\2\u00c4\u00c2\3\2"+
"\2\2\u00c5\u00c6\5)\25\2\u00c6N\3\2\2\2\u00c7\u00c9\t\6\2\2\u00c8\u00c7"+
"\3\2\2\2\u00c9\u00ca\3\2\2\2\u00ca\u00c8\3\2\2\2\u00ca\u00cb\3\2\2\2\u00cb"+
"P\3\2\2\2\6\2\u00bb\u00c2\u00ca\3\b\2\2";
public static final ATN _ATN =
new ATNDeserializer().deserialize(_serializedATN.toCharArray());
static {
_decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
_decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
}
}
}
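
This lexer, like the suppressed .interp/.tokens/parser files around it, is machine-generated output that should not be edited by hand. Assuming the stock ANTLR 4.8 tool jar, it can be regenerated from the grammar with:

    java -jar antlr-4.8-complete.jar grammar/CatalysaLexer.g4

In this repository the files were presumably produced by the VS Code ANTLR extension (the antlr-debug launch type in .vscode/launch.json), which generates them automatically.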

CatalysaLexer.tokens generated

@ -0,0 +1,74 @@
WS=1
ADD=2
SUB=3
MUL=4
DIV=5
CONJ=6
OR=7
AND=8
EQ=9
EQEQ=10
NE=11
BE=12
LE=13
BT=14
LT=15
SEMICOLON=16
COLON=17
COMMA=18
DOT=19
QUOTE=20
ADD_ASSIGNMENT=21
SUB_ASSIGNMENT=22
MUL_ASSIGNMENT=23
DIV_ASSIGNMENT=24
LPAREN=25
RPAREN=26
LCURL=27
RCURL=28
ARROW=29
F=30
FOR=31
WHILE=32
IF=33
ELSE=34
LET=35
RETURN=36
NUM=37
TEXT=38
IDENT=39
'+'=2
'-'=3
'*'=4
'/'=5
'&&'=6
'||'=7
'&'=8
'='=9
'=='=10
'!='=11
'>='=12
'<='=13
'>'=14
'<'=15
';'=16
':'=17
','=18
'.'=19
'"'=20
'+='=21
'-='=22
'*='=23
'/='=24
'('=25
')'=26
'{'=27
'}'=28
'->'=29
'f'=30
'for'=31
'while'=32
'if'=33
'else'=34
'let'=35
'return'=36

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

CatalysaParser.tokens generated

@ -0,0 +1,74 @@
WS=1
ADD=2
SUB=3
MUL=4
DIV=5
CONJ=6
OR=7
AND=8
EQ=9
EQEQ=10
NE=11
BE=12
LE=13
BT=14
LT=15
SEMICOLON=16
COLON=17
COMMA=18
DOT=19
QUOTE=20
ADD_ASSIGNMENT=21
SUB_ASSIGNMENT=22
MUL_ASSIGNMENT=23
DIV_ASSIGNMENT=24
LPAREN=25
RPAREN=26
LCURL=27
RCURL=28
ARROW=29
F=30
FOR=31
WHILE=32
IF=33
ELSE=34
LET=35
RETURN=36
NUM=37
TEXT=38
IDENT=39
'+'=2
'-'=3
'*'=4
'/'=5
'&&'=6
'||'=7
'&'=8
'='=9
'=='=10
'!='=11
'>='=12
'<='=13
'>'=14
'<'=15
';'=16
':'=17
','=18
'.'=19
'"'=20
'+='=21
'-='=22
'*='=23
'/='=24
'('=25
')'=26
'{'=27
'}'=28
'->'=29
'f'=30
'for'=31
'while'=32
'if'=33
'else'=34
'let'=35
'return'=36

grammar/CatalysaLexer.g4 Normal file (+48)

@ -0,0 +1,48 @@
lexer grammar CatalysaLexer;
WS
: [ \t\r\n]
-> skip
;
ADD: '+';
SUB: '-';
MUL: '*' ;
DIV: '/' ;
CONJ: '&&' ;
OR: '||' ;
AND: '&' ;
EQ: '=' ;
EQEQ: '==' ;
NE: '!=' ;
BE: '>=' ;
LE: '<=' ;
BT: '>' ;
LT: '<' ;
SEMICOLON: ';' ;
COLON: ':' ;
COMMA: ',' ;
DOT: '.' ;
QUOTE: '"' ;
ADD_ASSIGNMENT: '+=' ;
SUB_ASSIGNMENT: '-=' ;
MUL_ASSIGNMENT: '*=' ;
DIV_ASSIGNMENT: '/=' ;
LPAREN: '(' ;
RPAREN: ')' ;
LCURL: '{' ;
RCURL: '}' ;
ARROW: '->' ;
F: 'f' ;
FOR: 'for' ;
WHILE: 'while' ;
IF: 'if' ;
ELSE: 'else' ;
LET: 'let' ;
RETURN: 'return' ;
NUM: [1-9] [0-9]* ;
TEXT: QUOTE [a-zA-Z0-9_-]* QUOTE ;
IDENT: [a-zA-Z_]+ ;
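
The three non-literal rules at the end are the ones the hand-written tokenizer does not cover yet. A minimal sketch of nom (v7) equivalents — the function names num/text/ident are assumptions, not part of this commit; note that NUM as written never matches a bare 0:

    use nom::{
        IResult,
        bytes::complete::{take_while, take_while1},
        character::complete::{char, satisfy},
        combinator::recognize,
        sequence::{delimited, pair},
    };

    // NUM: [1-9] [0-9]*
    fn num(input: &str) -> IResult<&str, &str> {
        recognize(pair(
            satisfy(|c| ('1'..='9').contains(&c)),
            take_while(|c: char| c.is_ascii_digit()),
        ))(input)
    }

    // TEXT: '"' [a-zA-Z0-9_-]* '"'  (recognize keeps the quotes in the slice)
    fn text(input: &str) -> IResult<&str, &str> {
        recognize(delimited(
            char('"'),
            take_while(|c: char| c.is_ascii_alphanumeric() || c == '_' || c == '-'),
            char('"'),
        ))(input)
    }

    // IDENT: [a-zA-Z_]+
    fn ident(input: &str) -> IResult<&str, &str> {
        take_while1(|c: char| c.is_ascii_alphabetic() || c == '_')(input)
    }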

grammar/CatalysaParser.g4 Normal file (+33)

@ -0,0 +1,33 @@
parser grammar CatalysaParser;
options { tokenVocab = CatalysaLexer; }
catalysa_file: program*;
program: if
| f_call
| function
| expr
| let
| block
;
f_call: IDENT LPAREN (program COMMA?)* RPAREN ;
block: LCURL program* RCURL ;
function: F IDENT LPAREN ( IDENT COMMA? )* RPAREN ARROW program ;
let: LET IDENT (EQ program)? ;
if: IF relational program (ELSE program)* ;
expr: assign | relational ;
assign: IDENT EQ relational ;
relational: add (LE add | LT add | BE add | BT add | EQEQ add | NE add | CONJ add | OR add)* ;
add: mul (ADD mul | SUB mul | SUB_ASSIGNMENT mul | ADD_ASSIGNMENT mul)* ;
mul: unary (MUL unary | DIV unary | DIV_ASSIGNMENT unary | MUL_ASSIGNMENT unary)* ;
primary: LPAREN expr RPAREN | IDENT | TEXT | function_call | NUM ;
function_call: IDENT LPAREN (unary COMMA?)* RPAREN ;
unary: ADD primary
| SUB primary
| primary
;
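
The expr → relational → add → mul → unary → primary chain gives multiplication precedence over addition, with comparisons on top. For example, the a+a input in input.txt derives as:

    expr
    └─ relational
       └─ add
          ├─ mul → unary → primary → IDENT "a"
          ├─ ADD "+"
          └─ mul → unary → primary → IDENT "a"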

grammar/sample.lysa Normal file (+6)

@ -0,0 +1,6 @@
f x(a) ->
if (a > 2) || (a == 1)
a = 3
let a = x(x(2))

input.txt Normal file (+1)

@ -0,0 +1 @@
a+a