Mercurial > repos > rhope
diff lex.rhope @ 131:0a4682be2db2
Modify lexer and new parser to work in compiler
author: Mike Pavone <pavone@retrodev.com>
date: Fri, 05 Nov 2010 02:43:34 +0000
parents | 73e978d590c7 |
children | 1f238280047f |
line wrap: on
line diff
--- a/lex.rhope Fri Nov 05 02:42:45 2010 +0000 +++ b/lex.rhope Fri Nov 05 02:43:34 2010 +0000 @@ -1,4 +1,3 @@ -Import extendlib.rhope Blueprint Token { @@ -9,7 +8,7 @@ Token[type,raw,text:out] { - out <- [[[Build["Token"]]Type <<[type]]Raw Text <<[raw]]Text <<[text] + out <- [[[Build[Token()]]Type <<[type]]Raw Text <<[raw]]Text <<[text] } _Type Match[val, test type, type:out] @@ -22,9 +21,19 @@ } } +As List[val:out] +{ + [(List(),List Leaf())]Find[=[?, Blueprint Of[val]]] + { + out <- val + }{ + out <- [()]Append[val] + } +} + Type Match@Token[token,type:match,nomatch] { - match,nomatch <- If[Fold[["_Type Match"]Set Input[2, [token]Type >>], No, As List[type]]] + match,nomatch <- If[Fold[_Type Match[?,?, [token]Type >>], No, As List[type]]] } String Literal[string, raw string, escapes, text, simple tokens, token list:out] @@ -38,7 +47,7 @@ If[[first] = ["\\"]] { second,next text <- [rest]Slice[1] - char <- [escapes]Index[To String[second]] {} + char <- [escapes]Index[String[second]] {} { char <- Val[second] } @@ -53,9 +62,10 @@ Line Comment[start comment, text, simple tokens, token list:out] { - next text, comment <- [text]Get DString["\n"] {} {} {} + comment,,next text <- [text]Partition["\n"] {} {} {} { next text <- "" + comment <- Val[text] } out <- _Lex[next text, [next text]Slice[0], simple tokens, [token list]Append[Token["Line Comment", [start comment]Append[comment], comment]]] @@ -63,13 +73,14 @@ Block Comment[comment,raw comment, depth, text, simple tokens, token list:out] { - Print[["Block Comment: Depth="]Append[depth]] + Print[["Block Comment: Depth="]Append[String[depth]]] If[[depth] > [0]] { - next text, chunk, delim <- [text]Get DString[("/*","*/")] {} {} {} + chunk, delim, next text <- [text]Partition[("/*","*/")] {} {} {} { next text <- "" delim <- "" + chunk <- Val[text] } If[[delim] = ["/*"]] { @@ -110,14 +121,14 @@ If[[[text]Length] > [0]] { first,rest <- [text]Slice[1] -
[simple tokens]Index[String[first]] { - token worker <- [["Add Token"]Set Input[0, Token[~, first, ""]]]Set Input[1, rest] + token worker <- Val[Add Token[Token[~, first, ""], rest, ?]] }{ If[[first] = ["\""]] { - escapes <- [[[New@Dictionary[]]Set["n","\n"]]Set["r","\r"]]Set["t","\t"] - token worker <- [[[["String Literal"]Set Input[0, [first]Slice[0]]]Set Input[1, first]]Set Input[2, escapes]]Set Input[3, rest] + escapes <- [[[Dictionary[]]Set["n","\n"]]Set["r","\r"]]Set["t","\t"] + token worker <- Val[String Literal[[first]Slice[0], first, escapes, rest, ?]] //out <- String Literal["", first, rest, simple tokens, token list, escapes] }{ second,second rest <- [rest]Slice[1] @@ -125,23 +136,23 @@ { [first]Append[second] { - token worker <- [["Add Token"]Set Input[0, Token["Assignment", ~, ~]]]Set Input[1, second rest] + token worker <- Val[Add Token[Token["Assignment", ~, ~], second rest, ?]] } }{ If[[[first] = ["/"]] And [[second] = ["*"]]] { - token worker <- [[[["Block Comment"]Set Input[0, [first]Slice[0]]]Set Input[1, [first]Append[second]]]Set Input[2, 1]]Set Input[3, second rest] + token worker <- Val[Block Comment[[first]Slice[0], [first]Append[second], 1, second rest, ?]] //out <- Block Comment[next text, simple tokens, token list, 1] }{ If[[[first] = ["/"]] And [[second] = ["/"]]] { - token worker <- [["Line Comment"]Set Input[0, [first]Append[second]]]Set Input[1, second rest] + token worker <- Val[Line Comment[[first]Append[second], second rest, ?]] //out <- Line Comment["", [first]Append[second], next text, simple tokens, token list] }{ If[[[first]In["0123456789"]] Or [[[first] = ["-"]] And [[second]In["0123456789"]]]] { - token worker <- [["Numeric Literal"]Set Input[0, first]]Set Input[1, rest] + token worker <- Val[Numeric Literal[first, rest, ?]] //out <- Numeric Literal[text, simple tokens, token list] }{ out <- _Lex[rest, [symbol]Append[first], simple tokens, token list] @@ -161,9 +172,7 @@ }{ next list <- [token list]Append[Token["Symbol", trimmed,
trimmed]] } - out <- [[token worker]Do[ - [[()]Append[simple tokens]]Append[next list]] - ]Index[0] + out <- [token worker]Call[simple tokens, next list] } }{ out <- token list @@ -172,7 +181,7 @@ Lex[text:out] { - simple tokens <- [[[[[[[[[[[New@Dictionary[] + simple tokens <- [[[[[[[[[[[Dictionary[] ]Set["{", "Block Begin"] ]Set["}", "Block End"] ]Set["(", "List Begin"]