Commit bf75da3f authored by matthew-eads's avatar matthew-eads
Browse files

ast to ast

parent 23eec762
File added
{-# OPTIONS_GHC -w #-}
module Main where
import System.Environment
import Data.Char
import Data.Maybe
import System.IO
import Control.Applicative(Applicative(..))
import Control.Monad (ap)
......@@ -379,25 +382,23 @@ lexer s acc Line =
lexer (x:s) acc Ignore = lexer s acc Ignore
lexer [] acc Ignore = reverse (TokenEOF:acc)
writeHeader :: IO ()
writeHeader = do {
putStr ("{\nmodule Main where\n\nimport Lexer\n}\n" ++
"%name imp\n%tokentype { Token }\n%token \n" ++
writeHeader :: Handle -> String -> IO ()
writeHeader h s = do {
hPutStr h ("{\nmodule Main where\n\nimport Lexer\n}\n" ++
"%name imp\n%tokentype { Token }\n%token \n" ++ s ++
"if { TokenIf }\nwhile { TokenWhile }\n" ++
"begin { TokenBegin }\nset { TokenSet }\n" ++
"'+' { TokenSym $$ }\n'-' { TokenSym $$ }\n" ++
"'/' { TokenSym $$ }\n'*' { TokenSym $$ }\n" ++
"'(' { TokenLParen }\n')' { TokenRParen }\n" ++
"int { TokenInt $$ }\nvar { TokenVar $$ }\n" ++
"'@' { TokenSym $$ }\n'!' { TokenSym $$ }\n'$' { TokenSym $$ }\n'%' { TokenSym $$ }\n'^' { TokenSym $$ }\n'&' { TokenSym $$ }\n'_' { TokenSym $$ }\n'`' { TokenSym $$ }\n'~' { TokenSym $$ }\n'=' { TokenSym $$ }\n'[' { TokenSym $$ }\n']' { TokenSym $$ }\n'{' { TokenSym $$ }\n'}' { TokenSym $$ }\n':' { TokenSym $$ }\n';' { TokenSym $$ }\n'<' { TokenSym $$ }\n'>' { TokenSym $$ }\n" ++
"'@' { TokenOp $$ }\n'!' { TokenOp $$ }\n'$' { TokenOp $$ }\n'%' { TokenOp $$ }\n'^' { TokenOp $$ }\n'&' { TokenOp $$ }\n'_' { TokenOp $$ }\n'`' { TokenOp $$ }\n'~' { TokenOp $$ }\n'=' { TokenOp $$ }\n'[' { TokenOp $$ }\n']' { TokenOp $$ }\n'{' { TokenOp $$ }\n'}' { TokenOp $$ }\n':' { TokenOp $$ }\n';' { TokenOp $$ }\n'<' { TokenOp $$ }\n'>' { TokenOp $$ }\n" ++
"%error {parseError}\n%%\n\n" ++
"Prog: AST Prog { $1:$2 }\n\t| AST { [$1] }\n\n" ++
"AST: ");
}
writeFooter :: IO ()
writeFooter = do {
putStr("\nMultiExpr : AST MultiExpr {$1:$2}\n | AST {[$1]}\n\n{\n\nparseError :: [Token] -> a\nparseError t = error (\"Parse Error on token \" ++ (show t))\nhappyError = parseError\n\ndata AST = Lit Int\n | Id String\n | Builtin String [AST]\n | UserDef String [AST]\n | IfThenElse AST AST AST\n | While AST AST\n | Do [AST]\n deriving Show\n\ntype MultiExpr = [AST]\n\ntype Prog = [AST]\n\n\nmain = do {\n s <- getContents;\n putStrLn (show (imp (alexScanTokens s)));\n}\n");}
writeFooter :: Handle -> IO ()
writeFooter h = do {
hPutStr h ("\nMultiExpr : AST MultiExpr {$1:$2}\n | AST {[$1]}\n\n{\n\nparseError :: [Token] -> a\nparseError t = error (\"Parse Error on token \" ++ (show t))\nhappyError = parseError\n\ndata AST = Lit Int\n | Id String\n | Builtin String [AST]\n | UserDef String [AST]\n | IfThenElse AST AST AST\n | While AST AST\n | Do [AST]\n deriving Show\n\ntype MultiExpr = [AST]\n\ntype Prog = [AST]\n\n\nmain = do {\n s <- getContents;\n putStrLn (show (imp (alexScanTokens s)));\n}\n}");}
ruleToString :: Grammar -> String
ruleToString SET =" | '(' set var AST ')' {Builtin \"set\" [(Id $3), $4]}\n"
......@@ -408,33 +409,57 @@ ruleToString VAR = " | var { Id $1 }\n"
ruleToString INT = " | int { Lit $1 }\n"
ruleToString USERDEF = " | '(' var MultiExpr ')' {UserDef $2 $3}\n"
ruleToString (BINOP s) = " | '(' '" ++ s ++ "' AST AST ')' {Builtin $2 [$3, $4]}\n"
ruleToString (BUILTIN s1 s2) = " | '(' " ++ s2 ++ " MultiExp ')' { Builtin \"" ++ s2 ++ "\" $3}\n"
ruleToString (BUILTIN s1 s2) = " | '(' " ++ s2 ++ " MultiExpr ')' { Builtin \"" ++ s2 ++ "\" $3}\n"
ruleToString (CUSTOM s1 s2) = undefined
-- | happy %token declarations needed by grammar forms that introduce
-- new terminal symbols; Nothing for forms already covered by the
-- fixed parser header.
newTokens :: Grammar -> Maybe String
newTokens g = case g of
  BINOP sym      -> Just ("'" ++ sym ++ "'" ++ "\t{ TokenOp $$ }\n")
  BUILTIN _ name -> Just ("'" ++ name ++ "'" ++ "\t{ TokenBuiltin $$ }\n")
  -- An earlier variant emitted the bare name with a capitalised Token
  -- constructor instead of a quoted terminal:
  --   Just (name ++ "\t{ Token" ++ ((toUpper (head name)):(tail name)) ++ "}\n")
  _              -> Nothing
-- | alex lexing rules for grammar forms that introduce new lexemes;
-- Nothing when the stock lexer rules already handle the form.
lexerTokens :: Grammar -> Maybe String
lexerTokens g = case g of
  BINOP sym      -> Just (concat ["\"", sym, "\"\t{ \\s -> TokenOp s }\n"])
  BUILTIN _ name -> Just (concat ["\"", name, "\"\t{ \\s -> TokenBuiltin s }\n"])
  _              -> Nothing
-- | Render the first grammar alternative without the leading " | "
-- that 'ruleToString' prepends: happy writes the first alternative of
-- a non-terminal without the pipe.
--
-- Uses the total 'drop 1' instead of the partial 'tail' so an
-- unexpectedly empty rule string cannot crash the generator.
printFirstRule :: Grammar -> String
printFirstRule g = drop 1 (dropWhile (== ' ') (ruleToString g))
makeParser :: [Grammar] -> IO ()
makeParser g = do {
writeHeader;
putStrLn (printFirstRule (head g));
putStr (foldl (++) [] (map ruleToString (tail g)));
writeFooter;
-- | Prepend the contents of a Maybe String (when present) onto the
-- accumulator; Nothing leaves the accumulator untouched.  Used as a
-- fold to concatenate the Just results of newTokens/lexerTokens.
maybeAppend :: String -> Maybe String -> String
maybeAppend acc = maybe acc (++ acc)
-- | Emit the complete happy parser specification for grammar @g@ to
-- the parser output handle.  The lexer handle is accepted to match
-- the generator's call shape but is not written to here.
--
-- NOTE(review): 'head'/'tail' assume a non-empty grammar list; an
-- empty grammar crashes before any output is written — confirm
-- callers guarantee this.
makeParser :: [Grammar] -> Handle -> Handle -> IO ()
makeParser g lexerF parserF = do {
  -- Header takes the extra %token declarations contributed by g.
  writeHeader parserF (foldl maybeAppend [] (map newTokens g));
  -- First alternative is printed without the leading " | ".
  hPutStrLn parserF (printFirstRule (head g));
  -- concatMap avoids the O(n^2) left-nested (++) of foldl (++) [].
  hPutStr parserF (concatMap ruleToString (tail g));
  writeFooter parserF;
}
-- | Write the alex lexer specification to handle @h@: a fixed
-- prologue (wrapper, character classes, keyword rules), the
-- caller-supplied extra token rules @s@, then the fixed epilogue
-- (catch-all rules plus the Token datatype).
makeLexer :: Handle -> String -> IO ()
makeLexer h s =
  mapM_ (hPutStr h)
    [ "{\nmodule Lexer where\n}\n\n%wrapper \"basic\"\n\n$digit = 0-9\n$alpha = [a-z]\n$Alpha = [A-Z]\n\ntokens :-\n\n\t$white+\t;\n\tset\t{ \\s -> TokenSet }\n\tif\t{ \\s -> TokenIf }\n\twhile \t{ \\s -> TokenWhile }\n\tbegin \t{ \\s -> TokenBegin }\n"
    , s
    , " [\\+\\-\\/\\*] { \\s -> TokenOp s}\n [\\(] { \\s -> TokenLParen }\n [\\)] { \\s -> TokenRParen }\n\t$digit+\t{ \\s -> TokenInt (read s) }\n\t$alpha [$alpha $Alpha \\_]* { \\s -> TokenVar s}\n . { \\s -> TokenSym (head s)}\n{\n\ndata Token = TokenSet\n\t | TokenIf\n\t | TokenWhile\n\t | TokenBegin\n | TokenSym Char\n | TokenOp String\n | TokenBuiltin String\n | TokenPlus\n | TokenMult\n | TokenDiv\n | TokenMinus\n | TokenLParen\n | TokenRParen\n\t | TokenInt Int\n\t | TokenVar String\n\t deriving (Eq, Show)\n\n}\n"
    ]
main = do {
args <- getArgs;
s <- if length args == 1 then readFile (head args)
else getContents;
-- s <- getContents;
let tokes = lexer s [] Ignore
else (error "No file given");
lexerF <- openFile "Lexer.x" WriteMode;
parserF <- openFile (takeWhile ((/=) '.') (head args)) WriteMode;
let
tokes = lexer s [] Ignore
gram = grammar tokes in
do {
-- putStrLn ("tokes: " ++ (show tokes));
-- putStrLn ("\n\ngrammar" ++ (show gram));
-- putStr "\n\n";
makeParser gram;
makeLexer lexerF (foldl maybeAppend [] (map lexerTokens gram));
makeParser gram lexerF parserF;
}
}
{-# LINE 1 "templates/GenericTemplate.hs" #-}
......
{
module Main where
import System.Environment
import Data.Char
import Data.Maybe
import System.IO
}
%name grammar
%tokentype { Token }
......@@ -114,25 +117,23 @@ lexer s acc Line =
lexer (x:s) acc Ignore = lexer s acc Ignore
lexer [] acc Ignore = reverse (TokenEOF:acc)
writeHeader :: IO ()
writeHeader = do {
putStr ("{\nmodule Main where\n\nimport Lexer\n}\n" ++
"%name imp\n%tokentype { Token }\n%token \n" ++
writeHeader :: Handle -> String -> IO ()
writeHeader h s = do {
hPutStr h ("{\nmodule Main where\n\nimport Lexer\n}\n" ++
"%name imp\n%tokentype { Token }\n%token \n" ++ s ++
"if { TokenIf }\nwhile { TokenWhile }\n" ++
"begin { TokenBegin }\nset { TokenSet }\n" ++
"'+' { TokenSym $$ }\n'-' { TokenSym $$ }\n" ++
"'/' { TokenSym $$ }\n'*' { TokenSym $$ }\n" ++
"'(' { TokenLParen }\n')' { TokenRParen }\n" ++
"int { TokenInt $$ }\nvar { TokenVar $$ }\n" ++
"'@' { TokenSym $$ }\n'!' { TokenSym $$ }\n'$' { TokenSym $$ }\n'%' { TokenSym $$ }\n'^' { TokenSym $$ }\n'&' { TokenSym $$ }\n'_' { TokenSym $$ }\n'`' { TokenSym $$ }\n'~' { TokenSym $$ }\n'=' { TokenSym $$ }\n'[' { TokenSym $$ }\n']' { TokenSym $$ }\n'{' { TokenSym $$ }\n'}' { TokenSym $$ }\n':' { TokenSym $$ }\n';' { TokenSym $$ }\n'<' { TokenSym $$ }\n'>' { TokenSym $$ }\n" ++
"'@' { TokenOp $$ }\n'!' { TokenOp $$ }\n'$' { TokenOp $$ }\n'%' { TokenOp $$ }\n'^' { TokenOp $$ }\n'&' { TokenOp $$ }\n'_' { TokenOp $$ }\n'`' { TokenOp $$ }\n'~' { TokenOp $$ }\n'=' { TokenOp $$ }\n'[' { TokenOp $$ }\n']' { TokenOp $$ }\n'{' { TokenOp $$ }\n'}' { TokenOp $$ }\n':' { TokenOp $$ }\n';' { TokenOp $$ }\n'<' { TokenOp $$ }\n'>' { TokenOp $$ }\n" ++
"%error {parseError}\n%%\n\n" ++
"Prog: AST Prog { $1:$2 }\n\t| AST { [$1] }\n\n" ++
"AST: ");
}
writeFooter :: IO ()
writeFooter = do {
putStr("\nMultiExpr : AST MultiExpr {$1:$2}\n | AST {[$1]}\n\n{\n\nparseError :: [Token] -> a\nparseError t = error (\"Parse Error on token \" ++ (show t))\nhappyError = parseError\n\ndata AST = Lit Int\n | Id String\n | Builtin String [AST]\n | UserDef String [AST]\n | IfThenElse AST AST AST\n | While AST AST\n | Do [AST]\n deriving Show\n\ntype MultiExpr = [AST]\n\ntype Prog = [AST]\n\n\nmain = do {\n s <- getContents;\n putStrLn (show (imp (alexScanTokens s)));\n}\n");}
writeFooter :: Handle -> IO ()
writeFooter h = do {
hPutStr h ("\nMultiExpr : AST MultiExpr {$1:$2}\n | AST {[$1]}\n\n{\n\nparseError :: [Token] -> a\nparseError t = error (\"Parse Error on token \" ++ (show t))\nhappyError = parseError\n\ndata AST = Lit Int\n | Id String\n | Builtin String [AST]\n | UserDef String [AST]\n | IfThenElse AST AST AST\n | While AST AST\n | Do [AST]\n deriving Show\n\ntype MultiExpr = [AST]\n\ntype Prog = [AST]\n\n\nmain = do {\n s <- getContents;\n putStrLn (show (imp (alexScanTokens s)));\n}\n}");}
ruleToString :: Grammar -> String
ruleToString SET =" | '(' set var AST ')' {Builtin \"set\" [(Id $3), $4]}\n"
......@@ -143,29 +144,57 @@ ruleToString VAR = " | var { Id $1 }\n"
ruleToString INT = " | int { Lit $1 }\n"
ruleToString USERDEF = " | '(' var MultiExpr ')' {UserDef $2 $3}\n"
ruleToString (BINOP s) = " | '(' '" ++ s ++ "' AST AST ')' {Builtin $2 [$3, $4]}\n"
ruleToString (BUILTIN s1 s2) = " | '(' " ++ s2 ++ " MultiExp ')' { Builtin \"" ++ s2 ++ "\" $3}\n"
ruleToString (BUILTIN s1 s2) = " | '(' " ++ s2 ++ " MultiExpr ')' { Builtin \"" ++ s2 ++ "\" $3}\n"
ruleToString (CUSTOM s1 s2) = undefined
-- | happy %token declarations needed by grammar forms that introduce
-- new terminal symbols; Nothing for forms already covered by the
-- fixed parser header.
newTokens :: Grammar -> Maybe String
newTokens g = case g of
  BINOP sym      -> Just ("'" ++ sym ++ "'" ++ "\t{ TokenOp $$ }\n")
  BUILTIN _ name -> Just ("'" ++ name ++ "'" ++ "\t{ TokenBuiltin $$ }\n")
  -- An earlier variant emitted the bare name with a capitalised Token
  -- constructor instead of a quoted terminal:
  --   Just (name ++ "\t{ Token" ++ ((toUpper (head name)):(tail name)) ++ "}\n")
  _              -> Nothing
-- | alex lexing rules for grammar forms that introduce new lexemes;
-- Nothing when the stock lexer rules already handle the form.
lexerTokens :: Grammar -> Maybe String
lexerTokens g = case g of
  BINOP sym      -> Just (concat ["\"", sym, "\"\t{ \\s -> TokenOp s }\n"])
  BUILTIN _ name -> Just (concat ["\"", name, "\"\t{ \\s -> TokenBuiltin s }\n"])
  _              -> Nothing
-- | Render the first grammar alternative without the leading " | "
-- that 'ruleToString' prepends: happy writes the first alternative of
-- a non-terminal without the pipe.
--
-- Uses the total 'drop 1' instead of the partial 'tail' so an
-- unexpectedly empty rule string cannot crash the generator.
printFirstRule :: Grammar -> String
printFirstRule g = drop 1 (dropWhile (== ' ') (ruleToString g))
makeParser :: [Grammar] -> IO ()
makeParser g = do {
writeHeader;
putStrLn (printFirstRule (head g));
putStr (foldl (++) [] (map ruleToString (tail g)));
writeFooter;
-- | Prepend the contents of a Maybe String (when present) onto the
-- accumulator; Nothing leaves the accumulator untouched.  Used as a
-- fold to concatenate the Just results of newTokens/lexerTokens.
maybeAppend :: String -> Maybe String -> String
maybeAppend acc = maybe acc (++ acc)
-- | Emit the complete happy parser specification for grammar @g@ to
-- the parser output handle.  The lexer handle is accepted to match
-- the generator's call shape but is not written to here.
--
-- NOTE(review): 'head'/'tail' assume a non-empty grammar list; an
-- empty grammar crashes before any output is written — confirm
-- callers guarantee this.
makeParser :: [Grammar] -> Handle -> Handle -> IO ()
makeParser g lexerF parserF = do {
  -- Header takes the extra %token declarations contributed by g.
  writeHeader parserF (foldl maybeAppend [] (map newTokens g));
  -- First alternative is printed without the leading " | ".
  hPutStrLn parserF (printFirstRule (head g));
  -- concatMap avoids the O(n^2) left-nested (++) of foldl (++) [].
  hPutStr parserF (concatMap ruleToString (tail g));
  writeFooter parserF;
}
-- | Write the alex lexer specification to handle @h@: a fixed
-- prologue (wrapper, character classes, keyword rules), the
-- caller-supplied extra token rules @s@, then the fixed epilogue
-- (catch-all rules plus the Token datatype).
makeLexer :: Handle -> String -> IO ()
makeLexer h s =
  mapM_ (hPutStr h)
    [ "{\nmodule Lexer where\n}\n\n%wrapper \"basic\"\n\n$digit = 0-9\n$alpha = [a-z]\n$Alpha = [A-Z]\n\ntokens :-\n\n\t$white+\t;\n\tset\t{ \\s -> TokenSet }\n\tif\t{ \\s -> TokenIf }\n\twhile \t{ \\s -> TokenWhile }\n\tbegin \t{ \\s -> TokenBegin }\n"
    , s
    , " [\\+\\-\\/\\*] { \\s -> TokenOp s}\n [\\(] { \\s -> TokenLParen }\n [\\)] { \\s -> TokenRParen }\n\t$digit+\t{ \\s -> TokenInt (read s) }\n\t$alpha [$alpha $Alpha \\_]* { \\s -> TokenVar s}\n . { \\s -> TokenSym (head s)}\n{\n\ndata Token = TokenSet\n\t | TokenIf\n\t | TokenWhile\n\t | TokenBegin\n | TokenSym Char\n | TokenOp String\n | TokenBuiltin String\n | TokenPlus\n | TokenMult\n | TokenDiv\n | TokenMinus\n | TokenLParen\n | TokenRParen\n\t | TokenInt Int\n\t | TokenVar String\n\t deriving (Eq, Show)\n\n}\n"
    ]
main = do {
args <- getArgs;
s <- if length args == 1 then readFile (head args)
else getContents;
let tokes = lexer s [] Ignore
else (error "No file given");
lexerF <- openFile "Lexer.x" WriteMode;
parserF <- openFile (takeWhile ((/=) '.') (head args)) WriteMode;
let
tokes = lexer s [] Ignore
gram = grammar tokes in
do {
makeParser gram;
makeLexer lexerF (foldl maybeAppend [] (map lexerTokens gram));
makeParser gram lexerF parserF;
}
}
}
File added
This diff is collapsed.
{
module Main where
import Lexer
}
%name imp
%tokentype { Token }
%token
new { TokenBuiltin $$}
"check-expect" { TokenOp $$ }
"-" { TokenOp $$ }
"/" { TokenOp $$ }
"*" { TokenOp $$ }
"+" { TokenOp $$ }
if { TokenIf }
while { TokenWhile }
begin { TokenBegin }
set { TokenSet }
"(" { TokenLParen }
")" { TokenRParen }
int { TokenInt $$ }
var { TokenVar $$ }
"@" { TokenOp $$ }
"!" { TokenOp $$ }
"$" { TokenOp $$ }
"%" { TokenOp $$ }
"^" { TokenOp $$ }
"&" { TokenOp $$ }
"_" { TokenOp $$ }
"`" { TokenOp $$ }
"~" { TokenOp $$ }
"=" { TokenOp $$ }
"[" { TokenOp $$ }
"]" { TokenOp $$ }
"{" { TokenOp $$ }
"}" { TokenOp $$ }
":" { TokenOp $$ }
";" { TokenOp $$ }
"<" { TokenOp $$ }
">" { TokenOp $$ }
%error {parseError}
%%
Prog: AST Prog { $1:$2 }
| AST { [$1] }
AST: "(" set var AST ")" {Builtin "set" [(Id $3), $4]}
| "(" if AST AST AST ")" {IfThenElse $3 $4 $5}
| "(" while AST AST ")" {While $3 $4}
| "(" begin MultiExpr ")" {Builtin "begin" $3}
| "(" "+" AST AST ")" {Builtin $2 [$3, $4]}
| "(" "*" AST AST ")" {Builtin $2 [$3, $4]}
| "(" "/" AST AST ")" {Builtin $2 [$3, $4]}
| "(" "-" AST AST ")" {Builtin $2 [$3, $4]}
| "(" "check-expect" AST AST ")" {Builtin $2 [$3, $4]}
| int { Lit $1 }
| var { Id $1 }
| "(" var MultiExpr ")" {UserDef $2 $3}
| "(" new MultiExpr ")" { Builtin "new" $3}
MultiExpr : AST MultiExpr {$1:$2}
| AST {[$1]}
{
parseError :: [Token] -> a
parseError t = error ("Parse Error on token " ++ (show t))
happyError = parseError
data AST = Lit Int
| Id String
| Builtin String [AST]
| UserDef String [AST]
| IfThenElse AST AST AST
| While AST AST
| Do [AST]
deriving Show
type MultiExpr = [AST]
type Prog = [AST]
main = do {
s <- getContents;
putStrLn (show (imp (alexScanTokens s)));
}
}
This diff is collapsed.
......@@ -15,11 +15,13 @@ tokens :-
if { \s -> TokenIf }
while { \s -> TokenWhile }
begin { \s -> TokenBegin }
[\+\-\/\*] { \s -> TokenSym (head s)}
'+' { \s -> TokenPlus }
'-' { \s -> TokenMinus }
'/' { \s -> TokenDiv }
'*' { \s -> TokenMult }
"new" { \s -> TokenBuiltin s }
"check-expect" { \s -> TokenOp s }
"-" { \s -> TokenOp s }
"/" { \s -> TokenOp s }
"*" { \s -> TokenOp s }
"+" { \s -> TokenOp s }
[\+\-\/\*] { \s -> TokenOp s}
[\(] { \s -> TokenLParen }
[\)] { \s -> TokenRParen }
$digit+ { \s -> TokenInt (read s) }
......@@ -32,6 +34,8 @@ data Token = TokenSet
| TokenWhile
| TokenBegin
| TokenSym Char
| TokenOp String
| TokenBuiltin String
| TokenPlus
| TokenMult
| TokenDiv
......
all: AST.y Lexer.x
alex Lexer.x && happy AST.y && stack build
all: AST.y
happy AST.y && ghc AST.hs && ./AST Imp.ast && alex Lexer.x && happy Imp.y && ghc Imp.hs
clean:
rm -f Lexer.hs && rm -f AST.hs && stack clean
......
......@@ -13,12 +13,6 @@ build-type: Simple
-- extra-source-files:
cabal-version: >=1.10
library
hs-source-dirs: src
exposed-modules: Lib
build-depends: base >= 4.7 && < 5
default-language: Haskell2010
executable gg-proto
hs-source-dirs: .
main-is: AST.hs
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment