VHDL parser based on Scala parser combinators
object O extends App { | |
//https://gitter.im/tpolecat/atto?at=576313c2dfb1d8aa45a44a4f | |
import scalaz._, Scalaz._ | |
import atto._, Atto._ ; val char = atto.parser.character.char _ | |
//JVM hangs when https://gitter.im/tpolecat/atto?at=5762a24ddfb1d8aa45a415c7 | |
//(many(int).sepBy(many(whitespace))).parseOnly("10 = 20") | |
val QM = char('"') ; val id = token(for (head <- letter ; tail <- stringOf(letterOrDigit)) yield head + tail) ;id.parseOnly("abc1 d") | |
val stringLiteral = token(QM ~> (stringOf(notChar('"')) <~ QM).map('"' + _ + '"')) ; stringLiteral.parseOnly("\"abc\"") | |
val num = token(int) // token skips the post-token whitespaces | |
implicit def tokenize(c: Char) = token(char(c)) ; implicit def tokenize(s: String) = token(string(s)) | |
val term = id | stringLiteral | num ; val assignment = (id <~ "=") ~ term ; assignment.parseOnly("a = 1") | |
def par[A](p: => Parser[A]) = bracket(tokenize('('), p, tokenize(')')).named(s"par(${p.toString})") | |
def cs[A](p: Parser[A]): Parser[List[A]] = sepBy(p, ',') ; val charLiteral = tokenize(''') ~> (anyChar <~ ''') | |
case class Func(val name: String, val args: List[Any]) ; case class PhysicalLiteral(val amount: Int, val unit: String) ; case class CharLiteral(val c: Char) | |
val func = id ~ opt(par(cs(term))) map {case (name, args) => Func(name, args.getOrElse(Nil))} | |
val physicalLiteral = (opt(num) ~ id) map {case (amount, unit) => PhysicalLiteral(amount.getOrElse(1), unit)}; | |
val enumLiteral = (id | charLiteral) ; val name = func | physicalLiteral | enumLiteral | |
//tokens.parseOnly("program A begin x = \"hello world\" end") | |
name.parseOnly("meter") // I would like func/physical unit/ enum literal ambiguity here | |
//implicit def tokenize(s: String): Parser[String] = token(string(s)) | |
object kw extends Enumeration { type kw = Value ; val begin, end = Value;} | |
implicit def parse_kw(keyword: kw.Value): Parser[String] = token(string(keyword.toString)) | |
parse_kw(kw.begin) | kw.end //OlegYch|h says that unlike combinators kw.begin | kw.end, atto needs parse_kw(kw.begin) because atto.| uses implicits. | |
} |
// TODO:
// wait on statements and signals for IPC. IRC says that this is a job for akka FSM or scalaz Task
// (akka bus permits registering on events)
// Implementation should be Bulk Synchronous Parallel!
// Look at parprog week3. It describes parallel ParTrieMaps.
// That might come in handy for the scheduler: we make a snapshot
// and then every process can add its events to it.
// How do we find out which event comes next?
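// A minimal sketch (not wired into the code below) of that snapshot idea with a concurrent TrieMap:
// the scheduler keeps pending signal events in a TrieMap, every process adds its events concurrently,
// and readOnlySnapshot() gives a cheap, consistent view for picking the next event. Names are hypothetical.
//   import scala.collection.concurrent.TrieMap
//   object SnapshotSchedulerSketch {
//     val pending = TrieMap[String, (Long, Any)]()               // signal -> (time, next value)
//     def schedule(signal: String, time: Long, value: Any) = pending.put(signal, (time, value))
//     def nextEventTime: Option[Long] = {
//       val frozen = pending.readOnlySnapshot()                  // constant-time snapshot
//       if (frozen.isEmpty) None else Some(frozen.values.map(_._1).min)
//     }
//   }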
import scala.util.parsing.combinator._ | |
import scala.concurrent._ | |
import scala.concurrent.duration._ | |
import scala.concurrent.ExecutionContext.Implicits.global | |
object SimpleParser extends RegexParsers { | |
def num: Parser[Int] = """\d+""".r ^^ { _.toInt } | |
val string_literal = """"[^\"]*"""".r | |
object kw extends Enumeration { | |
val /*miscellenious ops*/ not, abs, /*mult operators*/ rem, mod, /*shift ops*/ sll, srl, sla, sra, rol, ror, /*relations in logical expr*/ and, or, xor, nand, nor, xnor = Value | |
val signal, shared, variable, constant, array, record, of, is, `new`, `type`, subtype, attribute, alias = Value | |
val configuration, entity, architecture, component, block, body, procedure, impure, pure, function, begin, end = Value | |
val process, postponed = Value; val inertial, transport, reject, force, release, after = Value | |
val on, until, `for`, `while`, generate, loop = Value; val wait_ = Value("wait") | |
val generic, map, port, parameter = Value; val `protected`, access = Value | |
val library, context, `package`, use, vunit, all, others, default, open, file, to, downto, range = Value | |
val in, out, inout, buffer, linkage = Value; val group, label = Value | |
val `if`, then, elsif, `else`, `case`, `with`, select, when = Value | |
val next, exit, `return`, `null`, unaffected = Value; val guarded, bus, register, disconnect = Value | |
val report, assert, severity = Value; val units, literal, sequence, property = Value | |
}; | |
implicit def stringify(keyword: kw.Value) = keyword.toString | |
implicit def parse_kw(keyword: kw.Value): Parser[String] = basic_identifier_including_KW { _ == keyword.toString } | |
def acceptIf[T](p: Parser[T])(acceptable: T => Boolean, failMsg: T => String) = | |
p flatMap {t => if (acceptable(t)) success (t) else (failure (failMsg(t)))} | |
//p flatMap {t => t |> (if (acceptable(t)) success _ else failMsg andThen failure _)} // Scalaz allows to factor if-else func out | |
def basic_identifier_including_KW(acceptable: String => Boolean) = | |
acceptIf("[a-zA-Z_][a-zA-Z_0-9]*".r)(acceptable, id => s"identifier '$id' is not acceptable here") | |
val keywords = kw.values.map(_.toString).toSet; | |
val basic_identifier = basic_identifier_including_KW(id => !keywords.contains(id)) | |
val identifier = basic_identifier | |
case class Declaration(name: String, `type`: String) | |
val declaration = identifier ~ ":" ~ identifier ^^ {case name ~ colon ~ taip => Declaration(name, taip)}; | |
val declarations = rep(declaration <~ ";"); case class Increment(seed: Any, delta: Any) | |
def expression: Parser[Any] = (identifier | num | string_literal) ~ opt("+" ~> expression) ^^ { | |
case seed ~ appendix => appendix.map(expr => Increment(seed, expr)).getOrElse(seed) | |
}; | |
class Statement; case class Report(expr: Any) extends Statement | |
case class VariableAssignment(target: String, value: Any) extends Statement | |
case class SignalAssignment(target: String, value: Any) extends Statement | |
val assignment = identifier ~ "=" ~ expression ^^ {case name ~ eq ~ value => VariableAssignment(name, value)} | |
val signal_assignment = identifier ~ "<=" ~ expression ^^ {case name ~ eq ~ value => SignalAssignment(name, value)} | |
val report = kw.report ~> expression ^^ Report | |
val statement = report | assignment | signal_assignment | |
case class Process(declarations: List[Declaration], sos: List[Statement]) { | |
def run(signals: Scheduler) = { | |
val variables = declarations.foldLeft(Map[String, Any]()){ | |
case (env, vbl) => env + (vbl.name -> null)} | |
val env2 = sos.foldLeft(Env(variables, signals, Map.empty)){case (env, statement) => statement match { | |
case VariableAssignment(target, value) => //println(target + " := " + value) | |
assert(env.variables.contains (target), "assignment to undeclared variable " + target) | |
env . vupdate (target, env.evaluate(value)) | |
case SignalAssignment(target, value) => //println(target + " <= " + value) | |
assert(env.scheduler.signals.contains (target), "assignment to undeclared signal " + target) | |
env . supdate (target, env.evaluate(value)) | |
case Report(expr) => println(env.evaluate(expr)); | |
env | |
}} | |
println(variables + " => " + env2.variables + ", scheduled updates = " + env2.schedule) | |
//env2.scheduler.synchronized {env2.scheduler.signals ++= env2.schedule} | |
env2 | |
}; | |
} | |
val process = kw.process ~> declarations ~ kw.begin ~ rep(statement <~ ";") <~ kw.end ^^ {case vars ~ begin ~ sos => Process(vars, sos)} | |
//schedule is signals' next values | |
case class Env(val variables: Map[String, Any], val scheduler: Scheduler, val schedule: Map[String, Any]) { | |
// TODO: If name is not found among variables, resort to signals. | |
// Read of signals must be simple, write needs some scheduling. | |
def apply(name: String) = variables.get(name).getOrElse(scheduler.signals(name)) | |
def contains(name: String) = variables.contains(name) | |
def vupdate(nameVal: Tuple2[String, Any]) = Env(variables + nameVal, scheduler, schedule) | |
def supdate(nameVal: Tuple2[String, Any]) = Env(variables, scheduler, schedule + nameVal) | |
def unquote(s: String) = { if (!s.startsWith("\"")) this(s) | |
else s.substring(1, s.length()-1)} | |
def evaluate(expr: Any): Any = expr match { | |
case i: Int => i | |
case strLit: String if strLit startsWith ("\"") => strLit | |
case name: String => this(name) | |
case Increment(seed, delta) => (evaluate(seed), evaluate(delta)) match { | |
case (a:Int, b: Int) => a + b | |
case (a:String, b: String) => "\"" + unquote(a) + unquote(b) + "\"" | |
case (a:String, b) => "\"" + unquote(a) + b + "\"" | |
} | |
case u => throw new Exception("We don't know how to evaluate " + u) | |
} | |
} | |
case class Scheduler(var signals: Map[String, Any]) { | |
} | |
case class Program(signalDeclarations: List[Declaration], processes: List[Process]) { | |
val sigValues = signalDeclarations.foldLeft(Map[String, Any]()){ | |
case (sigMap, sig) => sigMap + (sig.name -> null) | |
} ; val scheduler = Scheduler(sigValues) | |
// run processes concurrently. We still cannot set the signals and wait on them | |
def run() = {val futures = processes.map(proc => future{proc.run(scheduler)}) | |
val result = Await.result(Future.sequence(futures), Duration.Inf) | |
result.foreach(scheduler.signals ++= _.schedule) // analogous to the commented-out synchronized update above
println("signals = " + scheduler + ", variables = " + result.map(_.variables)) | |
} | |
} | |
val program = declarations ~ kw.begin ~ rep(process) ^^ {case signals ~ begin ~ processes => Program(signals, processes)} | |
} | |
import SimpleParser._ | |
object TestSimpleParser extends App { | |
/* | |
def recover[T](okMsg: String/*, shouldRecover: T => Boolean = _ => true*/): ParseResult[T] => ParseResult[_] = pr => pr match { | |
case f @ Failure(msg, rem) => Success(okMsg, rem) //if (shouldRecover(msg)) Success(okMsg, rem) else f | |
case e @ Error(msg, rem) => println("Recovering error " + msg); Success(okMsg, rem) //if (shouldRecover(msg)) Success(okMsg, rem) else f | |
case s @ Success(msg, rem) => Failure(s"we had to fail because $okMsg but succeeded with " + msg, rem); case a => a | |
} | |
def expect[T](parsers: Iterable[Parser[T]], input: String, expected: T) {parsers.foreach{p=>expect(p, input, expected)}} | |
def eq[T](p: Parser[T], expected: T) = p flatMap {res => if (res == expected) success(res) else failure(s"received $res != expected $expected")} | |
def expect[T](p: Parser[T], input: String, expected: T) {println(input + " => " + parse(eq(p, expected), input)) } | |
def expectStr[T](p: Parser[T], input: String, expected: String) {println(input + " => " + parse(eq(p ^^ {case t => t.toString}, expected), input)) } | |
def fail[T](parsers: Iterable[Parser[T]], input: String, recovery: ParseResult[T] => ParseResult[_]) {parsers.foreach{p => fail(p, input, recovery)}} | |
def fail[T](p: Parser[T], input: String, recovery: ParseResult[T] => ParseResult[_]) { println(input + " => " + recovery(parseAll(p, input))) } | |
expect(identifier, "abc", "abc") | |
fail(basic_identifier, "to", recover("to is reserved")) | |
expect(declarations, "", List()); expect(declarations, " a: int;", List(Declaration("a", "int"))) | |
expect(declarations, " a: int; b:boolean;", List(Declaration("a", "int"), Declaration("b", "boolean"))) | |
expect(assignment, "a = 10", VariableAssignment("a", 10))
expect(process, "process a: int; begin a = 10; b = a; end", Process(List(Declaration("a", "int")), List(VariableAssignment("a", 10), VariableAssignment("b", "a"))))
expect(report, """ report "abc" + "xyz"""", Report(Increment(""""abc"""", """"xyz""""))) | |
*/ | |
parse(process, """process a: int; b: boolean; | |
begin a = 10; b = a+2; report "abc:" + a; report "abc " + 10 + 20; end""") match { | |
case Success(process, rem) => assert(process.run(Scheduler(Map.empty)).variables | |
== Map("a" -> 10, "b" -> 12), "process response error") | |
} | |
parse(program, """a:int; b: boolean; begin | |
process v11: int; begin v11 = 1; b<=2; report "proc1, v11=" + ",a =" + a; end | |
process v21:boolean; begin v21=10; v21=v21+2; a<=1; report "proc2, v21=" + v21; end""") match { | |
case Success(p @ Program(signals, processes), rem) => | |
println("got " + signals.length + " signals and " + processes.length + " processes") | |
p.run() | |
case f:Failure => println(f) | |
} | |
} |
import Vhdl._ | |
// TODO: CharLiteral -> Char, StringLiteral -> String to save memory | |
object AllTests extends App { | |
def recover[T](okMsg: String/*, shouldRecover: T => Boolean = _ => true*/): ParseResult[T] => ParseResult[_] = pr => pr match { | |
case f @ Failure(msg, rem) => Success(okMsg, rem) //if (shouldRecover(msg)) Success(okMsg, rem) else f | |
case e @ Error(msg, rem) => println("Recovering error " + msg); Success(okMsg, rem) //if (shouldRecover(msg)) Success(okMsg, rem) else f | |
case s @ Success(msg, rem) => Failure(s"we had to fail because $okMsg but succeeded with " + msg, rem); case a => a | |
} | |
def takeAll[T](p: Parser[T], input: String) = {val pa = parseAll(p, input) ; println(input + " => " + pa) ; pa} | |
// takeAll(expression, "1") //takeAll(expression, "1") | |
def expect[T](parsers: Iterable[Parser[T]], input: String, expected: T) {parsers.foreach{p=>expect(p, input, expected)}} | |
//def eq[T](p: Parser[T], expected: T) = parseMap(p){case (s @ Success(t, _), in) => if (t == expected) s else Failure(s"received $t != expected $expected", in)} | |
def eq[T](p: Parser[T], expected: T) = p flatMap {res => if (res == expected) success(res) else failure(s"received $res != expected $expected")} | |
def expect[T](p: Parser[T], input: String, expected: T) {println(input + " => " + parse(eq(p, expected), input)) } | |
def expectStr[T](p: Parser[T], input: String, expected: String) {println(input + " => " + parse(eq(p ^^ {case t => t.toString}, expected), input)) } | |
def fail[T](parsers: Iterable[Parser[T]], input: String, recovery: ParseResult[T] => ParseResult[_]) {parsers.foreach{p => fail(p, input, recovery)}} | |
def fail[T](p: Parser[T], input: String, recovery: ParseResult[T] => ParseResult[_]) { println(input + " => " + recovery(parseAll(p, input))) } | |
def expectChar[T](parser: Parser[T], char: Char) = expect(parser, s"'$char'", CharacterLiteral(char)) | |
println("""TODO: decide if abc'a' corresponds to enum + char literals, http://cs.stackexchange.com/a/51497/42370""") | |
//expect(enumeration_literal, "abc'a'", "abc'a' must always fail") | |
//fail(string_literal, "\"this is string literal, illegimately followed by some crap\"abc", recover("should not accept \"string literal\"abc")) | |
List('a', '.', '\"').foreach (expectChar(character_literal, _)) | |
expect(enumeration_literal, "abc", "abc") ; expectChar(enumeration_literal, 'a') | |
expect(string_literal, "\"ap\"\"a\"\"\"", StringLiteral("ap\"a\"")) // check that double quote is escaped all over the string | |
expect(string_literal, "\"aa\"\"\"a\"\"\"", StringLiteral("aa\"")) // the parser exits early on an early closing quote
expect(extended_identifier, "\\aa\\\"\"a\"\"\\", "\\aa\\") // early exit | |
expect(extended_identifier, "\\aa\\\\ a\\\\bb\\", "\\aa\\ a\\bb\\") // every slash is escaped | |
expect(string_literal, "\"\"", StringLiteral("")) // empty string literals are allowed | |
fail(Seq(extended_identifier, identifier), "\\\\", recover("empty extended identifier must fail and it did")) | |
expect(Seq(extended_identifier, identifier), "\\1\\", "\\1\\") | |
expect(identifier, "a_a; a", "a_a") ; expect(identifier, "a_a= a", "a_a") | |
expect(identifier, "a", "a") // single-char id failed when [letter][letter]+ is mistakenly used | |
fail(basic_identifier, "to", recover("to is reserved")) | |
expect(kw.to, " to 1", "to") | |
fail(kw.to, "toabc", recover("prevented kw.to to match 'toabc'")) | |
expect(decimal_literal, "1_111", IntDecimal(1111)) | |
expect(abstract_literal, "1_111e5", BasedDecimal(1111, 10, 5)) | |
expect(abstract_literal, "1_111.1_11e5", BasedDecimal(1111111, 10, 2)) | |
expect(abstract_literal, "1_111.1_11e-5", BasedDecimal(1111111, 10, -8)) | |
expect(abstract_literal, "1_111.1_11e-5_1", BasedDecimal(1111111, 10, -54)) | |
expect(based_literal, "1_1#0000_000#", IntDecimal(0)) | |
//fail(abstract_literal, "1_1#00F0_000#", recover("parsing F at base 11")) // won't recover from exception | |
expect(abstract_literal, "10#5#", IntDecimal(5)) | |
expect(abstract_literal, "1_6#F_Ff.0033000#e-5_5", BasedDecimal(0xfff0033, 16, -59)) | |
expect(physical_literal, "10.10e5 sec", PhysicalLiteral(BasedDecimal(101, 10, 4), "sec")) | |
expect(numeric_literal, "16#00a0.0a00#e-3 sec", PhysicalLiteral(BasedDecimal(0xa00a, 16, -5), "sec")) | |
expect(enumeration_literal, "second", "second") ; expect(basic_identifier, "second", "second") | |
// test("physical_literal", physical_literal)("second", NoCheck) | |
//test("physical_literal", physical_literal)("second(100)", NoCheck) // LRM says that units can be arbitrary names, not only complex ones | |
def bsl_test(input: String, expected: String) = expect(bit_string_literal, input, StringLiteral(strip(expected))) | |
def bsl_fail(input: String) = try {bsl_test(input, ""); error(input + " had to fail")} | |
catch { case e: NumberFormatException if (e.getMessage.contains("cropped")) => println(input + " failed ok with " + e.getMessage)}
//try test("000", "C", "111") catch { case e: AssertionError if (e.getMessage.contains ("unknown radix C")) => println("parser generates exception properly when unknown radix is supplied to it ")} | |
bsl_test ("B\"\"", "") ; //bsl_test ("5O\"\"", "00000") ; //bsl_test("12UX\"F\"\"\"", "0000_1111_\"\"\"\"") ; | |
bsl_test ("B\"01011\"", "01011") ; bsl_fail("3B\"01011\"") ; bsl_test ("3B\"00011\"", "011") ; bsl_test ("10B\"01011\"", "0000001011") | |
bsl_fail ("3UB\"01011\"") ; bsl_fail ("3UB\"11011\"") ; bsl_test ("3UB\"00100\"", "100") ; bsl_test ("5UB\"100\"", "00100") ; bsl_test ("5UB\"010\"", "00010") | |
bsl_fail ("3SB\"01011\"") ; bsl_fail ("3SB\"11011\"") ; bsl_test ("3SB\"11100\"", "100") ; bsl_test ("5SB\"100\"", "11100") ; bsl_test ("5SB\"010\"", "00010") | |
bsl_test ("O\"103\"", "001000011") ; bsl_test ("7O\"103\"", "1000011") ; bsl_fail ("6O\"103\"") ; bsl_test("4O\"1f\"", "1fff"); | |
bsl_test("4O\"1F\"", "1FFF"); bsl_test("4O\"18\"", "1888"); bsl_test("7SO\"42\"", "1100010"); bsl_test("7SO\"32\"", "0011010") | |
bsl_test("X\"FFF\"", "1111_1111_1111") ; bsl_test ("O\"777\"", "111111111"); bsl_test ("X\"777\"", "011101110111") | |
bsl_test("B\"XXXX_01LH\"", "XXXX01LH") ; bsl_test("UO\"27\"", "010_111") ; bsl_test("UO\"2C\"", "010_CCC") ; bsl_test("SX\"3W\"", "0011_WWWW") ; | |
bsl_test("D\"35\"", "100011") ; bsl_test("12UB\"X1\"", "0000_0000_00X1") ; bsl_test("12SB\"X1\"", "XXXX_XXXX_XXX1") ; | |
bsl_test("12UX\"F-\"", "0000_1111_----") ; bsl_test("12SX\"F-\"", "1111_1111_----") ; bsl_test("12D\"13\"", "0000_0000_1101") ; | |
bsl_test("12UX\"000WWW\"", "WWWW_WWWW_WWWW") ; bsl_test("12SX\"FFFC00\"", "1100_0000_0000") ; bsl_test("12SX\"XXXX00\"", "XXXX_0000_0000") ; | |
bsl_fail("8D\"511\"" ) ; bsl_fail("8UO\"477\""); bsl_fail ("8SX\"0FF\"") ; bsl_fail("8SX\"FXX\"" ) | |
expect(expression, "a + 1.1 ** c or 'c' sll -e = 10x\"0F \" ", B_or(B_$plus("a", B_exp(BasedDecimal(11,10,-1), "c")), B_$eq(B_sll(CharacterLiteral('c'), U_$minus("e")), StringLiteral("001111 ")))) | |
expect(expression, "or input", U_or("input")) // reduce op | |
// LRM: “(and A) and B” and “A and (and B)” are legal, whereas the expression “and A and B” | |
// and “A and and B” are not. Similarly, “and (and A)” is legal, whereas “and and A” is not | |
//expect(name, "a.b.c.d'e'f'g(1*1)", List("a", Suffix("b"), Suffix("c"), Suffix("d"), Attr("e", null), Attr("f", null), Attr("g", "1*1"))) ; | |
expect(name, "a.\\b\\.'c'", List("a", Suffix("\\b\\"), Suffix(CharacterLiteral('c')))) | |
expect(call, "f(10)", List("f", List(IntDecimal(10)))) | |
expect(name, "f(10)", List("f", List(IntDecimal(10)))) | |
expect(name, "f(a=> 10)", "f") | |
expect(expression, "f(a=> 10)", Call("f", List(NamedAssociation("a",IntDecimal(10))))) | |
expect(expression, "f(a => 10, b => c(2 +2 ** 3))", Call("f", List(NamedAssociation("a",IntDecimal(10)), NamedAssociation("b",List("c", List(B_$plus(IntDecimal(2), B_exp(IntDecimal(2), IntDecimal(3))))))))) | |
expect(name, "f1(arg).term'attribute1(1+1)'attribute2(2)", List("f1", (List("arg"),Suffix("term")), Attr("attribute1",B_$plus(IntDecimal(1), IntDecimal(1))), Attr("attribute2",IntDecimal(2)))) | |
expect(name, "abc(x to y)(a downto b)", List("abc", DiscreteRange(true,"x","y"), DiscreteRange(false,"a","b"))) | |
expect(name, "<<constant .a.b.so.lu.te: string>>", "External(constant .(a,b,so,lu,te):SubtInd(string))") | |
expect(name, "<<variable ^.^.r.e.la.ti.ve: string>>.a.b.c", List("External(variable ^.2(r,e,la,ti,ve):SubtInd(string))", Suffix("a"), Suffix("b"), Suffix("c"))) | |
expect(name, "<<signal @lib.ra.r.y: res_func string>>", "External(signal @lib.ra.ry target:SubtInd(res_func string))") | |
expect(attribute_name, "abc.xyz'abc", List("abc", Suffix("xyz"), Attr("abc",null))) | |
expectStr(attribute_name, "abc.xyz'abc'cde(1)", "List(abc, Suffix(xyz), Attr(abc,null), Attr(cde,IntDecimal(1)))") | |
fail(attribute_name, "abc.xyz'abc.cde", recover("non-attribute was rejected since attribute required")) | |
takeAll(relation, "-1") | |
takeAll(relation, "-1-2") | |
fail(expression, "--1", recover("--1 is not supported even in Modelsim")) ; | |
fail(shift_expression, "-1/-1", recover("-1/-1 is not a supported expression even in Modelsim"))
expect(expression, "-1*2+3 mod 4 rem 5 /2 sla -1*2 > 1/ 2-3 ror 3 nor -1 < 9 sla 3*2+1", B_nor(B_$greater(B_sla(B_$plus(U_$minus(B_$times(IntDecimal(1), IntDecimal(2))), B_$div(B_rem(B_mod(IntDecimal(3), IntDecimal(4)), IntDecimal(5)), IntDecimal(2))), U_$minus(B_$times(IntDecimal(1), IntDecimal(2)))), B_ror(B_$minus(B_$div(IntDecimal(1), IntDecimal(2)), IntDecimal(3)), IntDecimal(3))), B_$less(U_$minus(IntDecimal(1)), B_sla(IntDecimal(9), B_$plus(B_$times(IntDecimal(3), IntDecimal(2)), IntDecimal(1)))))) | |
//fail(logical_expression, "1 nor 2 and 3") // either single nor or multiple ands and ors | |
//note that or is binary in "13 or 14" but it is unary in "1 * or 2" | |
expect(expression, "-1 * or 2+3 mod 4 rem 5 /6 sla -7*8**9 > 10/ 11-12 ror 13 or -14 < 14 sla 16*17+18 and -19*20 sll 21+22 > 23 ror 24", B_and(B_or(B_$greater(B_sla(B_$plus(U_$minus(B_$times(IntDecimal(1), U_or(IntDecimal(2)))), B_$div(B_rem(B_mod(IntDecimal(3), IntDecimal(4)), IntDecimal(5)), IntDecimal(6))), U_$minus(B_$times(IntDecimal(7), B_exp(IntDecimal(8), IntDecimal(9))))), B_ror(B_$minus(B_$div(IntDecimal(10), IntDecimal(11)), IntDecimal(12)), IntDecimal(13))), B_$less(U_$minus(IntDecimal(14)), B_sla(IntDecimal(14), B_$plus(B_$times(IntDecimal(16), IntDecimal(17)), IntDecimal(18))))), B_$greater(B_sll(U_$minus(B_$times(IntDecimal(19), IntDecimal(20))), B_$plus(IntDecimal(21), IntDecimal(22))), B_ror(IntDecimal(23), IntDecimal(24))))) | |
//expectStr(expression, "(-1 * or a.b'c(2)+f(3)) mod \\4 4\\ rem 5e-5 /\"6\" sla -'7'*8#7.7#e8 **9UX\"9\" > << constant .a.b.so.lu.te: string >>", """B_$greater(B_sla(B_$div(B_rem(B_mod(B_$plus(U_$minus(B_$times(IntDecimal(1), U_or(List(a, Suffix(b), Attr(c,IntDecimal(2)))))), List(f, List(IntDecimal(3)))), \4 4\), 5 * 10^-5), "6"), U_$minus(B_$times('7', B_exp(63 * 8^7, "000001001")))), External(constant .(a,b,so,lu,te):SubtInd(string)))""") | |
expect(expression, "(-1 * or a.b'c(2)+f(3)) mod \\4 4\\ rem 5e-5 /\"6\" sla -'7'*8#7.7#e8 **9UX\"9\" > << constant .a.b.so.lu.te: string >>", B_$greater(B_sla(B_$div(B_rem(B_mod(B_$plus(U_$minus(B_$times(IntDecimal(1), U_or(List("a", Suffix("b"), Attr("c",IntDecimal(2)))))), List("f", List(IntDecimal(3)))), "\\4 4\\"), BasedDecimal(5, 10,-5)), StringLiteral("6")), U_$minus(B_$times(CharacterLiteral('7'), B_exp(BasedDecimal(63,8,7), StringLiteral("000001001"))))), "External(constant .(a,b,so,lu,te):SubtInd(string))")) | |
// Statements: | |
println("---") | |
expect(assertion_statement, "assert \\3\\ + (8 sll 2e5) > \"101\" report \"assertion test\" severity NOTE", AssertionStatement(B_$greater(B_$plus("\\3\\", B_sll(IntDecimal(8), BasedDecimal(2,10,5))), StringLiteral("101")),Some(StringLiteral("assertion test")),Some("NOTE"))) | |
//((B_$greater(B_$plus("\\3\\", B_sll(IntDecimal(8), BasedDecimal(2,10,5))), StringLiteral("101"))~Some(StringLiteral("assertion test")))~Some("NOTE")) | |
expect(wait_statement, "wait on <<signal @lib.ra.r.y: int>> until time'now > 10 for x.y * (10 ns)", WaitStatement(Some(List("External(signal @lib.ra.ry target:SubtInd(int))")),Some(B_$greater(List("time", Attr("now",null)), IntDecimal(10))),Some(B_$times(List("x", Suffix("y")), PhysicalLiteral(IntDecimal(10),"ns"))))) | |
expect(wait_statement, "wait until time'now > 10", WaitStatement(None,Some(B_$greater(List("time", Attr("now",null)), IntDecimal(10))),None)) | |
fail(wait_statement, "wait until time'now 1 > 10", recover("rejected 'wait until 1 2 > 10'. Note that 1 1>10 is not a valid expression")) | |
expect(signal_assignment, "t <= transport unaffected", WaveformAssignment("t",("transport","unaffected"))) ; | |
val aggr1 = List("a", StringLiteral("111"), "b", (List(DiscreteRange(true,IntDecimal(1),B_$plus("k", IntDecimal(1)))),List("m", List(StringLiteral("000"))))) | |
//If the target is an aggregate, it must contain a variable name at each position. | |
expect(signal_assignment, "(a, \"111\", b, 1 to k+1 => m(\"000\")) <= (a, \"111\", b, 1 to k+1 => m(\"000\"))", WaveformAssignment(aggr1, List(aggr1))) | |
expect(signal_assignment, "t <= release out", ExpressionAssignment("t",Some("out"))) | |
expect(signal_assignment, "t'a(8) <= reject a.abc'a +5 inertial null, null, 3, 2 after x+y", WaveformAssignment(List("t", Attr("a",IntDecimal(8))),(Some(B_$plus(List("a", Suffix("abc"), Attr("a",null)), IntDecimal(5))),List(null, null, IntDecimal(3), WaveFormElement(IntDecimal(2),B_$plus("x", "y")))))) | |
expect(signal_assignment, "abc(x to y)(a downto b + 1) <= force in a+5", ExpressionAssignment(List("abc", DiscreteRange(true,"x","y"), DiscreteRange(false,"a",B_$plus("b", IntDecimal(1)))),("in",B_$plus("a", IntDecimal(5))))) | |
expect(signal_assignment, "\\t\\.t <= inertial 1+1 after 1 ns, waveform2 after 2 ns, null after time'now when a>b else 3 when c>d else 4 when r>m else blahblah", | |
WaveformAssignment(List("\\t\\", Suffix("t")),(None,List(List(WaveFormElement(B_$plus(IntDecimal(1), IntDecimal(1)),PhysicalLiteral(IntDecimal(1),"ns")), WaveFormElement("waveform2",PhysicalLiteral(IntDecimal(2),"ns")), WaveFormElement(null,List("time", Attr("now",null)))), B_$greater("a", "b"), List(IntDecimal(3)), B_$greater("c", "d"), List(IntDecimal(4)), B_$greater("r", "m"), List("blahblah"))))) | |
expect(signal_assignment, "t <= force out 1+1 when a>\\b\\(3) else 3 when c>d else 4 when r > m else blahblah", ExpressionAssignment("t",("out",List(B_$plus(IntDecimal(1), IntDecimal(1)), B_$greater("a", List("\\b\\",List(IntDecimal(3)))), IntDecimal(3), B_$greater("c", "d"), IntDecimal(4), B_$greater("r", "m"), "blahblah")))) | |
takeAll(selected_signal_assignment, "with a+1 select ? t <= reject 1 inertial 1 after 5, 2 after 10 when a to a+f'b, 33 after now, 44 after thau when b, 2 when b2, 3 when 1 to 4") | |
takeAll(selected_signal_assignment, "with a+1 select ? t <= force in 1 when a to a+f'b, 11 when b1, 22 when b2") | |
takeAll(selected_variable_assignment, "with a+1 select ? v := 1 when a to a+f'b, 11 when b1, 22 when b2") | |
takeAll(sequential_statement, "if a then wait on a; y <= 2; elsif b then c := 33; elsif e then dd <= 44 ; else assert b ; end if ;") | |
fail(sequential_statement, "case sel1 is when a => when b others c => null ; when d => null; end case; ", recover("`others` in a wrong position in case statement failed the parse as expected;")) | |
takeAll(sequential_statement, "case sel2 is when \"00\" to 3 1 to 8 => wait on w; y <= 2; when 'c' => e := e1; when others => null; end case; ") | |
takeAll(sequential_statement, "case sel3 is when a => null; end case ;") | |
takeAll(sequential_statement, "loop a := 1 ; b := 2 ; end loop;") | |
takeAll(sequential_statement, "while a>5 loop a := 1 ; b := 2 ; end loop;") | |
takeAll(sequential_statement, "for i in 1 to 10 loop a := 1 ; b := 2 ; end loop;") | |
takeAll(sequential_statement, "exit;") ; takeAll(sequential_statement, "exit when true > 1;") | |
takeAll(sequential_statement, "next;") ; takeAll(sequential_statement, "next when true > 2;") | |
takeAll(sequential_statement, "a: null;") ; takeAll(sequential_statement, "return;") ; takeAll(sequential_statement, "return a+b;") | |
// declarations | |
takeAll(signal_declaration, "signal a,b: string bus := 10+1") | |
takeAll(file_declaration, "file f1,f2: string_fle") | |
takeAll(file_declaration, "file f11,f22: string_fle open file_open_kind(abc) is \"file1\"+1 ") | |
//List(SignalDeclaration(a,SubtInd(string),Some(B_$plus(IntDecimal(10), IntDecimal(1)))), SignalDeclaration(b,SubtInd(string),Some(B_$plus(IntDecimal(10), IntDecimal(1))))) | |
takeAll(subprogram_specification, "procedure PLUS11") | |
takeAll(subprogram_specification, "procedure PLUS2 (A, b: Integer; signal c: out String)") | |
takeAll(subprogram_specification, "function PLUS3 (A, b: Integer; variable c: String) return INT") | |
takeAll(constant_declaration, "constant C1,C2: string := 1") | |
takeAll(variable_declaration, "signal S1,S2: string bus") | |
takeAll(type_definition, "b to 2.3 units ones; kilo = 1000 ones; \\mega\\ = 1000 kilo ; end units") | |
takeAll(type_definition, "(a, 'b', \\cc\\, 'd')") | |
takeAll(type_definition, "range 1 to 1.1") | |
takeAll(type_definition, "array (string range <>, int range <>) of byte") | |
takeAll(type_definition, "array (a to b, b to 1.1) of byte") | |
takeAll(type_definition, "record a,b,c: res_func string; x: type1 end record") | |
val attribute_spec = "attribute ATTR_SIMPLE_NAME of Ent1 arg1,arg2 return ret3, Ent2: entity is 1+1" | |
takeAll(attribute_specification, attribute_spec) | |
takeAll(type_definition, s"protected use a,b,c; $attribute_spec end protected") | |
takeAll(type_definition, "file of abc") | |
takeAll(type_definition, "access element_type range 1 to 10") | |
takeAll(type_definition, "protected body variable a: string := 1; variable c: string := 2; end protected body") | |
takeAll(alias_declaration, "alias 'c' is a arg1, arg2 return n'n1 ") | |
takeAll(component_declaration, "component COMP1 is generic (a,b: INT; constant C1: out STR) port (a: INT) end component C1") | |
takeAll(component_declaration, "component COMP1 is port (a: INT) end component C1") | |
takeAll(subtype_declaration, "subtype SHORT_STR is string(1 to 10)") | |
takeAll(type_declaration, "type STRING is range 1 to 10") | |
takeAll(package_instantiation_declaration, "package PKG1 is new NEW_NAME'nn generic map (1,a, c=>cc)") | |
takeAll(attribute_declaration, "attribute ATTR1 : STRING") | |
takeAll(configuration_specification, "for others : COMP_ALL use entity E1" ) | |
takeAll(configuration_specification, "for L1, L2, L3 : COMP1 use entity E1(arch1) generic map (1,g=>gg) port map (2,p=>pp); use vunit VU1, VU2; use vunit VU3, VU4 end for" ) | |
takeAll(group_template_declaration, "group GRUOP_TEMPLATE_1 is (entity, architecture <>, configuration)") | |
takeAll(group_declaration, "group GRUOP1: TEMPLATE1'x(12) (a,b,'c')") | |
takeAll(component_instantiation_statement, "component COMP1 generic map(1,b)") | |
takeAll(component_instantiation_statement, "configuration CONF1 generic map(1,c) port map(2,rr=>r)") | |
takeAll(component_instantiation_statement, "entity Entity1'abc(3)(1 to 8)(arch1) generic map(1,c) port map(2,rr=>r)") | |
takeAll(process_statement, "process (all) begin end process") | |
takeAll(process_statement, """ | |
postponed process (a,b(3),c'c) | |
variable a: string; constant c: string := c1; | |
begin a := 1; if x'x then null; else b <= 2+3; end if; | |
wait on cc'ee until x = y; | |
end process""".replaceAll("(\\s)+", " ")) | |
takeAll(concurrent_signal_assignment_statement, "postponed a <= 3+3 ") | |
// TODO: test subprograms, block statements and Design Units | |
} |
The long-standing question is whether we parse and elaborate in a single pass, or elaborate lazily,
on demand, which seems to be the Zamia approach for building large libraries quickly. Real VHDL also
seems to elaborate top-down, but it uses a third, intermediate analysis step. We could probably do
the analysis in a single pass together with parsing and leave type checking to elaboration.
// What is vhdl analysis: https://electronics.stackexchange.com/questions/286688
// Demo interpreter (we do not need AST but look at the scopes) http://www.codecommit.com/blog/scala/formal-language-processing-in-scala
-4. File order: analyze files in the specified order if one is given, and compile the rest detecting the order automatically.
We need the order detection anyway for incremental recompilation. What if the user-specified list contradicts our detection?
-3. I have discovered how to unwrap ParserCombinators from the wrapping parser class, to be used with the Scala interpreter
https://gitter.im/lihaoyi/fastparse?at=576be15f2554bbe049bb4405
-1. parser combinator test http://henkelmann.eu/2011/01/29/an_introduction_to_scala_parser_combinators-part_3_unit_tests
-1. Example of a lexer with position tracking https://enear.github.io/2016/03/31/parser-combinators
0. To hell with the combinators. They do not check themselves at compile time, they routinely fail with stack overflow at runtime, and that situation is terribly painful to debug. Furthermore, they do not support left-associative operators, http://stackoverflow.com/a/22650533/4563974, and we have to write opt(a) ~ ab | success(None) ~ ab instead of simply opt(a) ~ ab. The combinators just cannot backtrack. They cannot look ahead http://stackoverflow.com/a/34537639/4563974.
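A small aside: a left-associative chain can at least be written without left recursion using chainl1 from scala-parser-combinators. This is a minimal, standalone sketch (not part of the gist's parsers):

import scala.util.parsing.combinator.RegexParsers
object LeftAssocSketch extends RegexParsers {
  val num: Parser[Int] = """\d+""".r ^^ { _.toInt }
  // chainl1 folds the operands left to right, so "8 - 3 - 1" evaluates as (8 - 3) - 1 = 4
  val minus: Parser[Int] = chainl1(num, "-" ^^^ { (a: Int, b: Int) => a - b })
}
// LeftAssocSketch.parseAll(LeftAssocSketch.minus, "8 - 3 - 1")  // Success(4, ...)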
An AST makes sense in the first place only if we re-evaluate it when its dependencies change. However, if semantic
analysis is built right into the parser, we will have to re-parse anyway, which makes keeping an AST pointless.
In principle, the Scala plugin developers say that most of the time goes into compilation rather than parsing, so we
could try to drop the AST. On the other hand, in cases where ambiguities are admissible, we could try to replace the
|-combinator with ||, so that a PEG parser http://blog.reverberate.org/2013/09/ll-and-lr-in-context-why-parsing-tools.html
returns all acceptable alternatives instead of the first one that fits, for later disambiguation at the semantic level,
which I proposed to the atto authors https://gitter.im/tpolecat/atto?at=5761bce9dfb1d8aa45a3cf66.
The latter is necessary for context-sensitive parsing (if elaboration occurs right away) because there are too many
context-sensitive constructions. It is not only `a(b)`: `physical_literal = [ num ] unit` is indistinguishable from an
enum literal when num is omitted, as well as from an argument-free function call, and name[(args)] is indistinguishable
from an unnumbered physical literal and from an enum literal. And these are only the literals. What a mess will it be
once identifiers are enabled? A pure, unelaborated AST is meaningless because we do not know whether
f(1) is a function call, or we call f and index the result.
Work out f(1) when f is overloaded as f and f(int) in two cases: 1) f returns an array and 2) f(int) returns an element of the array.
atto is said to be good at semantic actions: "In fact, it's a monad. This allows the result of one parser to influence the behavior of subsequent parsers. Here we build a parser that parses an integer followed by an arbitrary string of that length." I am not sure about position tracking and error reporting.
I learned that in the combinators, semantic sensitivity = Parser.flatMap http://stackoverflow.com/a/6317906/6267925
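A minimal sketch of that flatMap-based sensitivity with scala-parser-combinators (a hypothetical length-prefixed format, not the gist's grammar): the value produced by one parser decides what the next parser accepts.

import scala.util.parsing.combinator.RegexParsers
object LengthPrefixedSketch extends RegexParsers {
  val count: Parser[Int] = """\d+""".r ^^ { _.toInt }
  // the parsed count determines how many letters the following parser must consume
  val payload: Parser[String] = count.flatMap(n => repN(n, "[a-z]".r) ^^ { _.mkString })
}
// LengthPrefixedSketch.parseAll(LengthPrefixedSketch.payload, "3abc")  // Success("abc", ...)
// LengthPrefixedSketch.parseAll(LengthPrefixedSketch.payload, "3ab")   // Failure(...)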
If we give up the AST, how will we show the Contents view? And if we keep things ambiguous, how do we show the Contents?
A way out: I see at http://www.cakesolutions.net/teamblogs/genetic-programming-in-scala that a symbol map is simply a
mapping name -> value. Maybe it would do as an intermediate option: do we parse without elaboration, or elaborate
right away? Without elaboration we will still have to resolve references to other design units in other files,
which means dependencies between units are created anyway.
Consider parse-time elaboration, since VHDL is designed for single-pass compilation (I decided on parse-time analysis,
which establishes refs but does not type-check anything). This may also be advantageous for performance, since we eliminate
the need for building a temporary AST and we exploit
the cache. It is much faster to build the parsed structure right away than to restore the AST from the cache.
Elaboration of the current module may however affect other modules, which takes much longer. Can
we defer it? We will build the IG right away and defer the lengthy tasks, like type checking and re-elaborating dependent stuff,
for later, instead of deferring the IG build. The only issue in this case is dealing with an incomplete program for auto-completion.
Eclipse: For example, in the JDT plug-ins, the reconciler performs a parse of the class as the user makes changes. This parser gathers enough information to update the Java model, allowing accurate content assist, refactoring, and other common operations. The same happens in SDT: Scala does not remove errors in other files when they become obsolete after fixing the current file.
Simon says that not only is type checking done in a second pass after parsing, but the dependencies are established by the type checker. The code
is generated (elaborated) in one more pass https://gitter.im/scala-ide/scala-ide?at=573af96a0cb634927f7f95ea.
The AST is needed for the Outline (file structure). Also, only the latest design unit should take effect in VHDL. It makes no sense to elaborate them all, and the same design unit at the top of the file is therefore allowed to contain semantic errors. Right?
Advice from Freenode #eclipse: make the Outline asynchronous ("Updating" while it refreshes) -- this is better than forcing a refresh when the user clicks. As I understand it, Eclipse itself does not update the positions of syntactic items while the user is typing.
1. Aliases for objects, types and procedures. Try object = value + alias, http://programmers.stackexchange.com/q/309956 | |
2. Compiled simulator ([javassist-based](http://www.javaranch.com/journal/200711/creating_java_classes_runtime_expression_evaluation.html)?) | |
2.2 Multithreaded simulation. Determine which signals need to be delivered to other processes. Synthesis could do that. Synthesis also mostly eliminates
the scheduling from the combinational circuits and even enables lazy evaluation. Lazy evaluation, however, would need the past history.
But (FSM) actors could write a parallel, partitioned log, which could be useful for lazy evaluation. However, logging everything is probably overkill
- a waveform logs only the interesting parts.
- Parallelism with actors. When I looked at Modelsim a couple of years ago, it ran a single thread. Similarly, GHDL is still
single-threaded. Meanwhile, a parallel scheduler would simply be an actor per process. It would wait for a `start(events)`
message, update its cache of the updated signals and run. The process would maintain the scheduler and the current value
for the signals it drives. Therefore, it should also accept read(signal) requests from other processes, which may
cache the current value or, in the case of arrays, parts of it. In the end, the process sends its next earliest time to the main
scheduler. The main scheduler may be hierarchical to avoid congestion when all processes finish simultaneously. When all processes
terminate, the main scheduler sends a `start(schedulers)` message. Schedulers deliver it downwards to the source processes, which
pong the event sources upwards. Hierarchical schedulers merge the events up to the top scheduler, which finally sends
`start(events)` downward and the cycle repeats. This needs `cycle done -> your schedule is earliest -> here is my event -> start`,
i.e. 2 full round trips. It would probably be easier and simpler to implement with only a single round trip if we send the earliest
event along with the `round_done` message. But that would probably not be as efficient if an early, large event is
proposed in one of the processes, which will be throughput-demanding if that event ends up not being scheduled, many times over. However, taking
into account that, when accepted, that event will need to be propagated to all the child processes, the overhead is not
supposed to be that big. Moreover, the top may memorize the large events so that they do not need to be retransmitted, or demand the
2 round trips only for the large events. Actually, round trips for the large events could then be avoided altogether. Also, other processes
need not know about (updates of) values which never leave a given process. If you know the end processes, you may send the `here is my
event` message right to them and they will forward an empty value upwards for in-order guarantees. (A rough actor sketch follows this item.)
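A very rough Akka sketch of that round (message and class names are mine, not an actual design): each process actor runs on start(events) and reports its next earliest event time to the main scheduler, which acts as the barrier and kicks off the next cycle once every process has answered.

import akka.actor.{Actor, ActorRef}

case class Start(events: Map[String, Any])      // signal -> new value for this delta cycle
case class RoundDone(nextTime: Option[Long])    // earliest event this process wants to schedule, if any

class ProcessActor(scheduler: ActorRef) extends Actor {
  def receive = {
    case Start(events) =>
      // run the sequential statements against the updated signal cache here (omitted)
      val myNext: Option[Long] = None           // next earliest time this process schedules
      scheduler ! RoundDone(myNext)
  }
}

class MainScheduler(processes: Seq[ActorRef]) extends Actor {
  var answers = List.empty[Option[Long]]
  def receive = {
    case RoundDone(next) =>
      answers ::= next
      if (answers.size == processes.size) {     // barrier: every process finished the delta cycle
        val earliest = answers.flatten.reduceOption(_ min _)
        answers = Nil
        // here the merged events for `earliest` would be gathered and broadcast:
        processes.foreach(_ ! Start(Map.empty))
      }
  }
}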
We probably need barrier synchronization for every VHDL delta cycle and registration for event listeners.
A process interested in some signal events has a chance to (un)register for them after all processes have stopped
at the barrier, if they have not done so already. That is, processes synchronized on the clock alone would register only
once, whereas fancy processes which listen to some signals and then wait on others may re-register.
How do we distribute the clock event among millions of processes? An H-bridge/tree?
- Compiling into CUDA is not an option since it is SIMD. Wrong: it is more powerful than SPMD (MIMD with a fixed number of processes). CUDA is actually more powerful than we need since it allows for any number of threads https://books.google.ee/books?id=3b63x-0P3_UC&lpg=SL1-PA22&ots=Nzik60xcQ5&dq=spmd%20cuda&pg=SL1-PA23#v=onepage&q=spmd%20cuda&f=true
3. Incremental elaboration at the module level. If compilation of a module (design unit) demands an unelaborated module,
compile that one first and proceed with elaboration of the current one. Collect dependencies and use the collection for re-elaboration.
Parents need to be updated when children are elaborated. This means that we do not even need to wait for the children.
3.2 Packages are like modules, but instead of entities they export types, constants and functions.
3.3 Learn from http://www.scala-sbt.org/0.13/docs/Understanding-Recompilation.html. They say that all the complexity is due to
the need to re-elaborate the parents. This needs to be done when the API changes. An interface change is checked easily:
just print the canonical signatures and hash them (to save memory); see the sketch after this item.
The problem is that we need to rebuild all units that import the package or the entities. In the case of entities, we know which
entities are expected. The interface of an entity is constituted by its port and generic declarations. Importing packages creates
a lot of fine-grained dependencies, which may demand a lot of maintenance information. Scala seems to manage deps at the
compilation-unit level. But, to reduce unnecessary rebuilds, it is advised to check whether users actually use the added/removed/edited objects.
JDT compares the .class file structure to detect differences and recompiles dependents if they appear, http://aosabook.org/en/eclipse.html
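A minimal sketch of the "canonical signature + hash" idea (the interface classes here are hypothetical, not the gist's AST):

import java.security.MessageDigest

case class PortDecl(name: String, mode: String, subtype: String)
case class EntityInterface(name: String, generics: List[PortDecl], ports: List[PortDecl])

object InterfaceHash {
  // canonical, whitespace-free rendering of the interface, then a compact digest of it;
  // dependents are re-elaborated only when this digest changes
  def canonical(e: EntityInterface): String =
    e.name + "|" +
      e.generics.map(g => s"${g.name}:${g.mode}:${g.subtype}").mkString(",") + "|" +
      e.ports.map(p => s"${p.name}:${p.mode}:${p.subtype}").mkString(",")
  def digest(e: EntityInterface): String =
    MessageDigest.getInstance("SHA-1").digest(canonical(e).getBytes("UTF-8"))
      .map("%02x".format(_)).mkString
}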
3.4 We can apply the process parser if the reconciler realizes that only a single process was changed, as per https://en.wikipedia.org/wiki/Incremental_compiler#Imperative_programming. If the process parser finishes without errors, we can replace the old process with
the new one and copy the state variables if the simulator is running (https://en.wikipedia.org/wiki/Incremental_compiler#Interactive_programming).
A facility for manually entering the current values of signals would be very convenient here. If a new signal was added during a hot swap of the
simulation, it has to be initialized correctly for the current moment. Admittedly, the changes required for that are larger in scope than just
within a single process.
If a process/instantiation/block was just deleted, delete it from the model. If text was added into an architecture body, try all 3 parsers.
3.5 This incremental model should enable a fair amount of autocompletion. However, true auto-completion needs to see the
partially elaborated model (its declarations). That is, the parser must produce not an error vs. a compiled unit, but both.
5 A VHDL package is a module which does not have any structure and cannot declare signals (or can it, since 2008?).
6 Get rid of the ugly buildpath. In Eclipse, options will come from the preferences. We can create the project with build options specified in the buildpath. But the options are parsed before the project is created. The file should be virtual and irrelevant for the compiler.
6.1 Modelsim does not specify the top level for elaboration, only for simulation, right? (Below I discuss that the Zamia approach is to
rebuild only the modules required under the top level, whereas the other environments seem to analyze dependencies between all units.)
7. Manage context sensitivity. VHDL is context sensitive. It is not possible, for instance, to determine what 'foo(x)' means:
a call to a function, indexing an array, or a type conversion. The actual meaning of this statement depends on how foo and
x are declared in the context of the statement, hence context sensitive, as http://essay.utwente.nl/66066/1/main.pdf says in
chapter 2.1.2. It proposes 5-pass compilation. I think that we could resolve the references in the first pass. As soon as the ref
is resolved, we could update the abstract function foo, updating the AST by converting the abstract function call to the concrete
subtype to expedite subsequent requests. Actually, the resolved declaration will serve as a cache itself (see the sketch after this item).
It may actually be fine that a function call, indexing and type conversion are all indistinguishable, since they all act like a function that converts
one value into another value. We just need to check whether this entails any complexity. One-pass (semantic) parsing will need re-parsing
on dependency updates. Otherwise, we could just re-evaluate the meaning(s).
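A sketch of that resolution step (all class names are hypothetical): once foo's declaration is known, the ambiguous apply node is rewritten into its concrete form, and the resolved declaration itself acts as the cache.

object ResolveSketch {
  sealed trait Decl
  case class FunctionDecl(name: String) extends Decl
  case class ArrayDecl(name: String)    extends Decl
  case class TypeDecl(name: String)     extends Decl

  sealed trait Node
  case class Apply(name: String, arg: Node)   extends Node   // "foo(x)" before resolution
  case class Ident(name: String)              extends Node
  case class Call(f: FunctionDecl, arg: Node) extends Node
  case class Index(a: ArrayDecl, arg: Node)   extends Node
  case class Convert(t: TypeDecl, arg: Node)  extends Node

  def resolve(n: Node, scope: Map[String, Decl]): Node = n match {
    case Apply(name, arg) => scope.get(name) match {
      case Some(f: FunctionDecl) => Call(f, resolve(arg, scope))
      case Some(a: ArrayDecl)    => Index(a, resolve(arg, scope))
      case Some(t: TypeDecl)     => Convert(t, resolve(arg, scope))
      case None                  => n                          // unresolved; report during elaboration
    }
    case other => other
  }
}
// ResolveSketch.resolve(ResolveSketch.Apply("foo", ResolveSketch.Ident("x")), Map("foo" -> ResolveSketch.ArrayDecl("foo")))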
8 [DynamicVariable-based logging](http://stackoverflow.com/questions/5116352). No need to drag a logger along in every function call. The logger's error method could raise an exception.
9 Store strings (vectors) encoded rather than as 2 bytes per element. For binary vectors we would achieve 16x compression, for multi-valued ones 4x. It is worth doing since there are a lot of vectors in an RTL design. But how can we, if we maintain an object per element? (See the packing sketch below.)
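A packing sketch for note 9, assuming the usual 9-valued std_logic alphabet: 4 bits per element gives the 4x saving over 2-byte chars, and plain '0'/'1' vectors could go further down to 1 bit.

object PackedVectorSketch {
  private val values = "UX01ZWLH-"                 // 9 std_logic values fit in 4 bits
  def pack(v: String): Array[Byte] = {
    val out = new Array[Byte]((v.length + 1) / 2)  // two elements per byte
    for (i <- v.indices) {
      val code = values.indexOf(v(i))
      out(i / 2) = (out(i / 2) | (code << ((i % 2) * 4))).toByte
    }
    out
  }
  def unpack(bytes: Array[Byte], length: Int): String =
    (0 until length).map(i => values((bytes(i / 2) >> ((i % 2) * 4)) & 0xF)).mkString
}
// PackedVectorSketch.unpack(PackedVectorSketch.pack("XZ01"), 4)  // "XZ01"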
10 Try to elaborate Binary/Unary operations. How is the syntax mapped to them?
11 Elaborate types into runtime-generated Java classes. Would method overloading work here?
Try simple languages in Eclipse and, like Papa Carlo, visualize the dependency graph at run time, so that the dependencies
are in plain view while you type. Below we note that Zamia follows a top-down approach -- a quick search for design units (fragments),
thanks to the fact that in VHDL everything starts with entity/arch/package NAME, so the beginnings of the fragments can always be located
in order to launch the parser from there. That is what should be visualized first of all. We find out which units are in which files
and compile the needed units (not files).
- L -> a+ 1+ c+ | a+ 2+ c+ // exactly like this, so that replacing 1 -> 2 selects a different production or language.
The catch is that the incremental parser must grasp that the parent production also changes when 1 changes to 2.
- L -> aaa | a+, so that there is always one production and only in one case does the colour change to another one.
- a^n b^m c^n // the program recognizes that it is correct when we edit a and a^n matches the number of c's, and declares a failure
otherwise. Can programs of this kind be handled with google.com/search?q=papa+carlo+parser? I see that one context
affects distant ones. They say that incremental compilation is possible thanks to fragments of the form architecture-end,
process-end, inside of which there can be anything, including errors http://lakhin.com/projects/papa-carlo/#fragments-definition.
It seems to me that there can also be garbage between units: pkg P .. end pkg; some garbage garbage garbage entity E .. end entity;
We can start parsing from the first keyword. Fortunately they are not nested: one entity cannot be defined inside another.
Where a correct entity ends, garbage may begin. That does not bother us; we look for where the next design unit is. When editing
in a garbage area, between units, we should rewind a little to capture the whole word, in order to check whether it is a keyword,
as I did in ref hyperlinking. In this connection, try the Eclipse partitioner, assigning one keyword to one partition and everything
else to another. When some letter is deleted in the middle of the gibberish, the text turns into a keyword. Will our partitioner
figure that out; will it go back and rescan from the start to establish this fact?
It looks like Eclipse provides token-level support, where a quotation mark can cross out a syntactic block by swallowing it into a string.
And it is from these tokens that the fragments are built, as Papa Carlo says!
- assignment -> alpha+ = expr ; expr -> (alpha+["("expr (, expr)*")"] | num+) [(+|-) expr]
; func -> alpha+"("alpha+ (, alpha+)")* body ; body -> assignment* "return" expr
- expr -> (alpha+ | num+) [+/- expr] ; ("signal" alpha+)* ("process" alpha+ "=" expr "end")
- once this is done, partition the program into the declaration-expression blocks and parse the blocks incrementally (a throwaway parser sketch for the second toy grammar follows).
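A throwaway RegexParsers sketch of the second toy grammar above, for experimenting with fragment-based incremental parsing (not part of the VHDL parser):

import scala.util.parsing.combinator.RegexParsers
object ToyFragmentSketch extends RegexParsers {
  // expr -> (alpha+ | num+) [(+|-) expr]
  def expr: Parser[Any] = ("[a-z]+".r | "[0-9]+".r) ~ opt(("+" | "-") ~ expr)
  // ("signal" alpha+)* ("process" alpha+ "=" expr "end")
  val signal  = "signal" ~> "[a-z]+".r
  val process = "process" ~> "[a-z]+".r ~ ("=" ~> expr) <~ "end"
  val program = rep(signal) ~ process
}
// ToyFragmentSketch.parseAll(ToyFragmentSketch.program, "signal a signal b process p = a + 1 end")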
The incremental compiler needs backward references: whenever some declaration is updated, its dependents must be retargeted and, probably,
re-elaborated. Keeping two references instead of one might be slower (much slower if we update the declaration's back-reference in ZDB
whenever it is referenced). Therefore, in addition to the incremental IDE compiler, there must be a single-run batch compiler for
building and simulation. The latter should be much simpler and faster. A full rebuild may be necessary to ensure that the incremental
compiler composed the fragments correctly (Pluto speaks of internal and total consistency).
On the other hand, the backward references are useful for search (where is that signal used?) even when used with the batch compiler.
The backward links would be needed for renaming. One problem with renaming is that some design units may be inactive, not
elaborated.
Simon says that the editor needs the tokens even after compilation. Indeed, resolved references are better accompanied by the AST name from
which they were created, at least for rename. For rename we need back links, although JDT apparently does not store them. They presumably take
up a lot of space -- they would double the size of the design, which consists almost entirely of references. Therefore, in those rare cases
when they are needed, they are simply searched for.
Recompiling dependent units and file-level re-parsing may be simpler (hash the non-whitespace token stream, which constitutes a unit; see the sketch below). Alternatively, include whitespace and comments in the document, as in the MS Roslyn approach,
so that in the text editor we can see that nothing has changed when the user
only edited whitespace or a comment, and in that case merely update the offsets of the elements.
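A sketch of hashing the non-whitespace token stream of a unit (tokenization here is deliberately crude and hypothetical; a real version would reuse the parser's lexer):

import scala.util.hashing.MurmurHash3
object UnitHashSketch {
  // strip "--" comments, split on whitespace; whitespace/comment-only edits keep the hash stable
  def tokens(unitText: String): Seq[String] =
    unitText.replaceAll("--[^\n]*", " ").split("\\s+").filter(_.nonEmpty).toSeq
  def unitHash(unitText: String): Int = MurmurHash3.orderedHash(tokens(unitText))
}
// UnitHashSketch.unitHash("entity E is end; -- a comment") == UnitHashSketch.unitHash("entity  E  is  end;")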
We do not need (error) recovery since declarations must come first in VHDL. The only need for recovery is outlining and fragmentation.
We should build the AST objects during the analysis phase. They should already establish the references for a correct parse tree.
Child parsers add child nodes to the parent object being built and, therefore, should take the parent object as one of their inputs.
This will solve the problem of semantic ambiguity. Elaboration should do the type checking. The evaluation of static expressions
must be done at the very last stage of elaboration, since the design is going to be edited intensely, more often than simulated, and
evaluation does not make much sense before that (probably only for testing the CAD itself, to see which expressions are supported and which are not).
That is, initial value computation should not be done until the whole design is checked for correctness.
Although the Zamia feature is to compile only the top level plus its dependencies. Then, when a module changes, its dependents are not rebuilt.
That is meant for large projects. It looks like trading code-base consistency against compilation performance.
SBT hints that POJO references are not needed even for type checking: you generate bytecode and check the types. The advantage is that the user
does not need to update references when a library module is updated; he does not need to recompile until the interface of a dependency
changes. That is, do not re-evaluate an architecture if the entity signature has not changed (first the arch included the entity, then it
extended it, then it included it again, says Ashenden. But in any case it depends).
I asked why they defer elaboration (type checking) to a post-parsing stage,
https://gitter.im/scala-ide/scala-ide?at=57399bcfc61823687d3c3941. Martin says there that it is for Eclipse speed. They also say that the
Scala presentation compiler is responsible for detecting the compilation units
http://scala-ide.org/docs/dev/architecture/presentation-compiler.html
But perhaps, if we combine type checking with parsing, semantic errors will stop the parsing and we will not be able to build the AST,
whereas for incremental compilation it would probably be useful not to return to parsing if we did not edit the module. But in that case a type
may turn into a function, and the production func(arg) may require rebuilding the tree -- that is, parsing. So for context-sensitive parsing
we will need re-parsing of dependent modules when their dependencies change. Maybe that is not so bad, since they say that in Scala parsing is
not the problem. Besides, re-parsing of an unchanged file can run in parallel -- we know from where to where to parse. Provided, of course, that
the first parse was complete, which is difficult if we build reliance on dependencies into it.
(Eclipse will not let the user hit Ctrl+S while you are rebuilding https://gitter.im/scala-ide/scala-ide?at=573e36fc5b4261a6333c4e33)
import scala.util.parsing.combinator.RegexParsers | |
import scala.util.matching.Regex | |
object Vhdl extends RegexParsers { | |
// To save memory, we tend to avoid wrapper classes, replacing them with opt(a) ~ b ^^ {case a ~ b => a.map(_->b).getOrElse(b)}.
// This has the additional advantage that the stringified parse tree looks very nice, that is, Plus(IntDecimal(1)+IntDecimal(1))
// looks much clearer than LogicalExpression(Relation(ShiftExpr(SimpleExpr(Term(1), Term(1)))), None).
// Probably, we can also convert CharLiteral(Char) and StringLiteral(String) into Char and String
// correspondingly. But Name is currently reduced to String if we have a simple identifier, and the
// identifier would conflict with string literals in that case: if we meet "something" in an expression
// we won't be able to tell whether it is an identifier or a string literal if both are strings. We therefore
// leave the basic identifier as a String because I expect many more identifiers than string literals.
// We could convert string literals to character arrays, which should also save memory. http://programmers.stackexchange.com/q/308395
// L E X I N G | |
// todo: probably range matching is more efficient but it has a problem with escaping | |
//val digit = "0-9" ; val lower_case_letter = "a-zßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿ" ; val upper_case_letter = "A-ZÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ" | |
val digit = "0123456789"; val lower_case_letter = "abcdefghijklmnopqrstuvwxyzßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿ" | |
val upper_case_letter = "ABCDEFGHIJKLMNOPQRSTUVWXYZÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ" | |
val letter = lower_case_letter + upper_case_letter; | |
lazy val graphic_character = basic_graphic_character + lower_case_letter + other_special_character | |
lazy val basic_graphic_character = upper_case_letter + digit + special_character + space_character | |
//NB! I had to disable the double quotation. It seems that guenter did the same and supplies double quotes \"\" as an additional alternative in string literals wherever a double quote can be part of the string | |
val special_character = "\"#&'()*+,-./:;<=>?@[]_`|"; val other_special_character = "!$%\\^{}~¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿×÷-" | |
def qe(charset: String) = s"\\Q" + charset + "\\E"; val space_character = " \u00A0" | |
val escapedCharacter = qe(graphic_character) | |
//val basic_character = basic_graphic_character | format_effector | |
object kw extends Enumeration { | |
// type kw = Value | |
val /*miscellaneous ops*/ not, abs, /*mult operators*/ rem, mod, /*shift ops*/ sll, srl, sla, sra, rol, ror, /*relations in logical expr*/ and, or, xor, nand, nor, xnor = Value | |
val signal, shared, variable, constant, array, record, of, is, `new`, `type`, subtype, attribute, alias = Value | |
val configuration, entity, architecture, component, block, body, procedure, impure, pure, function, begin, end = Value | |
val process, postponed = Value; val inertial, transport, reject, force, release, after = Value | |
val on, until, `for`, `while`, generate, loop = Value; val wait_ = Value("wait") | |
val generic, map, port, parameter = Value; val `protected`, access = Value | |
val library, context, `package`, use, vunit, all, others, default, open, file, to, downto, range = Value | |
val in, out, inout, buffer, linkage = Value; val group, label = Value | |
val `if`, then, elsif, `else`, `case`, `with`, select, when = Value | |
val next, exit, `return`, `null`, unaffected = Value; val guarded, bus, register, disconnect = Value | |
val report, assert, severity = Value; val units, literal, sequence, property = Value | |
}; implicit def stringify(keyword: kw.Value) = keyword.toString | |
//15.3 § Each lexical element is either a delimiter, an identifier (which may be a reserved word), | |
//TODO: create a parser for every keyword at the start | |
implicit def parse_kw(keyword: kw.Value): Parser[String] = basic_identifier_including_KW { _ == keyword.toString } | |
//eq1(basic_identifier, keyword.toString) | |
/* Probably this ugly parseMap is faster but I realized that Parsers's flatMap is a way to do acceptIf | |
def parseMap[T](p: Parser[T])(f: (Success[T], Input) => ParseResult[T]) = new Parser[T] { | |
def apply(in: Input) = p(in) match { case s @ Success(res, rem) => f(s, in); case a => a } | |
}; def parseSucc[T](p: Parser[T])(f: Success[T] => ParseResult[T]) = parseMap(p) { case (s, _) => f(s) } | |
def acceptIf[T](p: Parser[T])(acceptable: T => Boolean, failure: T => String) = | |
parseMap(p) { case (s @ Success(t, _), in) => if (acceptable(t)) s else Failure(failure(t), in) } | |
/*def basic_identifier_including_KW(acceptable: String => Boolean) = parseMap(s"[$letter][_${letter+digit}]*".r) | |
{case (s @ Success(id, _), in) => if (acceptable(id)) s else Failure(s"identifier $id is not acceptable", in)}*/ | |
*/ | |
def acceptIf[T](p: Parser[T])(acceptable: T => Boolean, failMsg: T => String) = | |
p flatMap {t => if (acceptable(t)) success (t) else (failure (failMsg(t)))} | |
//p flatMap {t => t |> (if (acceptable(t)) success _ else failMsg andThen failure _)} // Scalaz allows to factor if-else func out | |
def basic_identifier_including_KW(acceptable: String => Boolean) = | |
acceptIf(s"[$letter][_${letter + digit}]*".r)(acceptable, id => s"identifier '$id' is not acceptable here") | |
val keywords = kw.values.map(_.toString).toSet; | |
val basic_identifier = basic_identifier_including_KW(id => !keywords.contains(id)) | |
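// Quick sanity check of the keyword filtering above (a sketch; the sample inputs are arbitrary): | |
//   parseAll(basic_identifier, "sig1")    // Success: "sig1" is an ordinary identifier | |
//   parseAll(basic_identifier, "signal")  // Failure: "identifier 'signal' is not acceptable here", since signal is reserved | |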
val extended_identifier: Parser[String] = delimited('\\', 1) ^^ { case s => "\\" + s + "\\" } | |
val identifier: Parser[String] = basic_identifier | extended_identifier | |
def delimited(delim: Char, minLen: Int = 0) = { | |
val escapedL = qe(delim + "") | |
def decimate(string: String, duplicated: Char) = { // used instead of str.replaceAll, which sucks http://stackoverflow.com/questions/23869613 | |
val sb = new StringBuilder(string.length); string.zipWithIndex.foreach { | |
case (c, i) => | |
if (c != duplicated || (i & 1) == 1) sb.append(c) | |
}; sb.toString | |
} | |
val delimiterFreeGC = qe(graphic_character.replace(delim + "", "")) | |
s"$escapedL([$delimiterFreeGC]|$escapedL$escapedL){$minLen,}$escapedL".r ^^ { | |
case str => decimate(str.subSequence(1, str.length - 1).toString, delim) | |
} | |
} // https://groups.google.com/forum/#!topic/comp.lang.vhdl/lUL6I2u_VAw | |
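// Sketch of the intended behaviour of `delimited` (inputs shown as raw VHDL text, not Scala escapes; | |
// extended_identifier above and string_literal below are both built on it): | |
//   "say ""hi"""   ==> the string value  say "hi"   (a doubled delimiter collapses to a single character) | |
//   \my\\sig\      ==> the extended identifier  \my\sig\ | |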
// L I T E R A L S | |
case class StringLiteral(val value: String) { override def toString = s""""$value"""" } | |
val string_literal: Parser[StringLiteral] = delimited('\"') ^^ StringLiteral | |
val operator_symbol = string_literal // the operator symbol is first used in suffix, but we cannot declare it there because that causes a weird uninitialized-field error | |
val suffix = operator_symbol | character_literal | identifier | |
//def parse(parser: Parser[_], input: String) = println(parseAll(parser, input)) | |
//parse(string_literal, "\"aaa ßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿ\"") | |
case class CharacterLiteral(val c: Char) { override def toString = s"'$c'" } | |
val character_literal = s"'[${qe(graphic_character)}]'".r ^^ { case c => CharacterLiteral(c.charAt(1)) } | |
val enumeration_literal = identifier | character_literal | |
def underlined_re(charset: String) = s"([$charset](_?[$charset])*)" // ^^ {case str => str.replace("_", "")} | |
lazy val integer_re = underlined_re("0-9"); lazy val based_integer_re = underlined_re("0-9a-zA-Z") | |
val exponent_re = s"[Ee]([+-])?$integer_re" | |
def reMatch(restr: String): Parser[Regex.Match] = new Parser[Regex.Match] { | |
def apply(in: Input) = { | |
val r = restr.r; val source = in.source; val offset = in.offset | |
val start = handleWhiteSpace(source, offset); (r findPrefixMatchOf | |
(source.subSequence(start, source.length))) match { | |
case Some(matched) => | |
Success(matched, in.drop(start + matched.end - offset)) | |
case None => Failure("string matching regex " + r + " expected but " + in.first + | |
" found", in.drop(start - offset)) | |
} | |
} | |
} //takeAll(reMatch("#(a+)(11)#") ^^ {case m => m.subgroups.foreach{println}}, "#aaa11#") | |
def strip(int: String) = int.replaceAll("_", ""); def strip0(int: String) = if (int == null) 0 else strip(int).toInt | |
def decimal(int: String, fractional: String, base: Int, sign: String, exp: String): Decimal = { | |
val iexp = strip0(exp) | |
val stillThere = (if (fractional == null) None else Some(strip(fractional))) | |
.map(_.reverse.dropWhile { _ == '0' }.reverse).getOrElse("") // clean up the trailing zeroes after the "." | |
val finalExp = (if ("-" equals sign) -iexp else iexp) - stillThere.length | |
val mantissa = Integer.parseInt(strip(int) + stillThere, base) | |
if (finalExp == 0) IntDecimal(mantissa) else BasedDecimal(mantissa, base, finalExp) | |
}; class Decimal; case class IntDecimal(val mantissa: Int) extends Decimal | |
case class BasedDecimal(mantissa: Int, base: Int, exp: Int) extends Decimal { override def toString = s"$mantissa * $base^$exp" } | |
val decimal_literal = reMatch(s"($integer_re)(\\.$integer_re)?($exponent_re)?") ^^ { | |
case m => | |
decimal(m.group(1), m.group(5), 10, m.group(8), m.group(9)) | |
} | |
def error(msg: String) = throw new java.lang.NumberFormatException(msg) | |
def assert(passed: Boolean, msg: => String) = if (!passed) error(msg) | |
val based_literal = reMatch(s"($integer_re)#($based_integer_re)(\\.($based_integer_re))?#($exponent_re)?") ^^ { | |
case m => //println((1 to m.groupCount) map (i => s"$i => " + m.group(i))) | |
val base = strip(m.group(1)).toInt; if (base < 2 || base > 16) error("base must be within 2 to 16 but " + base + " was found") | |
decimal(m.group(4), m.group(8), base, m.group(12), m.group(13)) | |
} | |
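// Sketch of the Decimal encoding produced by the two literal parsers above (illustrative inputs): | |
//   1.5E2      ==> BasedDecimal(15, 10, 1), i.e. 15 * 10^1 = 150 | |
//   2#1_0000#  ==> IntDecimal(16) | |
//   16#FF#     ==> IntDecimal(255) | |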
val abstract_literal = based_literal | decimal_literal | |
case class PhysicalLiteral(val num: Decimal, val unit: Any) | |
// I had to make physical amount mandatory. Otherwise, I am getting a conflict with enumeration, which can also be a simple identifier | |
// https://groups.google.com/forum/#!topic/comp.lang.vhdl/Znyt9ze7SzA | |
val physical_literal = /*opt( abstract_literal )*/ abstract_literal ~ name ^^ { case al ~ u => PhysicalLiteral(al, u) } // todo: is it right that we can give an extended name and a function call here? | |
val numeric_literal: Parser[_] = physical_literal | abstract_literal | |
val base_specifier = Some("B|O|X|UB|UO|UX|SB|SO|SX|D").map(s => s.toUpperCase + "|" + s.toLowerCase).get | |
object bitStringLiteral { | |
val hexChars = 'a' - 10; val baseWidth = Map('B' -> 1, 'O' -> 3, 'X' -> 4) | |
def apply(length: String, base: String, input: String) = { | |
val radix = base.last | |
var sb: StringBuilder = null; def pad(len: Int, char: Char) { if (len != 0) { sb.append(char); pad(len - 1, char) } } | |
baseWidth.get(radix).map { bits => | |
sb = new StringBuilder(input.length << 3) | |
input.toVector.reverse.map { letter => | |
def fill(digit: Int) = { | |
def bin(value: Int, string: Int) { if (string != 0) { sb.append(((value % 2) + '0').toChar); bin(value / 2, string - 1) } } | |
if (digit >= (1 << bits)) pad(bits, letter) else bin(digit, bits) | |
}; letter toLower match { | |
case letter if letter.isDigit => fill(letter - '0') | |
case letter if 'a' to 'f' contains letter => fill(letter - hexChars) | |
case _ => pad(bits, letter) | |
} | |
} | |
}.getOrElse { assert(radix == 'D', "unknown radix " + radix); sb = new StringBuilder(input.toInt.toBinaryString).reverse } | |
if (length != null) { | |
val proficit = sb.length - strip(length).toInt; val signed = base.head == 'S' | |
if (proficit > 0) { | |
val a = sb.takeRight(proficit).toString; sb.setLength(sb.length - proficit) | |
val b = Vector.fill(proficit)(if (!signed) '0' else sb.last).mkString(""); assert(a == b, s"cropped bits must be $b whereas $a cropped in fact") | |
}; if (proficit < 0) pad(-proficit, if (!signed) '0' else sb.last) | |
}; StringLiteral(sb.reverse.toString) | |
} | |
}; val bit_string_literal = reMatch(s"""($integer_re)?($base_specifier)"([${qe(graphic_character.replace("\"", ""))}]*)"""") ^^ { | |
case m => bitStringLiteral(m.group(1), m.group(4).toUpperCase, strip(m.group(5))) | |
} | |
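// Sketch of the bit-string expansion above (illustrative inputs; each result is a StringLiteral of bits): | |
//   X"F"    ==> "1111" | |
//   8X"F"   ==> "00001111"  (padded to the requested length; S bases replicate the sign bit instead of '0') | |
//   D"255"  ==> "11111111" | |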
val literal: Parser[_] = bit_string_literal | numeric_literal | enumeration_literal | string_literal | kw.`null` | |
// E X P R E S S I O N S | |
//kw.and | kw.or | kw.xor | kw.xnor | kw.nand | kw.nor | |
trait Op { def op: String; def arg: Any } | |
abstract class UnaryOp(override val arg: Any) extends Op { override def toString = /*s"$op($arg)"*/ getClass.getSimpleName + s"($arg)" }; | |
case class U_abs(override val arg: Any) extends UnaryOp(arg) { def op = kw.abs } | |
case class U_not(override val arg: Any) extends UnaryOp(arg) { def op = kw.not } | |
//case class NoOp(override val arg: Any) extends UnaryOp(arg) {def op = arg.toString ; override def toString = arg.toString} | |
case class U_-(override val arg: Any) extends UnaryOp(arg) { def op = "-" } | |
case class U_??(override val arg: Any) extends UnaryOp(arg) { def op = "??" } | |
case class U_and(override val arg: Any) extends UnaryOp(arg) { def op = "and" } | |
case class U_or(override val arg: Any) extends UnaryOp(arg) { def op = "or" } | |
case class U_xor(override val arg: Any) extends UnaryOp(arg) { def op = "xor" } | |
case class U_xnor(override val arg: Any) extends UnaryOp(arg) { def op = "xnor" } | |
case class U_nand(override val arg: Any) extends UnaryOp(arg) { def op = "nand" } | |
case class U_nor(override val arg: Any) extends UnaryOp(arg) { def op = "nor" } | |
abstract class BinOp(override val arg: Any, val arg2: Any) extends Op { override def toString = /*s"($arg $op $arg2)"*/ /*s"BinOp($op, $arg,$arg2)"*/ getClass.getSimpleName + s"($arg, $arg2)" }; | |
case class B_exp(val arg1: Any, override val arg2: Any) extends BinOp(arg1, arg2) { def op = "**" } | |
case class B_+(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "+" } | |
case class B_-(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "-" } | |
case class B_&(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "&" } | |
case class B_*(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "*" } | |
case class B_/(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "/" } | |
case class B_rem(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = kw.rem } | |
case class B_mod(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = kw.mod } | |
case class B_sll(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "sll" } | |
case class B_srl(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "srl" } | |
case class B_sla(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "sla" } | |
case class B_sra(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "sra" } | |
case class B_rol(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "rol" } | |
case class B_ror(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "ror" } | |
case class B_=(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "=" } | |
case class B_/=(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "/=" } | |
case class B_<(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "<" } | |
case class B_<=(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "<=" } | |
case class B_>(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = ">" } | |
case class B_>=(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = ">=" } | |
case class B_?=(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "?=" } | |
case class B_?/=(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "?/=" } | |
case class B_?<(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "?<" } | |
case class B_?<=(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "?<=" } | |
case class B_?>(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "?>" } | |
case class B_?>=(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "?>=" } | |
case class B_and(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "and" } | |
case class B_or(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "or" } | |
case class B_xor(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "xor" } | |
case class B_xnor(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "xnor" } | |
case class B_nand(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "nand" } | |
case class B_nor(override val arg: Any, override val arg2: Any) extends BinOp(arg, arg2) { def op = "nor" } | |
// it is probably suboptimal to store reference to the enumeration op in the classes generated this way | |
// Actually, we must use ^^^ for operator mapping http://jim-mcbeath.blogspot.com.ee/2008/09/scala-parser-combinators.html | |
//primary ::= name | literal | aggregate | function_call | qualified_expression | type_conversion | allocator | ( expression ) | |
val primary = /*function_*/ call | literal | pared(expression) | aggregate | failure("primary expected") | |
val ufParser = U_parserv(kw.abs -> U_abs, kw.not -> U_not, kw.and -> U_and, kw.or -> U_or, kw.xor -> U_xor, kw.xnor -> U_xnor, kw.nand -> U_nand, kw.nor -> U_nor) | |
val factor: Parser[Any] = ufParser ~ primary ^^ { case op ~ primary => op(primary) } | binOpt(primary, B_parser("**" -> B_exp)) | |
val termOps = B_parser("*" -> B_*, "/" -> B_/, "rem" -> B_rem, "mod" -> B_mod) | |
val term = (factor ~ rep(termOps ~ factor)) ^^ { | |
case seed ~ suffices => | |
suffices.foldLeft(seed: Any) { case (arg1, op ~ arg2) => op(arg1, arg2) } | |
} | |
//val simple_expression = binRep(term, "+" -> B_+, "-" -> B_-) | |
//def U_parser(sign2op: (String, Any => UnaryOp)*) = sign2op.map{case(sign, toUnary) => Parser(sign) ^^ {case _ => toUnary}} | |
def U_parserv(sign2op: (kw.Value, Any => UnaryOp)*) = //sign2op.map{case (v, op) => Parser(v.toString) ^^ {case _ => toBinOp} }.reduce{_ | _} | |
sign2op.map { case (v, op) => val s: String = v.toString; Parser(s) ^^ { case _ => op } }.reduce { _ | _ } | |
def B_parser(sign2op: (String, (Any, Any) => BinOp)*): Parser[(Any, Any) => BinOp] = | |
sign2op.map { case (str, toBinOp) => Parser(str) ^^ { case _ => toBinOp } }.reduce { _ | _ } | |
def B_parserv(sign2op: (kw.Value, (Any, Any) => BinOp)*) = B_parser(sign2op.map { case (value, op) => (value.toString, op) }: _*) | |
def binFold(arg1: Any, suffices: Seq[~[(Any, Any) => BinOp, Any]]) = | |
suffices.foldLeft(arg1) { case (acc, op ~ term) => op(acc, term) } | |
val simple_expression = { | |
(opt("+" | "-") ~ term ~ rep(B_parser("+" -> B_+, "-" -> B_-, "&" -> B_&) ~ term)) ^^ { | |
case sign ~ first ~ rest => val signed = (if (sign == Some("-")) U_-(first) else first); binFold(signed, rest) | |
} | |
} | |
def binOpt[T](operand: Parser[T], op: Parser[(Any, Any) => BinOp]) = operand ~ opt(op ~ operand) ^^ { | |
case a ~ o => o.map { case (op ~ a2) => op(a, a2) }.getOrElse(a) | |
} | |
val shift_expression = binOpt(simple_expression, B_parserv(kw.sll -> B_sll, kw.srl -> B_srl, kw.sla -> B_sla, kw.sra -> B_sra, kw.rol -> B_rol, kw.ror -> B_ror)) | |
val relation = binOpt(shift_expression, B_parser("=" -> B_=, "/=" -> B_/=, "<=" -> B_<=, "<" -> B_<, ">=" -> B_>=, ">" -> B_>, "?=" -> B_?=, "?/=" -> B_?/=, "?<=" -> B_?<=, "?<" -> B_?<, "?>=" -> B_?>=, "?>" -> B_?>)) // NB: the longer operators must come before their prefixes, and "<=" must map to B_<= | |
// val relation = binOpt(relational_operator, shift_expression) | |
val rel_op = B_parserv(kw.and -> B_and, kw.or -> B_or, kw.xor -> B_xor, kw.xnor -> B_xnor); | |
val rel_par = relation ~ rep1(rel_op ~ relation) ^^ { case a1 ~ tail => binFold(a1, tail) } | |
val logical_expression = rel_par | binOpt(relation, B_parserv(kw.nand -> B_nand, kw.nor -> B_nor)) | |
val expression: Parser[_] = "??" ~> primary ^^ { case pr => U_??(pr) } | logical_expression | |
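// Sketch of how the operator layers above nest (constructor names shown schematically; Scala mangles the | |
// symbolic class names when printing): parsing "1 + 2 * 3" with `expression` yields roughly | |
//   B_+(IntDecimal(1), B_*(IntDecimal(2), IntDecimal(3))) | |
// i.e. term binds tighter than simple_expression and binFold keeps the "+"/"-"/"&" chain left-associative. | |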
// I D E N T I F I E R S | |
val simple_name = identifier; val label = identifier; lazy val type_mark: Parser[_] = /*(sub)type_*/ name; | |
//def quoted[T](content: Parser[T]) = ("\"": Parser[String]) ~> content <~ "\"" | |
def pared[T](content: Parser[T]) = "(" ~> content <~ ")" | |
def commasep[T](elem: Parser[T]) = rep1sep(elem, ","); def pcomma[T](elem: Parser[T]) = pared(commasep(elem)) | |
class NamePart; case class Attr(val spec: String, val args: Any) extends NamePart; case class Suffix(val spec: Any) extends NamePart | |
case class DiscreteRange(ascendign: Boolean, from: Any, to: Any) extends NamePart | |
// class SubProgAttr(val signature: Any, name: Name, args: Any) extends Attr(name, args) | |
//case class Args(parsed: Option[List[_]]) {override def toString = parsed.map(" with args(" + _.mkString(",") + ")").getOrElse("")} | |
case class Call(val name: Any, val args: List[_]) { override def toString = s"$name with args $args" } | |
//case class Name(val entry: Object, val rest: List[_]) {override def toString = {val subobj = if(rest.length==0) "" else rest.mkString(",",",",""); s"Name($entry$subobj)"} } | |
lazy val /*function_*/ call = name ~ args ^^ { case n ~ args => args.map(args => Call(n, args)).getOrElse(n) }; | |
val prefix = /*function_*/ call; | |
//prefix = name | function_call where function_call = name [(args)] can be a name also | |
//attribute_name ::= prefix [ signature ] ' /*attribute_*/simple_name [ ( expression ) ] | |
//lazy val attribute_name = name ~ attribute_name_suffix//prefix ~ opt( signature ) <~ "'" ~ simple_name ~ opt( pared(expression) ) | |
/*lazy val attribute_name = parseSucc(name) { case s @ Success(name, reminder) => | |
if (name.isInstanceOf[Seq[_]] && name.asInstanceOf[Seq[_]].last.isInstanceOf[Attr]) s else Failure(s"$name fails to encode an attribute", reminder) | |
} //~ attribute_name_suffix // name altready can be attribute_name*/ | |
lazy val attribute_name = acceptIf(name)(name => name.isInstanceOf[Seq[_]] && name.asInstanceOf[Seq[_]]. | |
last.isInstanceOf[Attr], _ + " fails to encode an attribute") | |
val attribute_name_suffix = ("'" ~> simple_name) ~ opt(pared(expression)) ^^ { case n ~ a => Attr(n, if (a == None) null else a.get) } | |
//lazy val attribute_name_suffix = signature ~ ("'" ~> simple_name) ~ opt( pared(expression) )^^ {case s ~ name ~ o => s + s"'$name"+ o.map(args => s"($args)").getOrElse("")} | |
val selected_name_suffix = "." ~> (kw.all | suffix) ^^ Suffix; val selected_name = prefix ~ selected_name_suffix | |
val association_list = pcomma(association_element); val args: Parser[Option[List[_]]] = opt(association_list) //^^ Args | |
// lazy val name: Parser[Any] = rep(selected_name_suffix | attribute_name_suffix | slice_name_suffix) ^^ | |
//{case entry ~ tail => Name(entry, tail)} | |
val name_suffix = selected_name_suffix | attribute_name_suffix | /*slice_suffix*/ pared(discrete_range) | /*indexed_suffix*/ pcomma(expression) | |
// use success(None) ~ pcomma(expression) alternative because args can consume the indexing name suffix, as per http://stackoverflow.com/a/22650533/4563974 | |
def optA[A,B](ab: Parser[Option[A] ~ B]) = ab ^^ {case a ~ b => a.map(_ -> b).getOrElse(b)} | |
def ooptA[A,B](a: Parser[A], b: Parser[B]) = opt(a) ~ b ^^ {case a ~ b => a.map(_ -> b).getOrElse(b)} | |
lazy val name: Parser[Any] = (suffix | external_name) ~ rep(optA(args ~ name_suffix | success(None) ~ | |
pcomma(expression))) ^^ { case entry ~ ext => if (ext.length != 0) entry :: ext else entry } | |
val subtype_indication = (opt(resolution_indication) ~ type_mark | success(None) ~ type_mark) ~ // we needed this alternative because parser combinators suck: http://stackoverflow.com/a/22650533/4563974 | |
opt(constraint) ^^ { case ri ~ tm ~ c => s"SubtInd(${ri.map(_ + " ").getOrElse("")}$tm)" } | |
val range = simple_expression ~ direction ~ simple_expression ^^ { case from ~ dir ~ to => DiscreteRange(dir equals "to", from, to) } | /*range_*/ attribute_name ^^ { case range => s"Range($range)" } | |
val discrete_range = range | /*discrete_*/ subtype_indication ; val slice_name_suffix = pared(discrete_range) | |
val direction = kw.to | kw.downto; val range_constraint = kw.range ~> range | |
val signature = commasep(type_mark) ~ opt(kw.`return` ~> type_mark) ^^ { case args ~ res => val as = args.mkString(","); val full = res.map(as + " " + _).getOrElse(as); if (full.length == 0) "" else s"Signature($full)" } | |
// todo: exploit this code | |
/*def namedAfterPos[P, N](pos: Parser[P], nmd: Parser[N], sep: Parser[_] = ",") = {type Res = (List[P], List[N]); def args(positional: Boolean, acc: Res): Parser[Res] = {def recur(positional: Boolean, acc: Res) = (sep flatMap {_ => args(positional, acc)}) | success(acc); (nmd flatMap {n => recur(false, acc match {case (p,nn)=> (p, n :: nn)})}) | (pos flatMap {p => if (positional) recur(true, acc match {case (pp, n) => (p :: pp, n)}) else failure("positional is not expected after named")})}; args(true, (Nil, Nil))} | |
List("a", "a,a,a", "a,a,a=b,a=b", "a=b, a=b") map (_ p namedAfterPos("a", "a=b")) map {case Success(res, _) => res} | |
val Failure("positional is not expected after named", pos) = "a,a=b,a" p namedAfterPos("a", "a=b") | |
def paredArgList[K,V](name: Parser[K] = identifier, value: Parser[V] = expr) = pared(namedAfterPos(value, name ~ ("=" ~> value) map {case n~v => (n,v)})) | |
List("a+b-1", "b=1+1", "a,a+1", "b=3+1,c=c+1", "1,b=g+g,d=123,bd=123+1") map ("(" + _ + ")" p paredArgList() map {case Success(res, _) => res}) | |
*/ | |
case class NamedAssociation(val formal: Any, actual: Any) | |
lazy val association_element = opt(name <~ "=>") ~ /*opt(kw.inertial) ~ */ expression ^^ { | |
case formal ~ /*in ~*/ actual => formal.map(new NamedAssociation(_, actual)).getOrElse(actual) | |
} | |
val actual_designator = kw.open | opt(kw.inertial) ~ expression | subtype_indication | |
//lazy val subtype_indication = opt(resolution_indication) ~ type_mark ~ opt(constraint) ^^ {case ri ~ tm ~ c => s"SubtInd(${ri.map(_ + " ").getOrElse("")}$tm)"} | |
lazy val resolution_indication: Parser[_] = /*resolution_function_*/ name | pared(element_resolution) | |
val element_resolution = resolution_indication | commasep( /*record_element_*/ simple_name ~ resolution_indication) | |
val index_constraint = pcomma(discrete_range) | |
val constraint = range_constraint | array_constraint | record_constraint | |
val array_constraint: Parser[_] = pared(index_constraint | kw.open) ~ opt(element_constraint) | |
val element_constraint = array_constraint | record_constraint; | |
val record_constraint: Parser[_] = pcomma( /*record_element_*/ simple_name ~ element_constraint) | |
//val pathname_element = /*entity/package_*/simple_name | /*component_instantiation_/block_/generate_statement_*/label ~ opt( pared( /*static_*/expression )) | |
val pathname_element = identifier ~ opt(pared( /*static_*/ expression)) ^^ { case id ~ opt => id + opt.map(expr => s"($expr)").getOrElse("") } | |
val partial_pathname = rep(pathname_element <~ ".") ~ /*object_*/ simple_name ^^ { case path ~ target => path.mkString("", ",", "," + target) } | |
//val package_pathname = "@" ~> (/*library_logical_*/name <~ ".") ~ rep1(/*package_*/simple_name <~ "." ) ~ /*object_*/simple_name ^^ {case lib ~ pkg ~ name => s"@($lib:$pkg:$name)"} | |
val package_pathname = "@" ~> rep( /*library_logical_*/ simple_name <~ ".") ~ simple_name ^^ { case path ~ target => "@" + path.mkString("", ".", s"$target target") } // it should be 'name . (pkg_simple_name .) simple_name' but name can be selected name and consumes everything in combinators http://stackoverflow.com/questions/23869613 | |
val absolute_pathname = "." ~> partial_pathname ^^ { case path => s".($path)" } | |
val relative_pathname = rep("^" <~ ".") ~ partial_pathname ^^ { case parent ~ path => "^." + parent.length + s"($path)" } | |
val external_name = "<<" ~> (kw.constant | kw.signal | kw.variable) ~ (absolute_pathname | relative_pathname | package_pathname) ~ (":" ~> subtype_indication) <~ ">>" ^^ { case kind ~ path ~ subtype => s"External($kind $path:$subtype)" } | |
// STATEMENTS (referred to by processes and procedures) | |
val condition = expression ; def target = name | aggregate | |
// conditional produces plain expr when no conditions are specified and List(expr :: cond*) otherwise. The list may come with final default expr if specified. | |
def conditional[T](value: Parser[T]) = value ~ opt(kw.when ~> condition ~ rep(kw.`else` ~> value ~ | |
(kw.when ~> condition)) ~ opt(kw.`else` ~> value)) ^^ {case e ~ when => when.map{case (cond ~ rep ~ deflt) => | |
val conditionals = rep.foldLeft(List(cond, e)){case(acc, e ~ c) => c :: e :: acc} | |
deflt.map(_ :: conditionals).getOrElse(conditionals).reverse}.getOrElse(e)} | |
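// Shape produced by `conditional` (a sketch): a bare "x" stays the plain value <x>, while "x when c else y" | |
// becomes List(<x>, <c>, <y>) -- value, its guarding condition, then the optional trailing default. | |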
case class AssertionStatement(val condition: Any, val report: Option[Any], val severity: Option[Any]) | |
val assertion_statement = kw.assert ~> condition ~ opt(kw.report ~> expression) ~ opt(kw.severity ~> expression) ^^ | |
{case cond ~ rep ~ sev => AssertionStatement(cond, rep, sev)} | |
val choices = rep1(kw.others | discrete_range | /*/*element_*/simple_name |*/ simple_expression ) // discrete range produces SubtypeIndicators on simple names. | |
val sensitivity_list = commasep( /*signal_*/ name); val aggregate = pcomma(ooptA(choices <~ "=>", expression)) | |
case class WaitStatement(val on: Option[List[Any]], val until: Option[Any], val `for`: Any) | |
val wait_statement = kw.wait_ ~> opt(kw.on ~> sensitivity_list) ~ opt(kw.until ~> condition) ~ opt(kw.`for` ~> | |
/*time_*/ expression) ^^ {case on ~ until ~ f4r => WaitStatement(on, until, f4r)} | |
val report_statement = kw.report ~> expression ~ opt(kw.severity ~> expression) | |
// The VHDL LRM says that a conditional assignment ought to be reduced to if-elsif and a selected one to a case statement. | |
// We can do it later and see the extent to which such replacement affects the memory requirements and performance. | |
trait Assignment {def target: Any ; def value: Any} | |
case class WaveformAssignment(val target: Any, val value: Any) extends Assignment | |
case class ExpressionAssignment(val target: Any, val value: Any) extends Assignment | |
case class WaveformSelectedAssignment(val target: Any, val selector: Any, val value: Any) extends Assignment | |
case class ExpressionSelectedAssignment(val target: Any, val selector: Any, val value: Any) extends Assignment | |
case class WaveFormElement(val value: Any, val after: Any) | |
val waveform = kw.unaffected | commasep(( /*value_*/ expression | kw.`null`) ~ opt(kw.after ~> /*time_*/ expression) ^^ | |
{case e ~ after => val v = if (e == "null") null else e; after.map(after => WaveFormElement(v,after)).getOrElse(v)}) | |
def str(ignored: Parser[String], output: String) = ignored ^^ {case _ => output} // constant strings can be matched faster with Scala's eq operation | |
val delay_mechanism = opt(str(kw.transport, "transport") | opt(kw.reject ~> /*time_*/ expression) <~ kw.inertial) // produces either "transport" or Option[expression] | |
val force_mode = opt(str(kw.in, "in") | str(kw.out, "out")) // NB: a separate "val force = kw.force ~> force_mode" would clash with the def force below, which already prefixes kw.force | |
def force[EC](cases: Parser[Any] => Parser[EC]) = optA(kw.force ~> force_mode ~ cases(expression)) | |
def dm_cwf[WC](cases: Parser[Any] => Parser[WC]) = optA(delay_mechanism ~ cases(waveform)) | |
def assignment(value: Parser[_], ctor: (Any, Any) => Assignment, tsign: Parser[String] = "<=") = | |
(target <~ tsign) ~ value ^^ {case t ~ v => ctor(t,v)} | |
def selectedAssignment(value: Parser[_], ctor: (Any, Any, Any) => Assignment, tsign: Parser[String] | |
= "<=") = kw.`with` ~> expression ~ (kw.select ~> opt("?")) ~ (target <~ tsign) ~ value ^^ {case s ~ q ~ t ~ v => ctor(t, s, v)} // q is the optional "?" of the matching select | |
// I could reduce the selected (case-style) assignment to an if-chain but I won't, because theoretically the cases have the same | |
// priority and the activated case can be computed more efficiently than by evaluating them one-by-one. | |
val signal_assignment = assignment(kw.release ~> force_mode | force(conditional _), | |
ExpressionAssignment) | assignment(dm_cwf(conditional _), WaveformAssignment) | |
def selected[T](value: Parser[T]) = commasep(value ~ (kw.when ~> choices)) //^^ {case list => list.map} | |
val selected_signal_assignment = selectedAssignment(force(selected _), ExpressionSelectedAssignment) | | |
selectedAssignment(dm_cwf(selected _), WaveformSelectedAssignment) | |
val variable_assignment = assignment(conditional(expression), ExpressionAssignment, ":=") | |
val selected_variable_assignment = selectedAssignment(selected(expression), ExpressionSelectedAssignment, ":=") | |
case class CaseStatement(var label: String, val selector: Any, val options: List[_]) | |
val case_statement = Parser { in => | |
val workhorse = end(kw.`case`, opt("?") ~ (expression <~ kw.is) ~ rep1(kw.when ~> (choices <~ "=>") ~ | |
sequence_of_statements)) ~ opt("?") ~ opt( /*case_*/ label) ^^ { case o1 ~ selector ~ rep ~ o2 ~ label => | |
val options = rep.foldLeft(Nil: List[Any]){case (list, when ~ block) => when :: block :: list} | |
val invalid = rep.dropRight(1).exists{case (when ~ _) => (when contains "others")} | |
invalid -> CaseStatement(label.getOrElse(null), selector, options.reverse) | |
} | |
workhorse(in) match { case Success((invalid, result), rem) => | |
if (invalid) Error("others in a wrong position", in) else Success(result, rem); case a => a } | |
} | |
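// The extra pass above turns a misplaced "others" choice into a hard Error rather than a backtrackable Failure, e.g.: | |
//   case sel is when 0 => ... when others => ... end case   -- accepted, "others" comes last | |
//   case sel is when others => ... when 1 => ... end case   -- Error("others in a wrong position") | |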
case class IfStatement(var label: String, val options: List[_]) | |
val if_statement = end(kw.`if`, (condition <~ kw.then) ~ sequence_of_statements ~ rep((kw.elsif ~> condition) ~ | |
(kw.then ~> sequence_of_statements)) ~ opt(kw.`else` ~> sequence_of_statements)) ~ opt( /*if_*/ label) ^^ { | |
case cond ~ then ~ rep ~ otherwise ~ label => val options = rep.foldLeft(cond :: then :: Nil) | |
{case(list, cond ~ then) => cond :: then :: list} | |
IfStatement(label.getOrElse(null), otherwise.map(_ :: options).getOrElse(options).reverse) | |
} | |
def parsePair[A, B](pair: Parser[A ~ B]) = pair ^^ {case a ~ b => a -> b} | |
// produces single condition for "while" and (identifier -> range) for the "for" | |
val parameter_specification = parsePair(identifier ~ (kw.in ~> discrete_range)); | |
val iteration_scheme = kw.`while` ~> condition | kw.`for` ~> /*loop_*/ parameter_specification | |
case class LoopStatement(val scheme: Any, val code: List[Any]) | |
case class NextStatement(val condition: Option[Any]) ; case class ExitStatement(val condition: Option[Any]) | |
case class ReturnStatement(val result: Any) ; case class NullStatement() | |
lazy val loop_statement: Parser[_] = opt(iteration_scheme) ~ end(kw.loop, sequence_of_statements) <~ | |
opt( /*loop_*/ label) ^^ {case scheme ~ code => LoopStatement(scheme.getOrElse(null), code)} | |
def control_smt[Res](kwd: kw.Value, mkRes: Option[Any] => Res) = | |
kwd ~> opt( /*loop_*/ label) ~ opt(kw.when ~> condition) ^^ {case o ~ cond => mkRes(cond)} | |
val next_statement = control_smt(kw.next, NextStatement); val exit_statement = control_smt(kw.exit, ExitStatement) | |
val return_statement = kw.`return` ~> opt(expression) ^^ ReturnStatement; val null_statement = kw.`null` ^^ {case _ => NullStatement()} | |
val sequential_statement = { val body = wait_statement | assertion_statement | report_statement | | |
signal_assignment | selected_signal_assignment | variable_assignment | selected_variable_assignment | | |
/*procedure_*/ call | if_statement | case_statement | loop_statement | next_statement | | |
exit_statement | return_statement | null_statement | |
(opt(label <~ ":") ~ body | success(None) ~ body) <~ ";"; // TODO: check that statement label matches the one specified here | |
} | |
val sequence_of_statements: Parser[List[_]] = rep(sequential_statement) | |
// DECLARATIONS (declare procedures and, thus refer statements) | |
// C O N C U R R E N T   S T A T E M E N T S | |
val identifier_list = commasep(identifier); val mode = kw.in | kw.out | kw.inout | kw.buffer | kw.linkage | |
trait DeclarativeItem {def id: String; def subt: Any} | |
case class ObjectDeclaration(val kind: kw.Value, val id: List[String], val subt: Any, val value: Option[Any]) | |
val subprogram_kind = kw.procedure | kw.function; val signal_kind = kw.register | kw.bus | |
def object_declaration(kword: kw.Value) = kword ~ identifier_list ~ (":" ~> subtype_indication) ~ | |
opt(signal_kind) ~ opt(":=" ~> expression) ^^ {case kind ~ ids ~ stype ~ regbus ~ initv => | |
val v = ObjectDeclaration(kw.withName(kind), ids, stype, initv) | |
if (regbus != None) println("warning!" + regbus.get + " is ignored for v " + v) ; v | |
} ; val constant_declaration = object_declaration(kw.constant) ; val variable_declaration = object_declaration(kw.variable) | |
val signal_declaration = object_declaration(kw.signal) | |
val interface_constant_declaration = opt(kw.constant) ~ identifier_list ~ (":" ~> opt(kw.in)) ~ subtype_indication ~ opt(":=" ~> /*static_*/ expression) | |
val interface_variable_declaration = opt(kw.variable) ~ identifier_list ~ (":" ~> opt(mode)) ~ subtype_indication ~ opt(":=" ~> /*static_*/ expression) | |
val interface_signal_declaration = opt(kw.signal) ~ identifier_list ~ (":" ~> opt(mode)) ~ subtype_indication ~ opt(kw.bus) ~ opt(":=" ~> /*static_*/ expression) | |
case class InterfaceObjectDeclaration(val kind: Option[kw.Value], val ids: List[String], val mode: kw.Value, val subt: Any, val init: Any) | |
val interface_object_declaration = opt(kw.signal | kw.variable | kw.constant) ~ identifier_list ~ (":" ~> opt(mode)) ~ | |
subtype_indication ~ opt(kw.bus) ~ opt(":=" ~> /*static_*/ expression) ^^ { | |
case kind ~ names ~ mode ~ st ~ bus ~ init => val kindVal = kind.map(kind => kw.withName(kind)) | |
val modeVal = kw.withName(mode.getOrElse("in")) ; InterfaceObjectDeclaration(kindVal, names, modeVal, st, init) | |
} | |
object FILE_OPEN_KIND extends Enumeration {type FILE_OPEN_KIND = Value; val READ_MODE, WRITE_MODE, APPEND_MODE = Value} | |
case class FileDeclaration(val id: String, val subt: Any, val fileOpenInfo: Option[(/*FILE_OPEN_KIND.Value*/ Any, Any)]) extends DeclarativeItem | |
val interface_file_declaration = kw.file ~> identifier_list ~ (":" ~> subtype_indication) | |
val file_open_information = opt(kw.open ~> /*file_open_kind_*/ expression) ~ (kw.is ~> | |
/*file_logical_name = */ expression) ^^ {case kind ~ name => kind.getOrElse(FILE_OPEN_KIND.READ_MODE) -> name}; | |
val file_declaration = interface_file_declaration ~ opt(file_open_information) ^^ {case names ~ st ~ foi => | |
names.map(FileDeclaration(_, st, foi)) | |
} | |
def map_aspect(aspect: kw.Value) = aspect ~> kw.map ~> association_list | |
val generic_map_aspect = map_aspect(kw.generic) ; val port_map_aspect = map_aspect(kw.port) | |
val interface_package_declaration = kw.`package` ~ identifier ~ kw.is ~ kw.`new` ~ (generic_map_aspect | kw.generic ~> kw.map ~> pared(kw.default | "<>")) | |
val designator = identifier | operator_symbol; val interface_list: Parser[List[_]] = pared(repsep(interface_constant_declaration | interface_signal_declaration | interface_variable_declaration | | |
interface_file_declaration | kw.`type` ~> identifier | interface_subprogram_declaration | interface_package_declaration, ";")) | |
val ifsubprog = designator ~ opt(opt(kw.parameter) ~> /*parameter_*/ interface_list); | |
val subprogram_header = opt(kw.generic ~> /*generic_*/ interface_list ~ opt(generic_map_aspect)) | |
val subprogram_instantiation_declaration = subprogram_kind ~ designator ~ kw.is ~> kw.`new` ~> /*uninstantiated_subprogram_*/ name ~> opt(signature) ~ opt(generic_map_aspect) | |
val subprog = designator ~ subprogram_header ~ opt(opt(kw.parameter) ~> /*formal_parameter_*/ interface_list) | |
val procedure_specification = kw.procedure ~> subprog; val interface_procedure_specification = kw.procedure ~> ifsubprog | |
val interface_function_specification = opt(kw.pure | kw.impure) ~ kw.function ~ ifsubprog ~ kw.`return` ~ type_mark | |
val function_specification = opt(kw.pure | kw.impure) ~> (kw.function ~> subprog) ~ (kw.`return` ~> type_mark) ^^ {case subp ~ result => subp -> result} | |
val subprogram_specification = procedure_specification | function_specification; val subprogram_declaration = subprogram_specification | |
val interface_subprogram_specification = interface_procedure_specification | interface_function_specification | |
val interface_subprogram_declaration = interface_subprogram_specification ~ opt(kw.is ~ ( /*subprogram_*/ name | "<>")) | |
case class EnumerationType(val values: List[Any]) extends ScalarType // can we use List straightforwardly? | |
case class NumericType(val range: Any) extends ScalarType | |
val physical_type_definition = range ~ endo(kw.units, (identifier <~ ";") ~ rep( /*secondary_unit*/ identifier ~ ("=" ~> physical_literal) <~ ";")) ^^ { | |
case range ~ ( primary ~ secondary) => PhysicalType(range, primary, secondary.map{case id ~ pliteral => id -> pliteral}) | |
} ; case class PhysicalType(val range: Any, val primary: Any, val secondary: List[(String, Any)]) extends ScalarType | |
val scalar_type_definition = pcomma(enumeration_literal) ^^ EnumerationType | | |
/*integer_, floating_ = */ range_constraint ^^ NumericType | physical_type_definition | |
case class ArrayTypeU(val range: Any, val elemType: Any) extends TypeDefinition | |
case class ArrayType(val range: Any, val elemType: Any) extends TypeDefinition | |
def array_def[T](range_def: Parser[T], parseResult: (Any, Any) => TypeDefinition) = kw.array ~> range_def ~ (kw.of ~> /*element_*/ subtype_indication) ^^ | |
{case range ~ elem => parseResult(range, elem) } | |
val unbounded_array_definition = array_def(pcomma(type_mark <~ kw.range <~ "<>"), ArrayTypeU) | |
val constrained_array_definition = array_def(index_constraint, ArrayType) | |
val array_type_definition = unbounded_array_definition | constrained_array_definition | |
case class RecordType(val fields: List[Any]) extends TypeDefinition | |
val record_type_definition = endo(kw.record, rep1sep(identifier_list ~ (":" ~> subtype_indication), ";"))^^ RecordType | |
val composite_type_definition = array_type_definition | record_type_definition | |
val alias_designator = identifier | character_literal | operator_symbol; val entity_tag = alias_designator | |
val alias_declaration = kw.alias ~> alias_designator ~ opt(":" ~> subtype_indication) ~ kw.is ~> name ~ opt(signature) | |
val entity_name_list = commasep(entity_tag ~ opt(signature)) | kw.others | kw.all | |
val entity_class = kw.entity | kw.architecture | kw.configuration | kw.procedure | kw.function | kw.`package` | kw.`type` | kw.subtype | kw.constant | kw.signal | kw.variable | kw.component | kw.label | kw.literal | kw.units | kw.group | kw.file | kw.property | kw.sequence | |
val attribute_specification = kw.attribute ~> /*attribute_*/ simple_name ~ (kw.of ~> entity_name_list) ~ | |
(":" ~> entity_class) ~ (kw.is ~> expression) | |
val declareSequential: Parser[List[_]] = declare(variable_declaration) ; val use_clause = "use" ~ commasep(identifier) | |
val protected_type_body = endo(kw.`protected` ~ kw.body, declareSequential) | |
val protected_type_declaration = endo(kw.`protected`, repsep((subprogram_declaration | | |
subprogram_instantiation_declaration | attribute_specification | use_clause), ";")) | |
val protected_type_definition = protected_type_declaration | protected_type_body | |
trait TypeDefinition; trait ScalarType extends TypeDefinition | |
case class FileType(val tmark: Any) extends TypeDefinition | |
case class AccessType(val subtIndication: Any) extends TypeDefinition | |
case class ProtectedType(val spec: Any) extends TypeDefinition | |
val type_definition: Parser[TypeDefinition] = scalar_type_definition | composite_type_definition | kw.access ~> | |
subtype_indication ^^ AccessType | kw.file ~> kw.of ~> type_mark ^^ FileType | protected_type_definition ^^ ProtectedType | |
val generic_clause = kw.generic ~> /*generic_*/ interface_list; val port_clause = kw.port ~> /*port_*/ interface_list | |
val oClause = optList(kw.generic ~> interface_list) ~ optList(kw.port ~> interface_list) | |
def optList(parser: Parser[List[_]]): Parser[List[_]] = opt(parser) ^^ {case None | Some(List()) => null ; case list => list.get} | |
// We probably do not need the declaration classes. Just create a Map[String name => TypeDefinition] (container) and | |
// extend it on every declaration parse event. | |
trait Declaration {def name: Any} ; case class TypeDeclaration(val name: String, val definition: TypeDefinition) | |
val type_declaration = kw.`type` ~> identifier ~ (kw.is ~> type_definition) ^^ { // missing option: incomplete type declaration | |
case id ~ td => TypeDeclaration(id, td )} ; val subtype_declaration = kw.subtype ~> identifier ~ (kw.is ~> subtype_indication) ^^ { | |
case id ~ st => SubtypeDeclaration(id, st) } ; case class SubtypeDeclaration(val name: String, val definition: Any) | |
val package_instantiation_declaration = kw.`package` ~> identifier ~ (kw.is ~> kw.`new` ~> /*uninstantiated_package_*/ | |
name) ~ opt(generic_map_aspect) ^^ { case id1 ~ newname ~ gen_map => PackageInstantiationDeclaration(id1, newname, gen_map) | |
} ; case class PackageInstantiationDeclaration(val name: String, val newName: Any, val genericMap: Option[Any]) | |
val attribute_declaration = kw.attribute ~> identifier ~ (":" ~> type_mark) ^^ { | |
case id ~ tmark => AttributeDeclaration(id, tmark) | |
} ; case class AttributeDeclaration(val name: String, val aType: Any) extends Declaration | |
val component_declaration = endo(kw.component, (identifier <~ kw.is) ~ /*local_*/ oClause ) ^^ | |
{case id ~ (generic ~ ports) => ComponentDeclaration(id, generic, ports) } ; | |
case class ComponentDeclaration(val name: String, val generics: List[Any], val ports: List[Any]) | |
// todo: optimize name list and entity_class: match unreserved identifier and Success = identifier in enabled or not in disabled. Do not parse the identifier anew for every alternative | |
val component_specification = (commasep( /*instantiation_*/ label) | kw.others | kw.all) ~ (":" ~> /*component_*/ name) | |
case class EntityAspect(val name: Any, val arch: Option[String]) | |
val entity_aspect = kw.configuration ~> /*configuration_*/ name | kw.open | | |
kw.entity ~> /*entity_*/ name ~ opt(pared( /*architecture_*/ identifier)) ^^ {case name ~ oarg => EntityAspect(name, oarg)} | |
val binding_indication = opt(kw.use ~> entity_aspect) ~ optList(generic_map_aspect) ~ optList(port_map_aspect) | |
val compound_configuration_specification = end(kw.`for`, component_specification ~ (binding_indication <~ ";") ~ rep1sep(verification_unit_binding_indication, ";")) ^^ | |
{case labels ~ name ~ (entity_aspect ~ generic ~ port) ~ vunits => CompoundConfiguration(labels, name, entity_aspect, generic, port, vunits)} | |
case class CompoundConfiguration(val appliesto: Any, val name: Any, val entityAspect: Any, val generics: List[_], val ports: List[_], val vunits: Any) extends Declaration | |
val configuration_specification = compound_configuration_specification | kw.`for` ~> component_specification ~ binding_indication <~ opt(";" ~ kw.end ~ kw.`for`) ^^ | |
{case labels ~ name ~ (entity_aspect ~ generic ~ port) => CompoundConfiguration(labels, name, entity_aspect, generic, port, null)} | |
val verification_unit_binding_indication = kw.use ~> kw.vunit ~> commasep( /*verification_unit_*/ name) | |
val disconnection_specification = kw.disconnect <~ (commasep( /*signal_*/ name) | kw.others | kw.all) <~ ":" ~ type_mark <~ kw.after ~ /*time_*/ expression | |
def ooptB[A,B](a: Parser[A], b: Parser[B]) = a ~ opt(b) ^^ {case a ~ b => b.map(a -> _).getOrElse(a)} | |
val group_template_declaration = kw.group ~> (identifier <~ kw.is) ~ pcomma(ooptB(entity_class, "<>")) ^^ { | |
case id ~ classes => GroupTemplate(id, classes) | |
} ; case class GroupTemplate(val name: String, val entryClasses: List[_]) | |
val group_declaration = kw.group ~> identifier ~ (":" ~> /*group_template_*/ name) /*~ pcomma(name | character_literal)*/ ^^ { | |
case id ~ tn => GroupDeclaration(id, tn) // TODO: check that template name ends with pcomma(name | char) suffix | |
} ; case class GroupDeclaration(val name: String, val template: Any/*, val constituents: List[_]*/) extends Declaration | |
/*val PSL_Property_declaration = val PSL_Sequence_declaration = val PSL_Clock_declaration =*/ | |
lazy val subprogram_body = subprogram_specification ~ (kw.is ~> declareSequential) ~ | |
(kw.begin ~> sequence_of_statements <~ kw.end) ~ opt(subprogram_kind) ~ opt(designator) | |
val block_declarative_item_rep = declare(signal_declaration | kw.shared ~> variable_declaration | | |
disconnection_specification | component_declaration | configuration_specification /*| PSL_Property_declaration | PSL_Sequence_declaration | PSL_Clock_declaration*/ ) | |
val package_declaration: Parser[_] = endoo(kw.`package`, identifier <~ kw.is ~ opt(generic_clause ~ opt(generic_map_aspect | |
<~ ";")) ~ declareMinimal(disconnection_specification | signal_declaration | variable_declaration | component_declaration /*| PSL_Property_Declaration | PSL_Sequence_Declaration*/ )) | |
def declareMinimal(specific: Parser[_] = failure("it is not a declaration")): Parser[List[_]] = | |
{ rep((subprogram_instantiation_declaration | | |
package_declaration | file_declaration | subprogram_declaration | type_declaration | subtype_declaration | | |
constant_declaration | group_declaration | alias_declaration | attribute_declaration | attribute_specification | | |
use_clause | group_template_declaration | package_instantiation_declaration | specific) <~ ";") | |
} | |
def declare(specific: Parser[_]) = declareMinimal(package_body | subprogram_body | specific) | |
lazy val package_body = kw.is ~ declareSequential | |
val alternative_label = opt( /*alternative_*/ label <~ ":"); val block_declarative_part = block_declarative_item_rep | |
val entity_statement = opt(kw.postponed) ~ (/*passive*/process_statement | assertion_statement | /*passive procesure_*/ call /*| PSL_PSL_Directive*/ ) | |
lazy val concurrent_statement = opt(label) ~ (component_instantiation_statement | block_statement | | |
(for_generate_statement | if_generate_statement | case_generate_statement) <~ kw.end <~ kw.generate ~ opt(label) | | |
entity_statement | concurrent_signal_assignment_statement | selected_signal_assignment) <~ ";" | |
val block_statement: Parser[_] = kw.block ~> opt(pared( /*guard_*/ condition)) ~ opt(kw.is) ~ | |
opt(generic_clause ~ opt(generic_map_aspect <~ ";")) ~ opt(port_clause ~ opt(port_map_aspect <~ ";")) ~ | |
block_declarative_part ~ kw.begin ~> rep(concurrent_statement) <~ kw.end <~ kw.block <~ opt(label) | |
trait ConcurrentStatement | |
val postponed = opt(kw.postponed) ^^ {case pp => pp.map(_ => true).getOrElse(false)} | |
val process_statement = (postponed <~ kw.process) ~ opt(pared(kw.all | sensitivity_list)) ~ (opt(kw.is) ~> | |
declareSequential) ~ (kw.begin ~> sequence_of_statements) <~ kw.end <~ opt(kw.postponed) <~ kw.process ^^ { | |
case pp ~ waiton ~ declarations ~ statements => Process(pp, waiton, declarations, statements) | |
} ; case class Process(val postponed: Boolean, val sensitivity: Option[Any], val declarations: List[_], val statements: List[_]) extends ConcurrentStatement | |
//val concurrent_signal_assignment_statement = opt(kw.postponed) ~ (target <~ "<=") ~ opt(kw.guarded) ~ optA(delay_mechanism ~ conditional(waveform)) | |
// TODO: recover 'guarded'. Concurrent assignments can be guarded. | |
val concurrent_signal_assignment_statement = postponed ~ (assignment(dm_cwf(conditional _), WaveformAssignment) | | |
selected_signal_assignment) ^^ { case pp ~ assignment => s"$pp : $assignment" + assignment.value | |
Process(pp, Some("TODO: extract the resignals from " -> assignment.value), null, List(assignment)) | |
} | |
val generate_statement_body = opt(block_declarative_part ~ kw.begin) /*~ rep { concurrent_statement } ~ | |
opt(kw.end ~> opt(/*alternative_*/label) <~ ";"*/ | |
def generate[T](body: Parser[T]) = end(kw.generate, body) | |
val if_generate_statement = kw.`if` ~> alternative_label ~ condition ~ generate(generate_statement_body) ~ | |
rep(kw.elsif ~> alternative_label ~ condition ~ generate_statement_body) ~ | |
opt(kw.`else` ~> alternative_label ~ generate_statement_body) | |
val for_generate_statement = kw.`for` ~> /*generate_*/ parameter_specification ~ generate(generate_statement_body) | |
val case_generate_statement = kw.`case` ~> expression ~ generate(rep1(kw.when ~> alternative_label ~ choices ~ "=>" ~> generate_statement_body)) | |
val instantiated_unit = opt(kw.component) ~> /*component_*/ name ^^ {case name => kw.component -> name}| | |
kw.configuration ~> /*configuration_*/ name ^^ {case name => kw.configuration -> name} | | |
kw.entity ~> /*entity_*/ name /*~ opt(pared(/*architecture_*/ identifier))*/ ^^ {case name => kw.entity -> name }// the name should end with a single-element indexed suffix (the architecture) | |
val component_instantiation_statement = instantiated_unit ~ optList(generic_map_aspect) ~ optList(port_map_aspect) ^^ { | |
case (kw.component, name) ~ g ~ p => ComponentInstantiation(name, g, p) | |
case (kw.configuration, name) ~ g ~ p => ConfigurationInstantiation(name, g, p) | |
case (kw.entity, name) ~ g ~ p => name.asInstanceOf[List[_]].reverse match { // name suffix is the arch | |
case arch :: prefix => EntityInstantiation(prefix.reverse, arch.asInstanceOf[List[String]].head, g,p) | |
} | |
//EntityInstantiation(name, arch, g, p) | |
} | |
trait Instantiation {def name: Any; def genMap: List[_]; def portMap: List[_]} | |
case class ComponentInstantiation(val name: Any, val genMap: List[_], val portMap: List[_]) extends Instantiation | |
case class ConfigurationInstantiation(val name: Any, val genMap: List[_], val portMap: List[_]) extends Instantiation | |
case class EntityInstantiation(val name: Any, val arch: String, val genMap: List[_], val portMap: List[_]) extends Instantiation | |
def end[T](kword: Parser[_], body: Parser[T]) = kword ~> body <~ (kw.end ~ kword) | |
def endo[T](kword: Parser[_], body: Parser[T]) = end(kword, body) <~ opt(simple_name) | |
def endoo[T](kword: Parser[_], body: Parser[T]) = kword ~ body <~ kw.end <~ opt(kword) <~ opt(simple_name) | |
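// Sketch of what the three closers above accept (the keywords are just examples): | |
//   end(kw.loop, body)        matches   loop <body> end loop | |
//   endo(kw.units, body)      matches   units <body> end units [simple_name] | |
//   endoo(kw.`package`, body) matches   package <body> end [package] [simple_name]   and keeps the keyword in the result | |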
// D E S I G N U N I T S | |
val context_reference = kw.context ~ commasep(selected_name); | |
val library_clause = kw.library ~ commasep(identifier); | |
val context_clause = rep((library_clause | use_clause | context_reference) <~ ";") | |
val context_declaration = endoo(kw.context, identifier ~ kw.is ~ context_clause); | |
val entity_header = /*formal_*/ oClause | |
val entity_declaration = kw.is ~> entity_header ~ declare(signal_declaration | kw.shared ~> variable_declaration | | |
disconnection_specification /*| PSL_Property_Declaration | PSL_Sequence_Declaration | PSL_Clock_Declaration */ ) ~ | |
opt(kw.begin ~> repsep(entity_statement /*| PSL_PSL_Directive*/ , ";")) | |
val component_configuration = component_specification ~ opt(binding_indication <~ ";") ~ | |
repsep(verification_unit_binding_indication, ";") ~ opt(block_configuration) | |
val generate_specification = opt( /*static_*/ discrete_range | /*static_*/ expression | /*alternative_*/ label) | |
lazy val block_configuration: Parser[_] = end(kw.`for`, ( /*architecture_*/ name | /*block_statement_ generate_statement_*/ | |
label ~ opt(generate_specification)) ~ rep(use_clause) ~ rep(block_configuration | component_configuration)) | |
val configuration_declaration = kw.of ~> /*entity_*/ name <~ kw.is ~ rep(use_clause | attribute_specification | | |
group_declaration) ~ repsep(verification_unit_binding_indication, ";") ~ block_configuration | |
def designUnits(kwNparser: (Parser[_], Parser[_])*) = kwNparser.map { | |
case (kword, body) => | |
endoo(kword, identifier ~ body) /*TODO: check the name matches identifier*/ }.reduce(_ | _) | |
implicit def kw2parser[T](couple: (kw.Value, Parser[T])) = couple match { case (kwd, par) => (parse_kw(kwd), par) } | |
val primary_unit = designUnits(kw.entity -> entity_declaration, kw.configuration -> configuration_declaration, | |
kw.`package` -> package_declaration, kw.context -> context_declaration) | | |
package_instantiation_declaration <~ ";" // | /*PSL_*/Verification_Unit | |
val architecture_body = kw.of ~> /*entity_*/ name ~ kw.is ~> block_declarative_item_rep ~ kw.begin ~> rep(concurrent_statement) | |
val secondary_unit = designUnits(kw.architecture -> architecture_body, kw.`package` ~ kw.body -> package_body) <~ ";" | |
val library_unit = (primary_unit | secondary_unit) <~ ";"; val design_unit = context_clause ~ library_unit | |
val design_file = rep(design_unit) | |
} |
I do not know what this file is | |
// Guenter has made an incredible achievement by automatically detecting the file order. Because the VHDL grammar mostly looks | |
// like formal_part ::= /*function_*/name ( formal_designator ) | type_mark ( formal_designator ) | formal_designator | |
// we decide which production to take by checking the semantic context. Now, when we receive an undefined function name, does | |
// it mean that there will never be such a function and we should try other productions, or is it unknown because some | |
// dependency is not compiled? We can figure this out if we ask the environment for the given name. If it is not there, | |
// then there is no such object and never will be, because all packages are attached. We know that we need to compile a depen- | |
// dency only if an unknown package is attached. | |
// But, then, we are left with the problem that the keyword "to" matches "toa". | |
import scala.util.parsing.combinator._ | |
object o extends VhdlParser { | |
/* | |
*/ | |
val designUnits = parseAll(vhdl, """ | |
entity aa is | |
ports (in a: int, out b: boolean) | |
end entity | |
entity bb is | |
ports (in a: int, out b: boolean) | |
end entity | |
architecture arch1 of aa is | |
signal s1: integer | |
type T1 is (1,2,3) | |
signal s2: T1 = 22 | |
begin | |
a: entity aa(a => s2, b) | |
b: entity pp() | |
c: process | |
variable v: integer | |
begin | |
wait for 1 ns | |
a := 1+1 | |
end process | |
end architecture | |
architecture arch2 of aa is | |
end architecture | |
""") match { | |
case Success(designUnits, _) => designUnits foreach println | |
    case NoSuccess(msg, _) => println(msg) | |
} | |
def main(args: Array[String]) { | |
} | |
} | |
// What is the point of elaborating on the fly? The only reason is to auto-detect the VHDL file order. | |
// If the file order is specified properly, we can elaborate right away. Moreover, if we pause as soon as | |
// an unknown design unit is detected and switch to another file, we can still auto-detect the file order. | |
// The current version of VHDL is sensitive to the order in which files are compiled and cannot express this order | |
// in the language itself. This is particularly problematic in large designs. Some tools try to solve the issue by | |
// automatically generating the compilation order, but this does not always yield the right result, as stated in | |
// http://essay.utwente.nl/66066/1/main.pdf. That paper also proposes a solution: | |
// use a compiler with multiple passes. In a first pass, identifiers and declarations are collected. Then, | |
// during the second pass, these identifiers and declarations are linked to each other. After this pass, signal declarations | |
// have a link to the declaration of their type, for example. Finally, in a third pass, the expressions can be evaluated. | |
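// A rough sketch of the multi-pass idea above, assuming all declarations of all files are collected
// before any reference gets resolved. The object and method names (MultiPassSketch, Decl, collect,
// resolve) are hypothetical and are not used by VhdlParser below.
object MultiPassSketch {
  final case class Decl(id: String, kind: String)
  // pass 1: gather every declaration from every file, regardless of compilation order
  def collect(files: List[List[Decl]]): Map[String, Decl] =
    files.flatten.map(d => d.id -> d).toMap
  // pass 2: link a reference to its declaration; None now really means "undeclared",
  // not "declared in a file that has not been compiled yet"
  def resolve(table: Map[String, Decl], ref: String): Option[Decl] = table.get(ref)
}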
class VhdlParser extends JavaTokenParsers { | |
class DesignUnit(id: String) | |
case class Entity(id: String, ports: List[Port]) extends DesignUnit(id) | |
{override def toString = s"entity $id" + ports.mkString("\n ", "\n ", "\n")} | |
case class Architecture(id: String, entity: String, declarations: List[Declaration], body: List[ParallelStatement]) extends DesignUnit(id) | |
{override def toString = s"architecture $entity($id)" + declarations.mkString("\n ", "\n ", "\n") + | |
{if (body.isEmpty) "" else "begin" + body.mkString("\n ", "\n ", "\n ")} | |
} | |
class Declaration(id: String) | |
case class Type(id: String, values: List[Any]) extends Declaration(id) | |
class ParallelStatement(id: String) {override def toString = s"$id: "} | |
case class Instance(id: String, entity: String, bindings: List[(String, Option[String])]) extends ParallelStatement(id) | |
{override def toString = s"$id: entity $entity (" + bindings.map{ _ match { | |
case (actual, None) => actual | |
case (formal, Some(actual)) => s"$formal => $actual" | |
}} + ")"} | |
class SeqSmt | |
case class Process(id: String, declarations: List[Declaration], code: List[SeqSmt]) extends ParallelStatement(id) {override def toString = super.toString + "process" + | |
declarations.mkString("\n ", "\n ", "\n") + " begin" + code.mkString("\n ", "\n ", "\n") | |
} | |
/* | |
class Binding(actual: String) {override def toString = actual} | |
case class PositionalBinding(actual: String) extends Binding(actual) | |
case class NamedBinding(formal: String, actual: String) extends Binding(actual) {override def toString = s"$formal => $actual"} | |
*/ | |
def id = ident | |
def literal = id | wholeNumber | |
def portDeclarations = "ports" ~> parenthised (port) | |
def entity = ("entity" ~> id ~ ("is" ~> portDeclarations) ) <~ "end" <~ opt("entity") ^^ {case (id ~ ports) => Entity (id, ports)} | |
def parenthised[T](p: Parser[T]) = "(" ~> repsep(p, ",") <~ ")" | |
class Object(id: String, тип: String) extends Declaration(id) | |
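  // obj factors out the common `kind id : тип [= literal]` shape shared by the port, signal, variable and constant parsers below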
def obj[T](kind: Parser[String], f:(String, String, String, Option[Any]) => T): Parser[T] = kind ~ ( id <~ ":") ~ id ~ opt("=" ~> literal) ^^ {case(kind ~ id ~ тип ~ value) => f(kind, id, тип, value)} | |
case class Port(id: String, тип: String, dir: String) extends Object(id, тип) {override def toString = s"$dir $id:$тип"} | |
//def port = ("in" | "out") ~ id ~ (":" ~> id) ^^ {case (dir ~ id ~ тип) => Port(id, dir, тип)} | |
def port: Parser[Port] = obj("in" | "out", (dir, id, тип, _) => Port(id, тип, dir)) | |
case class Signal(id: String, тип: String, value: Option[Any]) extends Object(id, тип) | |
case class Variable(id: String, тип: String, value: Option[Any]) extends Object(id, тип) | |
case class Constant(id: String, тип: String, value: Any) extends Object(id, тип) | |
def signal = obj( "signal", (_, id, тип, value) => Signal(id, тип, value)) | |
def variable = obj("variable", (_, id, тип, value) => Variable(id, тип, value)) | |
def const = obj("constant", (_, id, тип, value) => Constant(id, тип, value.get)) | |
def тип = ("type" ~> id <~ "is") ~ parenthised(literal) ^^ {case (id ~ list) => Type(id, list)} | |
  def declarations = rep(signal | const | тип | variable) | |
def architecture = ("architecture" ~> id) ~ ("of" ~> id <~ "is") ~ declarations ~ opt("begin" ~> rep(parSmt)) <~ | |
"end" <~ opt("architecture") ^^ {case (id ~ of ~ declarations ~ body) => | |
val list: List[ParallelStatement] = body.getOrElse(Nil) | |
Architecture(id, of, declarations, list)} | |
def labledStatement[T](p: Parser[T]) = id ~ (":" ~> p) | |
def block = instance | |
def parSmt = instance | process | block | |
def instance = labledStatement("entity" ~> id) ~ parenthised(id ~ opt("=>" ~> id)) ^^ {case (id ~ ref ~ mappings) => | |
/* | |
case (actual, None) => PositionalBinding(actual) | |
case (formal, Some(actual)) => NamedBinding(formal, actual) | |
*/ | |
Instance(id, ref, mappings.map { case (actual ~ formal) => (actual, formal)})} | |
def process = labledStatement("process" ~> declarations) ~ opt("begin" ~> rep(seqSmt)) <~ ("end" <~ "process") ^^ {case id ~ declarations ~ code => | |
Process(id, declarations, code.getOrElse(Nil))} | |
trait Expr | |
case class Literal(text: String) extends Expr {override def toString = text} | |
case class BinOp(sign: String, a: Expr, b: Expr) extends Expr {override def toString = s"$a $sign $b"} | |
case class UnaryOp(sign: String, a: Expr) extends Expr {override def toString = s"$sign $a"} | |
case class Parenthesis(child: Expr) extends Expr {override def toString = s"($child)"} | |
  def literalExpr: Parser[Literal] = literal ^^ {case (literal) => Literal(literal)} | |
  def unaryOp: Parser[UnaryOp] = ("-" | "not") ~ expr ^^ {case sign ~ child => UnaryOp(sign, child)} | |
  def parenthesis: Parser[Parenthesis] = "(" ~> expr <~ ")" ^^ {case (expr) => Parenthesis(expr)} | |
  // an operand never starts with another expression, which keeps expr free of left recursion | |
  def operand: Parser[Expr] = literalExpr | unaryOp | parenthesis | |
  class BoolOp(sign: String, a: Expr, b: Expr) extends BinOp(sign, a, b) | |
  def boolOp: Parser[BoolOp] = operand ~ ("=" | "<" | ">") ~ expr ^^ {case a ~ sign ~ b => new BoolOp(sign, a, b)} | |
  def binOp: Parser[BinOp] = boolOp | | |
    (operand ~ ("+" | "-" | "*" | "/" | "&" | "or" | "and" | "xor" ) ~ expr ^^ {case (a ~ sign ~ b) => BinOp(sign, a, b)}) | |
  def expr: Parser[Expr] = binOp | operand | |
case class Assignment(dest: String, source: Expr) extends SeqSmt | |
def assign = (id <~ (":=" | "<=")) ~ expr ^^ {case target ~ value => Assignment(target, value)} | |
def timeunit = "ps" | "sec" | "fs" | "ns" | "ms" | "us" | |
case class WaitFor(num: Int, unit: String) extends SeqSmt | |
  def waitFor = "for" ~> decimalNumber ~ timeunit ^^ {case num ~ unit => WaitFor(num.toInt, unit)} | |
case class WaitOn(signal: String, until: Option[BoolOp]) extends SeqSmt; | |
def waitOn = ("on" ~> id) ~ opt("until" ~> boolOp) ^^ {case signal ~ until => WaitOn(signal, until)} | |
def waitSmt: Parser[SeqSmt] = "wait" ~> (waitOn | waitFor) | |
case class Report(msg: String, severity: Option[String]) extends SeqSmt | |
  def report = stringLiteral ~ opt("severity" ~> ("note" | "warning" | "error" | "fatal")) ^^ {case text ~ severity => Report(text, severity)} | |
def seqSmt: Parser[SeqSmt] = (waitSmt | report | assign) | |
def vhdl = rep(entity | architecture) | |
} |