diff --git a/doc/interpolations.md b/doc/interpolations.md
index 9280da807..1e87005fe 100644
--- a/doc/interpolations.md
+++ b/doc/interpolations.md
@@ -7,7 +7,7 @@ Inox String Interpolation
- ***[Introduction](#introduction)***
- [Importing](#importing)
-- ***[Syntax](#syntax)***
+- ***[Expressions](#expressions)***
- [Literals](#literals)
- [Boolean](#boolean-literals)
- [Numeric](#numeric-literals)
@@ -20,7 +20,6 @@ Inox String Interpolation
- [Lambda expressions](#lambda-expressions)
- [Quantifiers](#quantifiers)
- [Universal quantifiers](#universal-quantifiers)
- - [Existential quantifiers](#existential-quantifiers)
- [Choose](#choose)
- ***[Primitives](#primitives)***
@@ -51,23 +50,23 @@ Once imported, it is possible to build Inox types and expressions using a friend
```scala
scala> val tpe = t"Boolean"
-tpe: inox.trees.interpolator.trees.Type = Boolean
+tpe: inox.trees.Type = Boolean
scala> val expr = e"1 + 1 == 2"
-expr: inox.trees.interpolator.trees.Expr = 1 + 1 == 2
+expr: inox.trees.Expr = 1 + 1 == 2
```
It is also possible to embed types and expressions:
```scala
-scala> e"let x: $tpe = $expr in !x"
-res1: inox.trees.interpolator.trees.Expr =
-val x: Boolean = 1 + 1 == 2
-¬x
+scala> e"let x: $tpe = $expr; !x"
+res1: inox.trees.Expr =
+let x: Boolean = 1 + 1 == 2;
+!x
```
-
-# Syntax
+
+# Expressions
## Literals
@@ -77,10 +76,10 @@ val x: Boolean = 1 + 1 == 2
```scala
scala> e"true"
-res2: inox.trees.interpolator.trees.Expr = true
+res2: inox.trees.Expr = true
scala> e"false"
-res3: inox.trees.interpolator.trees.Expr = false
+res3: inox.trees.Expr = false
```
@@ -88,35 +87,35 @@ res3: inox.trees.interpolator.trees.Expr = false
```scala
scala> e"1"
-res4: inox.trees.interpolator.trees.Expr = 1
+res4: inox.trees.Expr = 1
```
-Note that the type of numeric expressions is inferred. In case of ambiguity, `BigInt` is chosen by default.
+Note that the type of numeric expressions is inferred. In case of ambiguity, `Integer` is chosen by default.
```scala
scala> val bigIntLit = e"1"
-bigIntLit: inox.trees.interpolator.trees.Expr = 1
+bigIntLit: inox.trees.Expr = 1
scala> bigIntLit.getType
-res5: inox.trees.interpolator.trees.Type = BigInt
+res5: inox.trees.Type = Integer
```
It is however possible to annotate the desired type.
```scala
-scala> val intLit = e"1 : Int"
-intLit: inox.trees.interpolator.trees.Expr = 1
+scala> val intLit = e"1 as Int"
+intLit: inox.trees.Expr = 1
scala> intLit.getType
-res6: inox.trees.interpolator.trees.Type = Int
+res6: inox.trees.Type = Int
```
```scala
-scala> val realLit = e"1 : Real"
-realLit: inox.trees.interpolator.trees.Expr = 1
+scala> val realLit = e"1 as Real"
+realLit: inox.trees.Expr = 1
scala> realLit.getType
-res7: inox.trees.interpolator.trees.Type = Real
+res7: inox.trees.Type = Real
```
@@ -124,7 +123,7 @@ res7: inox.trees.interpolator.trees.Type = Real
```scala
scala> e"3.75"
-res8: inox.trees.interpolator.trees.Expr = 15/4
+res8: inox.trees.Expr = 15/4
```
@@ -132,7 +131,7 @@ res8: inox.trees.interpolator.trees.Expr = 15/4
```scala
scala> e"'Hello world!'"
-res9: inox.trees.interpolator.trees.Expr = "Hello world!"
+res9: inox.trees.Expr = "Hello world!"
```
@@ -140,7 +139,7 @@ res9: inox.trees.interpolator.trees.Expr = "Hello world!"
```scala
scala> e"`a`"
-res10: inox.trees.interpolator.trees.Expr = 'a'
+res10: inox.trees.Expr = 'a'
```
@@ -150,7 +149,7 @@ Arithmetic operators are infix and have their usual associativity and priority.
```scala
scala> e"1 + 2 * 5 + 6 - 7 / 17"
-res11: inox.trees.interpolator.trees.Expr = ((1 + 2 * 5) + 6) - 7 / 17
+res11: inox.trees.Expr = ((1 + 2 * 5) + 6) - 7 / 17
```
@@ -158,7 +157,7 @@ res11: inox.trees.interpolator.trees.Expr = ((1 + 2 * 5) + 6) - 7 / 17
```scala
scala> e"if (1 == 2) 'foo' else 'bar'"
-res12: inox.trees.interpolator.trees.Expr =
+res12: inox.trees.Expr =
if (1 == 2) {
"foo"
} else {
@@ -170,32 +169,39 @@ if (1 == 2) {
## Let bindings
```scala
-scala> e"let word: String = 'World!' in concatenate('Hello ', word)"
-res13: inox.trees.interpolator.trees.Expr =
-val word: String = "World!"
-"Hello " + word
+scala> e"let word: String = 'World!'; concatenate('Hello ', word)"
+res13: inox.trees.Expr =
+let word: String = "World!";
+concatenate("Hello ", word)
```
## Lambda expressions
```scala
-scala> e"lambda x: BigInt, y: BigInt. x + y"
-res14: inox.trees.interpolator.trees.Expr = (x: BigInt, y: BigInt) => x + y
+scala> e"lambda (x: Integer, y: Integer) => x + y"
+res14: inox.trees.Expr = (x: Integer, y: Integer) => x + y
```
It is also possible to use the Unicode `λ` symbol.
```scala
-scala> e"λx: BigInt, y: BigInt. x + y"
-res15: inox.trees.interpolator.trees.Expr = (x: BigInt, y: BigInt) => x + y
+scala> e"λ(x: Integer, y: Integer) => x + y"
+res15: inox.trees.Expr = (x: Integer, y: Integer) => x + y
+```
+
+Or even use this syntax:
+
+```scala
+scala> e"(x: Integer, y: Integer) => x + y"
+res16: inox.trees.Expr = (x: Integer, y: Integer) => x + y
```
Type annotations can be omitted for any of the parameters if their type can be inferred.
```scala
-scala> e"lambda x. x * 0.5"
-res16: inox.trees.interpolator.trees.Expr = (x: Real) => x * 1/2
+scala> e"lambda (x) => x * 0.5"
+res17: inox.trees.Expr = (x: Real) => x * 1/2
```
@@ -205,33 +211,22 @@ res16: inox.trees.interpolator.trees.Expr = (x: Real) => x * 1/2
### Universal Quantifier
```scala
-scala> e"forall x: Int. x > 0"
-res17: inox.trees.interpolator.trees.Expr = ∀x: Int. (x > 0)
-
-scala> e"∀x. x || true"
-res18: inox.trees.interpolator.trees.Expr = ∀x: Boolean. (x || true)
-```
-
-
-### Existential Quantifier
-
-```scala
-scala> e"exists x: BigInt. x < 0"
-res19: inox.trees.interpolator.trees.Expr = ¬∀x: BigInt. ¬(x < 0)
+scala> e"forall (x: Int) => x > 0"
+res18: inox.trees.Expr = ∀ (x: Int) => (x > 0)
-scala> e"∃x, y. x + y == 0"
-res20: inox.trees.interpolator.trees.Expr = ¬∀x: BigInt, y: BigInt. (x + y ≠ 0)
+scala> e"∀(x) => x || true"
+res19: inox.trees.Expr = ∀ (x: Boolean) => (x || true)
```
## Choose
```scala
-scala> e"choose x. x * 3 < 17"
-res21: inox.trees.interpolator.trees.Expr = choose((x: BigInt) => x * 3 < 17)
+scala> e"choose (x) => x * 3 < 17"
+res20: inox.trees.Expr = choose (x: Integer) => x * 3 < 17
-scala> e"choose x: String. true"
-res22: inox.trees.interpolator.trees.Expr = choose((x: String) => true)
+scala> e"choose (x: String) => length(x) == 10"
+res21: inox.trees.Expr = choose (x: String) => length(x) == 10
```
@@ -252,9 +247,9 @@ res22: inox.trees.interpolator.trees.Expr = choose((x: String) => true)
| Function | Type | Description | Inox Constructor |
| -------- | ---- | ----------- | ---------------- |
-| `length` | `String => BigInt` | Returns the length of the string. | `StringLength` |
+| `length` | `String => Integer` | Returns the length of the string. | `StringLength` |
| `concatenate` | `(String, String) => String` | Returns the concatenation of the two strings. | `StringConcat` |
-| `substring` | `(String, BigInt, BigInt) => String` | Returns the substring from the first index inclusive to the second index exclusive. | `SubString ` |
+| `substring` | `(String, Integer, Integer) => String` | Returns the substring from the first index inclusive to the second index exclusive. | `SubString` |
### Operators
@@ -271,14 +266,6 @@ res22: inox.trees.interpolator.trees.Expr = choose((x: String) => true)
| ----------- | ----------- | ---------------- |
| `Set[A](elements: A*)` | Returns a set containing the given `elements`. | `FiniteSet` |
-### Literal Syntax
-
-```
-{}
-{1, 2, 3}
-{'foo', 'bar', 'baz'}
-```
-
### Functions
| Function | Type | Description | Inox Constructor |
@@ -307,20 +294,13 @@ res22: inox.trees.interpolator.trees.Expr = choose((x: String) => true)
| Constructor | Description | Inox Constructor |
| ----------- | ----------- | ---------------- |
-| `Bag[A](bindings: (A -> BigInt)*)` | Returns a bag containing the given `bindings`. | `FiniteBag` |
-
-### Literal Syntax
-
-```
-{1 -> 2, 2 -> 4, 3 -> 6}
-{'foo' -> 5, 'bar' -> 2, 'baz' -> 2}
-```
+| `Bag[A](bindings: (A -> Integer)*)` | Returns a bag containing the given `bindings`. | `FiniteBag` |
### Functions
| Function | Type | Description | Inox Constructor |
| -------- | ---- | ----------- | ---------------- |
-| `multiplicity[A]` | `(Bag[A], A) => BigInt` | Returns the number of occurrences in the given bag of the given value. | `MultiplicityInBag` |
+| `multiplicity[A]` | `(Bag[A], A) => Integer` | Returns the number of occurrences in the given bag of the given value. | `MultiplicityInBag` |
| `bagAdd[A]` | `(Bag[A], A) => Bag[A]` | Returns the bag with an element added. | `BagAdd` |
| `bagUnion[A]` | `(Bag[A], Bag[A]) => Bag[A]` | Returns the unions of the two bags. | `BagUnion` |
| `bagIntersection[A]` | `(Bag[A], Bag[A]) => Bag[A]` | Returns the intersection of the two bags. | `BagIntersection` |
@@ -333,14 +313,7 @@ res22: inox.trees.interpolator.trees.Expr = choose((x: String) => true)
| Constructor | Description | Inox Constructor |
| ----------- | ----------- | ---------------- |
-| `Map[A](default: A, bindings: (A -> BigInt)*)` | Returns a map with default value `default` containing the given `bindings`. | `FiniteMap` |
-
-### Literal syntax
-
-```
-{*: Int -> 42}
-{* -> '???', 'hello' -> 'HELLO', 'world' -> 'WORLD'}
-```
+| `Map[A](default: A, bindings: (A -> Integer)*)` | Returns a map with default value `default` containing the given `bindings`. | `FiniteMap` |
### Functions
@@ -348,3 +321,29 @@ res22: inox.trees.interpolator.trees.Expr = choose((x: String) => true)
| -------- | ---- | ----------- | ---------------- |
| `apply[K, V]` | `(Map[K, V], K) => V` | Returns the value associated to the given key. | `MapApply` |
| `updated[K, V]` | `(Map[K, V], K, V) => Map[K, V]` | Returns the map with a bidding from the key to the value added. | `MapUpdated` |
+
+
+# Type Definitions
+
+```scala
+scala> td"type List[A] = Cons(head: A, tail: List[A]) | Nil()"
+res22: inox.trees.ADTSort = type List[A] = Cons(head: A, tail: List[A]) | Nil()
+```
+
+```scala
+scala> td"type Option[A] = Some(value: A) | None()"
+res23: inox.trees.ADTSort = type Option[A] = Some(value: A) | None()
+```
+
+
+# Function Definitions
+
+```scala
+scala> fd"def id[A](x: A): A = x"
+res24: inox.trees.FunDef = def id[A](x: A): A = x
+```
+
+```scala
+scala> fd"def twice[A](f: A => A): A => A = (x: A) => f(f(x))"
+res25: inox.trees.FunDef = def twice[A](f: (A) => A): (A) => A = (x: A) => f(f(x))
+```
diff --git a/src/main/doc/interpolations.md b/src/main/doc/interpolations.md
index 3bec49b6a..3bfed7c75 100644
--- a/src/main/doc/interpolations.md
+++ b/src/main/doc/interpolations.md
@@ -7,7 +7,7 @@ Inox String Interpolation
- ***[Introduction](#introduction)***
- [Importing](#importing)
-- ***[Syntax](#syntax)***
+- ***[Expressions](#expressions)***
- [Literals](#literals)
- [Boolean](#boolean-literals)
- [Numeric](#numeric-literals)
@@ -20,7 +20,6 @@ Inox String Interpolation
- [Lambda expressions](#lambda-expressions)
- [Quantifiers](#quantifiers)
- [Universal quantifiers](#universal-quantifiers)
- - [Existential quantifiers](#existential-quantifiers)
- [Choose](#choose)
- ***[Primitives](#primitives)***
@@ -57,11 +56,11 @@ val expr = e"1 + 1 == 2"
It is also possible to embed types and expressions:
```tut
-e"let x: $tpe = $expr in !x"
+e"let x: $tpe = $expr; !x"
```
-
-# Syntax
+
+# Expressions
## Literals
@@ -81,7 +80,7 @@ e"false"
e"1"
```
-Note that the type of numeric expressions is inferred. In case of ambiguity, `BigInt` is chosen by default.
+Note that the type of numeric expressions is inferred. In case of ambiguity, `Integer` is chosen by default.
```tut
val bigIntLit = e"1"
@@ -91,12 +90,12 @@ bigIntLit.getType
It is however possible to annotate the desired type.
```tut
-val intLit = e"1 : Int"
+val intLit = e"1 as Int"
intLit.getType
```
```tut
-val realLit = e"1 : Real"
+val realLit = e"1 as Real"
realLit.getType
```
@@ -141,26 +140,32 @@ e"if (1 == 2) 'foo' else 'bar'"
## Let bindings
```tut
-e"let word: String = 'World!' in concatenate('Hello ', word)"
+e"let word: String = 'World!'; concatenate('Hello ', word)"
```
## Lambda expressions
```tut
-e"lambda x: BigInt, y: BigInt. x + y"
+e"lambda (x: Integer, y: Integer) => x + y"
```
It is also possible to use the Unicode `λ` symbol.
```tut
-e"λx: BigInt, y: BigInt. x + y"
+e"λ(x: Integer, y: Integer) => x + y"
+```
+
+Or even use this syntax:
+
+```tut
+e"(x: Integer, y: Integer) => x + y"
```
Type annotations can be omitted for any of the parameters if their type can be inferred.
```tut
-e"lambda x. x * 0.5"
+e"lambda (x) => x * 0.5"
```
@@ -170,24 +175,16 @@ e"lambda x. x * 0.5"
### Universal Quantifier
```tut
-e"forall x: Int. x > 0"
-e"∀x. x || true"
-```
-
-
-### Existential Quantifier
-
-```tut
-e"exists x: BigInt. x < 0"
-e"∃x, y. x + y == 0"
+e"forall (x: Int) => x > 0"
+e"∀(x) => x || true"
```
## Choose
```tut
-e"choose x. x * 3 < 17"
-e"choose x: String. true"
+e"choose (x) => x * 3 < 17"
+e"choose (x: String) => length(x) == 10"
```
@@ -208,9 +205,9 @@ e"choose x: String. true"
| Function | Type | Description | Inox Constructor |
| -------- | ---- | ----------- | ---------------- |
-| `length` | `String => BigInt` | Returns the length of the string. | `StringLength` |
+| `length` | `String => Integer` | Returns the length of the string. | `StringLength` |
| `concatenate` | `(String, String) => String` | Returns the concatenation of the two strings. | `StringConcat` |
-| `substring` | `(String, BigInt, BigInt) => String` | Returns the substring from the first index inclusive to the second index exclusive. | `SubString ` |
+| `substring` | `(String, Integer, Integer) => String` | Returns the substring from the first index inclusive to the second index exclusive. | `SubString` |
### Operators
@@ -227,14 +224,6 @@ e"choose x: String. true"
| ----------- | ----------- | ---------------- |
| `Set[A](elements: A*)` | Returns a set containing the given `elements`. | `FiniteSet` |
-### Literal Syntax
-
-```
-{}
-{1, 2, 3}
-{'foo', 'bar', 'baz'}
-```
-
### Functions
| Function | Type | Description | Inox Constructor |
@@ -263,20 +252,13 @@ e"choose x: String. true"
| Constructor | Description | Inox Constructor |
| ----------- | ----------- | ---------------- |
-| `Bag[A](bindings: (A -> BigInt)*)` | Returns a bag containing the given `bindings`. | `FiniteBag` |
-
-### Literal Syntax
-
-```
-{1 -> 2, 2 -> 4, 3 -> 6}
-{'foo' -> 5, 'bar' -> 2, 'baz' -> 2}
-```
+| `Bag[A](bindings: (A -> Integer)*)` | Returns a bag containing the given `bindings`. | `FiniteBag` |
### Functions
| Function | Type | Description | Inox Constructor |
| -------- | ---- | ----------- | ---------------- |
-| `multiplicity[A]` | `(Bag[A], A) => BigInt` | Returns the number of occurrences in the given bag of the given value. | `MultiplicityInBag` |
+| `multiplicity[A]` | `(Bag[A], A) => Integer` | Returns the number of occurrences in the given bag of the given value. | `MultiplicityInBag` |
| `bagAdd[A]` | `(Bag[A], A) => Bag[A]` | Returns the bag with an element added. | `BagAdd` |
| `bagUnion[A]` | `(Bag[A], Bag[A]) => Bag[A]` | Returns the unions of the two bags. | `BagUnion` |
| `bagIntersection[A]` | `(Bag[A], Bag[A]) => Bag[A]` | Returns the intersection of the two bags. | `BagIntersection` |
@@ -289,14 +271,7 @@ e"choose x: String. true"
| Constructor | Description | Inox Constructor |
| ----------- | ----------- | ---------------- |
-| `Map[A](default: A, bindings: (A -> BigInt)*)` | Returns a map with default value `default` containing the given `bindings`. | `FiniteMap` |
-
-### Literal syntax
-
-```
-{*: Int -> 42}
-{* -> '???', 'hello' -> 'HELLO', 'world' -> 'WORLD'}
-```
+| `Map[A](default: A, bindings: (A -> Integer)*)` | Returns a map with default value `default` containing the given `bindings`. | `FiniteMap` |
### Functions
@@ -304,3 +279,25 @@ e"choose x: String. true"
| -------- | ---- | ----------- | ---------------- |
| `apply[K, V]` | `(Map[K, V], K) => V` | Returns the value associated to the given key. | `MapApply` |
| `updated[K, V]` | `(Map[K, V], K, V) => Map[K, V]` | Returns the map with a bidding from the key to the value added. | `MapUpdated` |
+
+
+# Type Definitions
+
+```tut
+td"type List[A] = Cons(head: A, tail: List[A]) | Nil()"
+```
+
+```tut
+td"type Option[A] = Some(value: A) | None()"
+```
+
+
+# Function Definitions
+
+```tut
+fd"def id[A](x: A): A = x"
+```
+
+```tut
+fd"def twice[A](f: A => A): A => A = (x: A) => f(f(x))"
+```
diff --git a/src/main/scala/inox/ast/Definitions.scala b/src/main/scala/inox/ast/Definitions.scala
index d5eeadcc2..526b10fb8 100644
--- a/src/main/scala/inox/ast/Definitions.scala
+++ b/src/main/scala/inox/ast/Definitions.scala
@@ -3,7 +3,6 @@
package inox
package ast
-import inox.parsing.Interpolator
import inox.transformers._
import inox.utils._
@@ -336,7 +335,7 @@ trait Definitions { self: Trees =>
}
/** An invariant that refines this [[ADTSort]] */
- def invariant(implicit s: Symbols): Option[FunDef] =
+ def invariant(implicit s: Symbols): Option[FunDef] =
flags.collectFirst { case HasADTInvariant(id) => s.getFunction(id) }
def hasInvariant(implicit s: Symbols): Boolean = invariant.isDefined
diff --git a/src/main/scala/inox/ast/Printers.scala b/src/main/scala/inox/ast/Printers.scala
index cb4491aa7..01dcf3f93 100644
--- a/src/main/scala/inox/ast/Printers.scala
+++ b/src/main/scala/inox/ast/Printers.scala
@@ -109,6 +109,459 @@ trait Printer {
}
}
+ protected def ppBody(tree: Tree)(implicit ctx: PrinterContext): Unit = tree match {
+ case Variable(id, _, _) =>
+ p"$id"
+
+ case Let(b, d, e) =>
+ p"""|let $b = $d;
+ |$e"""
+
+ case Forall(args, e) =>
+ p"\u2200 (${nary(args)}) => $e"
+
+ case Choose(res, pred) =>
+ p"choose ($res) => $pred"
+
+ case Assume(pred, body) =>
+ p"""|assume($pred);
+ |$body"""
+
+ case e @ ADT(id, tps, args) =>
+ p"$id${nary(tps, ", ", "[", "]")}($args)"
+
+ case And(exprs) => optP {
+ p"${nary(exprs, " && ")}"
+ }
+ case Or(exprs) => optP {
+ p"${nary(exprs, "| || ")}"
+ } // Ugliness award! The first | is there to shield from stripMargin()
+ case Not(Equals(l, r)) => optP {
+ p"$l != $r"
+ }
+ case Implies(l, r) => optP {
+ p"$l ==> $r"
+ }
+ case UMinus(expr) => p"-$expr"
+ case Equals(l, r) => optP {
+ p"$l == $r"
+ }
+
+ case StringConcat(lhs, rhs) => optP {
+ p"concatenate($lhs, $rhs)"
+ }
+ case SubString(expr, start, end) => p"substring($expr, $start, $end)"
+ case StringLength(expr) => p"length($expr)"
+
+ case Int8Literal(v) => p"$v"
+ case Int16Literal(v) => p"$v"
+ case Int32Literal(v) => p"$v"
+ case Int64Literal(v) => p"$v"
+ case BVLiteral(_, bits, size) => p"x${(size to 1 by -1).map(i => if (bits(i)) "1" else "0").mkString("")}"
+ case IntegerLiteral(v) => p"$v"
+ case FractionLiteral(n, d) =>
+ if (d == 1) p"$n"
+ else p"$n/$d"
+ case CharLiteral(v) => p"'${StringEscapeUtils.escapeJava(v.toString)}'"
+ case BooleanLiteral(v) => p"$v"
+ case UnitLiteral() => p"()"
+ case StringLiteral(v) =>
+ val escaped = StringEscapeUtils.escapeJava(v)
+ p"$dbquote$escaped$dbquote"
+ case GenericValue(tp, id) => p"$tp#$id"
+ case Tuple(exprs) => p"($exprs)"
+ case TupleSelect(t, i) => p"$t._$i"
+ case IsConstructor(e, id) => p"$e is $id"
+ case ADTSelector(e, id) => p"$e.$id"
+
+ case FunctionInvocation(id, tps, args) =>
+ p"$id${nary(tps, ", ", "[", "]")}"
+ if (args.nonEmpty) {
+ p"($args)"
+ }
+
+ case Application(caller, args) =>
+ p"$caller($args)"
+
+ case Lambda(args, body) =>
+ optP {
+ p"($args) => $body"
+ }
+
+ case Plus(l, r) => optP {
+ p"$l + $r"
+ }
+ case Minus(l, r) => optP {
+ p"$l - $r"
+ }
+ case Times(l, r) => optP {
+ p"$l * $r"
+ }
+ case Division(l, r) => optP {
+ p"$l / $r"
+ }
+ case Remainder(l, r) => optP {
+ p"$l % $r"
+ }
+ case Modulo(l, r) => optP {
+ p"$l mod $r"
+ }
+ case LessThan(l, r) => optP {
+ p"$l < $r"
+ }
+ case GreaterThan(l, r) => optP {
+ p"$l > $r"
+ }
+ case LessEquals(l, r) => optP {
+ p"$l <= $r"
+ }
+ case GreaterEquals(l, r) => optP {
+ p"$l >= $r"
+ }
+ case BVNot(e) => optP {
+ p"~$e"
+ }
+ case BVXor(l, r) => optP {
+ p"$l ^ $r"
+ }
+ case BVOr(l, r) => optP {
+ p"$l | $r"
+ }
+ case BVAnd(l, r) => optP {
+ p"$l & $r"
+ }
+ case BVShiftLeft(l, r) => optP {
+ p"$l << $r"
+ }
+ case BVAShiftRight(l, r) => optP {
+ p"$l >> $r"
+ }
+ case BVLShiftRight(l, r) => optP {
+ p"$l >>> $r"
+ }
+
+ case BVWideningCast(e, BVType(_, size)) => p"widen$size($e)"
+ case BVNarrowingCast(e, BVType(_, size)) => p"narrow$size($e)"
+
+ case fs @ FiniteSet(rs, _) => p"Set(${rs})"
+ case fs @ FiniteBag(rs, _) => p"Bag(${rs.toSeq})"
+ case fm @ FiniteMap(rs, dflt, _, _) => p"Map($dflt, ${rs.toSeq})"
+ case ElementOfSet(e, s) => p"elementOfSet($e, $s)"
+ case SubsetOf(l, r) => p"subset($l, $r)"
+ case SetAdd(s, e) => p"setAdd($s, $e)"
+ case SetUnion(l, r) => p"setUnion($l, $r)"
+ case BagUnion(l, r) => p"bagUnion($l, $r)"
+ case SetDifference(l, r) => p"setDifference($l, $r)"
+ case BagDifference(l, r) => p"bagDifference($l, $r)"
+ case SetIntersection(l, r) => p"setIntersection($l, $r)"
+ case BagIntersection(l, r) => p"bagIntersection($l, $r)"
+ case BagAdd(b, e) => p"bagAdd($b, $e)"
+ case MultiplicityInBag(e, b) => p"multiplicity($b, $e)"
+ case MapApply(m, k) => p"apply($m, $k)"
+ case MapUpdated(m, k, v) => p"updated($m, $k, $v)"
+
+ case Not(expr) => p"!$expr"
+
+ case vd @ ValDef(id, tpe, flags) =>
+ p"$id: $tpe"
+
+ case (tfd: TypedFunDef) => p"typed def ${tfd.id}[${tfd.tps}]"
+ case (afd: TypedADTSort) => p"typed class ${afd.id}[${afd.tps}]"
+ case (afd: TypedADTConstructor) => p"typed class ${afd.id}[${afd.tps}]"
+
+ case tpd: TypeParameterDef =>
+ p"${tpd.tp}"
+
+ case TypeParameter(id, flags) => p"$id"
+
+ case IfExpr(c, t, ie: IfExpr) =>
+ optP {
+ p"""|if ($c) {
+ | $t
+ |} else $ie"""
+ }
+
+ case IfExpr(c, t, e) =>
+ optP {
+ p"""|if ($c) {
+ | $t
+ |} else {
+ | $e
+ |}"""
+ }
+
+ // Types
+ case Untyped => p""
+ case UnitType() => p"Unit"
+ case Int32Type() => p"Int"
+ case BVType(true, size) => p"Int$size"
+ case BVType(false, size) => p"UInt$size"
+ case IntegerType() => p"Integer"
+ case RealType() => p"Real"
+ case CharType() => p"Char"
+ case BooleanType() => p"Boolean"
+ case StringType() => p"String"
+ case SetType(bt) => p"Set[$bt]"
+ case BagType(bt) => p"Bag[$bt]"
+ case MapType(ft, tt) => p"Map[$ft, $tt]"
+ case TupleType(tpes) => p"($tpes)"
+ case FunctionType(fts, tt) => p"($fts) => $tt"
+ case adt: ADTType =>
+ p"${adt.id}${nary(adt.tps, ", ", "[", "]")}"
+
+ case RefinementType(vd, pred) =>
+ p"|{ $vd "
+ ctx.sb.append("|")
+ p"| $pred }"
+
+ case PiType(params, to) => p"Pi ($params) => $to"
+ case SigmaType(params, to) => p"Sigma ($params) => $to"
+
+ // Definitions
+ case sort: ADTSort =>
+ for (an <- sort.flags) p"""|@${an.asString(ctx.opts)}
+ |"""
+ p"type ${sort.id}"
+ if (sort.tparams.nonEmpty) p"${nary(sort.tparams, ", ", "[", "]")}"
+ p"${nary(sort.constructors, "| | ", " = ", "")}"
+
+ case cons: ADTConstructor =>
+ p"${cons.id}"
+ p"(${cons.fields})"
+
+ case fd: FunDef =>
+ for (an <- fd.flags) {
+ p"""|@${an.asString(ctx.opts)}
+ |"""
+ }
+
+ p"def ${fd.id}${nary(fd.tparams, ", ", "[", "]")}(${fd.params})"
+ p": ${fd.returnType} = "
+ p"${fd.fullBody}"
+
+ case _ => ctx.sb.append("Tree? (" + tree.getClass + ")")
+ }
+
+ protected def ppSuffix(tree: Tree)(implicit ctx: PrinterContext): Unit = {
+ if (ctx.opts.printTypes) {
+ tree match {
+ case t: Expr =>
+ p" : ${t.getType(ctx.opts.symbols.get)} ⟩"
+
+ case _ =>
+ }
+ }
+ if (ctx.opts.printPositions) {
+ tree.getPos match {
+ case op: OffsetPosition =>
+ p"@($op)"
+ case rp: RangePosition =>
+ if (rp.lineFrom == rp.lineTo) {
+ if (rp.colFrom == rp.colTo) {
+ p"@(${rp.lineFrom}:${rp.colFrom})"
+ } else {
+ p"@(${rp.lineFrom}:${rp.colFrom}-${rp.colTo})"
+ }
+ } else {
+ p"@(${rp.focusBegin}-${rp.focusEnd})"
+ }
+ case _ =>
+ p"@(?)"
+ }
+ }
+ }
+
+ protected def isSimpleExpr(e: Expr): Boolean = e match {
+ case _: Let => false
+ case _: Assume => false
+ case _ => true
+ }
+
+ protected def noBracesSub(e: Tree): Seq[Expr] = e match {
+ case Let(_, _, bd) => Seq(bd)
+ case IfExpr(_, t, e) => Seq(t, e) // if-else always has braces anyway
+ case Assume(_, bd) => Seq(bd)
+ case _ => Seq()
+ }
+
+ protected def requiresBraces(ex: Tree, within: Option[Tree]) = (ex, within) match {
+ case (e: Expr, _) if isSimpleExpr(e) => false
+ case (e: Expr, Some(within)) if noBracesSub(within) contains e => false
+ case (e: Expr, Some(_)) => true
+ case _ => false
+ }
+
+ protected def precedence(ex: Expr): Int = ex match {
+ // 0: Letters
+ case (_: ElementOfSet | _: Modulo) => 0
+ // 1: |
+ case (_: Or | _: BVOr) => 1
+ // 2: ^
+ case (_: BVXor) => 2
+ // 3: &
+ case (_: And | _: BVAnd | _: SetIntersection) => 3
+ // 4: < >
+ case (
+ _: GreaterThan | _: GreaterEquals | _: LessEquals | _: LessThan |
+ _: BVAShiftRight | _: BVLShiftRight | _: BVShiftLeft
+ ) => 4
+ // 5: = !
+ case (_: Equals | _: Not | _: Implies) => 5
+ // 6: :
+ // 7: + -
+ case (_: Plus | _: Minus | _: SetUnion | _: SetDifference | _: StringConcat) => 7
+ // 8: * / %
+ case (_: Times | _: Division | _: Remainder) => 8
+ // 9: Other special characters
+ case _ => 9
+ }
+
+ protected def requiresParentheses(ex: Tree, within: Option[Tree]): Boolean = (ex, within) match {
+ case (_, None) => false
+ case (_, Some(
+ _: Definition | _: Let | _: IfExpr | _: ADT | _: Lambda | _: Choose | _: Tuple | _: Assume
+ )) => false
+ case (ex: StringConcat, Some(_: StringConcat)) => false
+ case (_, Some(_: FunctionInvocation)) => false
+ case (ie: IfExpr, _) => true
+ case (e1: Expr, Some(e2: Expr)) if precedence(e1) > precedence(e2) => false
+ case (_, _) => true
+ }
+
+ implicit class PrintWrapper(val f: PrinterContext => Any) {
+ def print(ctx: PrinterContext) = f(ctx)
+ }
+
+ implicit class PrintingHelper(val sc: StringContext) {
+
+ def p(args: Any*)(implicit ctx: PrinterContext): Unit = {
+ val sb = ctx.sb
+
+ val strings = sc.parts.iterator
+ val expressions = args.iterator
+
+ var extraInd = 0
+ var firstElem = true
+
+ while (strings.hasNext) {
+ val currval = strings.next
+ val s = if (currval != " || ") {
+ currval.stripMargin
+ } else currval
+
+ // Compute indentation
+ val start = s.lastIndexOf('\n')
+ if (start >= 0 || firstElem) {
+ var i = start + 1
+ while (i < s.length && s(i) == ' ') {
+ i += 1
+ }
+ extraInd = (i - start - 1) / 2
+ }
+
+ firstElem = false
+
+ // Make sure new lines are also indented
+ sb.append(s.replaceAll("\n", "\n" + (" " * ctx.lvl)))
+
+ val nctx = ctx.copy(lvl = ctx.lvl + extraInd)
+
+ if (expressions.hasNext) {
+ val e = expressions.next
+
+ e match {
+ case (t1, t2) =>
+ nary(Seq(t1, t2), " -> ").print(nctx)
+
+ case ts: Seq[Any] =>
+ nary(ts).print(nctx)
+
+ case t: Tree =>
+ // Don't add same tree twice in parents
+ val parents = if (nctx.parents.headOption contains nctx.current) {
+ nctx.parents
+ } else {
+ nctx.current :: nctx.parents
+ }
+ val nctx2 = nctx.copy(parents = parents, current = t)
+ pp(t)(nctx2)
+
+ case id: Identifier =>
+ val name = if (ctx.opts.printUniqueIds) {
+ id.uniqueName
+ } else {
+ id.toString
+ }
+ p"$name"
+
+ case p: PrintWrapper =>
+ p.print(nctx)
+
+ case e =>
+ sb.append(e.toString)
+ }
+ }
+ }
+ }
+ }
+
+ def nary(ls: Seq[Any], sep: String = ", ", init: String = "", closing: String = ""): PrintWrapper = {
+ val (i, c) = if (ls.isEmpty) ("", "") else (init, closing)
+ val strs = i +: List.fill(ls.size - 1)(sep) :+ c
+
+ implicit pctx: PrinterContext =>
+ new StringContext(strs: _*).p(ls: _*)
+ }
+
+ def typed(t: Tree with Typed)(implicit s: Symbols): PrintWrapper = {
+ implicit pctx: PrinterContext =>
+ p"$t : ${t.getType}"
+ }
+
+ def typed(ts: Seq[Tree with Typed])(implicit s: Symbols): PrintWrapper = {
+ nary(ts.map(typed))
+ }
+}
+
+trait LegacyPrinter {
+ protected val trees: Trees
+ import trees._
+
+ protected def optP(body: => Any)(implicit ctx: PrinterContext) = {
+ if (requiresParentheses(ctx.current, ctx.parent)) {
+ ctx.sb.append("(")
+ body
+ ctx.sb.append(")")
+ } else {
+ body
+ }
+ }
+
+ private val dbquote = "\""
+
+ def pp(tree: Tree)(implicit ctx: PrinterContext): Unit = {
+ if (requiresBraces(tree, ctx.parent) && !ctx.parent.contains(tree)) {
+ p"""|{
+ | $tree
+ |}"""
+ } else {
+ ppPrefix(tree)
+ ppBody(tree)
+ ppSuffix(tree)
+ }
+ }
+
+ protected def ppPrefix(tree: Tree)(implicit ctx: PrinterContext): Unit = {
+ if (ctx.opts.printTypes) {
+ tree match {
+ case t: Expr =>
+ p"⟨"
+
+ case _ =>
+ }
+ }
+ }
+
protected def ppBody(tree: Tree)(implicit ctx: PrinterContext): Unit = tree match {
case Variable(id, _, _) =>
p"$id"
diff --git a/src/main/scala/inox/ast/Trees.scala b/src/main/scala/inox/ast/Trees.scala
index efb8c6c85..1ce3053bf 100644
--- a/src/main/scala/inox/ast/Trees.scala
+++ b/src/main/scala/inox/ast/Trees.scala
@@ -5,7 +5,7 @@ package ast
import scala.language.implicitConversions
-import inox.parsing.Interpolator
+import inox.parser._
trait Trees
extends Expressions
@@ -38,9 +38,9 @@ trait Trees
protected val trees: Trees.this.type = Trees.this
} with DSL
- val interpolator: Interpolator { val trees: Trees.this.type } = new {
+ val interpolator: MacrosInterpolators { val trees: Trees.this.type } = new {
protected val trees: Trees.this.type = Trees.this
- } with Interpolator
+ } with MacrosInterpolators
def aliased(id1: Identifier, id2: Identifier) = {
id1.toString == id2.toString
diff --git a/src/main/scala/inox/package.scala b/src/main/scala/inox/package.scala
index 595a7b097..af8ca7edc 100644
--- a/src/main/scala/inox/package.scala
+++ b/src/main/scala/inox/package.scala
@@ -27,7 +27,7 @@ package object inox {
/** We provide aliases to [[ast.Identifier]] and [[ast.FreshIdentifier]] here
* for a more natural import experience.
- *
+ *
* Indeed, as Inox typically follows a pattern of nesting package clauses with
* the outer-most being {{{package inox}}}, including these basic definitions
* in the default imports makes my (@nv) life easier.
diff --git a/src/main/scala/inox/parser/Elaborators.scala b/src/main/scala/inox/parser/Elaborators.scala
new file mode 100644
index 000000000..70ca93a0a
--- /dev/null
+++ b/src/main/scala/inox/parser/Elaborators.scala
@@ -0,0 +1,205 @@
+package inox
+package parser
+
+import elaboration._
+import elaborators._
+
+trait Elaborators
+ extends Trees
+ with IRs
+ with Constraints
+ with SimpleTypes
+ with SimpleBindings
+ with SimpleFunctions
+ with SimpleADTs
+ with BindingElaborators
+ with ExprElaborators
+ with TypeElaborators
+ with IdentifierElaborators
+ with FunctionElaborators
+ with ADTsElaborators
+ with ProgramElaborators
+ with NumberUtils
+ with Solvers
+ with ElaborationErrors {
+
+ type Signature = (Int, Seq[SimpleTypes.Type] => (Seq[SimpleTypes.Type], SimpleTypes.Type))
+
+ private def toSignature(function: SimpleFunctions.Function): Signature =
+ (function.typeParams.size, (actualTypes: Seq[SimpleTypes.Type]) => {
+ require(actualTypes.size == function.typeParams.size)
+
+ val replacements = function.typeParams.map(_.id).zip(actualTypes).toMap
+
+ (function.params.map(_.tpe.replaceTypeParams(replacements)), function.retTpe.replaceTypeParams(replacements))
+ })
+
+ private def toSignature(sort: SimpleADTs.Sort, constructor: SimpleADTs.Constructor): Signature = {
+ require(sort.id == constructor.sort)
+
+ (sort.typeParams.size, (actualTypes: Seq[SimpleTypes.Type]) => {
+ require(actualTypes.size == sort.typeParams.size)
+
+ val replacements = sort.typeParams.map(_.id).zip(actualTypes).toMap
+
+ (constructor.params.map(_.tpe.replaceTypeParams(replacements)), SimpleTypes.ADTType(sort.id, actualTypes))
+ })
+ }
+
+
+ case class FunctionStore(signatures: Map[inox.Identifier, Signature]) {
+
+ def addFunction(function: SimpleFunctions.Function): FunctionStore =
+ FunctionStore(signatures + (function.id -> toSignature(function)))
+ def addFunctions(functions: Seq[SimpleFunctions.Function]): FunctionStore =
+ FunctionStore(signatures ++ functions.map(f => (f.id -> toSignature(f))))
+ }
+
+ case class ADTStore(
+ sortArities: Map[inox.Identifier, Int],
+ constructors: Map[inox.Identifier, Signature],
+ fieldIdsByName: Map[String, Seq[inox.Identifier]],
+ fieldTypeByConsType: Map[Identifier, Seq[SimpleTypes.Type] => SimpleTypes.Type],
+ sortIdByFieldId: Map[inox.Identifier, inox.Identifier],
+ sortIdByConstructorId: Map[inox.Identifier, inox.Identifier]) {
+
+ def addSort(sort: SimpleADTs.Sort): ADTStore =
+ ADTStore(
+ sortArities + (sort.id -> sort.typeParams.size),
+ constructors ++ (sort.constructors.map(c => (c.id, toSignature(sort, c)))),
+ sort.constructors.flatMap(c => c.params.flatMap(f => f.name.map((_, f.id)))).foldLeft(fieldIdsByName) {
+ case (acc, (name, id)) => acc + (name -> (acc.getOrElse(name, Seq()) :+ id))
+ },
+ fieldTypeByConsType ++ (sort.constructors.flatMap(c => c.params.map(f => {
+ f.id -> { (actualTypes: Seq[SimpleTypes.Type]) =>
+ require(actualTypes.size == sort.typeParams.size)
+
+ val replacements = sort.typeParams.map(_.id).zip(actualTypes).toMap
+
+ f.tpe.replaceTypeParams(replacements)
+ }
+ }))),
+ sortIdByFieldId ++ sort.constructors.flatMap(c => c.params.map(f => (f.id -> sort.id))),
+ sortIdByConstructorId ++ sort.constructors.map(c => (c.id -> sort.id)))
+
+ def addSorts(sorts: Seq[SimpleADTs.Sort]): ADTStore =
+ sorts.foldLeft(this) {
+ case (acc, sort) => acc.addSort(sort)
+ }
+ }
+
+ case class Store(
+ symbols: trees.Symbols,
+ names: Map[String, inox.Identifier],
+ functionNames: Map[String, Seq[inox.Identifier]],
+ typeNames: Map[String, inox.Identifier],
+ variables: Map[inox.Identifier, (SimpleTypes.Type, Eventual[trees.Type])],
+ adtStore: ADTStore,
+ funStore: FunctionStore,
+ args: Seq[Any]) {
+
+ def getSymbols = symbols
+ def getExprIdentifier(name: String): Option[inox.Identifier] = names.get(name)
+ def getTypeIdentifier(name: String): Option[inox.Identifier] = typeNames.get(name)
+ def getFieldByName(name: String): Seq[(inox.Identifier, inox.Identifier)] = for {
+ fid <- adtStore.fieldIdsByName.getOrElse(name, Seq())
+ sid <- adtStore.sortIdByFieldId.get(fid)
+ } yield (sid, fid)
+ def getSortByField(identifier: Identifier): Option[inox.Identifier] =
+ adtStore.sortIdByFieldId.get(identifier)
+ def getTypeOfField(identifier: inox.Identifier): Seq[SimpleTypes.Type] => SimpleTypes.Type =
+ adtStore.fieldTypeByConsType(identifier)
+ def getVariable(identifier: inox.Identifier): Option[(SimpleTypes.Type, Eventual[trees.Type])] =
+ variables.get(identifier)
+ def getType(identifier: inox.Identifier): Option[(SimpleTypes.Type, Eventual[trees.Type])] =
+ variables.get(identifier)
+ def getTypeConstructor(identifier: inox.Identifier): Option[Int] =
+ adtStore.sortArities.get(identifier)
+ def getFunction(identifier: inox.Identifier): Option[Signature] =
+ funStore.signatures.get(identifier)
+ def getFunctions(name: String): Option[Seq[inox.Identifier]] =
+ functionNames.get(name)
+ def getConstructor(identifier: inox.Identifier): Option[Signature] =
+ adtStore.constructors.get(identifier)
+ def getSortOfConstructor(identifier: inox.Identifier): Option[inox.Identifier] =
+ adtStore.sortIdByConstructorId.get(identifier)
+ def getHole[A: Manifest](index: Int): Option[A] = {
+ if (args.size <= index) None
+ else args(index) match {
+ case x: A => Some(x)
+ case _ => None
+ }
+ }
+
+ def addBinding(binding: SimpleBindings.Binding): Store =
+ copy(
+ variables=variables + (binding.id -> (binding.tpe, binding.evTpe)),
+ names=names ++ binding.name.map(_ -> binding.id))
+ def addBindings(bindings: Seq[SimpleBindings.Binding]): Store =
+ copy(
+ variables=variables ++ bindings.map(binding => (binding.id -> (binding.tpe, binding.evTpe))),
+ names=names ++ bindings.flatMap(binding => binding.name.map(_ -> binding.id)))
+ def addTypeBinding(binding: SimpleBindings.TypeBinding): Store =
+ copy(
+ variables=variables + (binding.id -> (binding.tpe, binding.evTpe)),
+ typeNames=typeNames ++ binding.name.map(_ -> binding.id))
+ def addTypeBindings(bindings: Seq[SimpleBindings.TypeBinding]): Store =
+ copy(
+ variables=variables ++ bindings.map(binding => (binding.id -> (binding.tpe, binding.evTpe))),
+ typeNames=typeNames ++ bindings.flatMap(binding => binding.name.map(_ -> binding.id)))
+ def addFunction(function: SimpleFunctions.Function): Store =
+ copy(
+ funStore=funStore.addFunction(function),
+ functionNames=functionNames + (function.id.name -> (functionNames.getOrElse(function.id.name, Seq()) :+ function.id))
+ )
+ def addFunctions(functions: Seq[SimpleFunctions.Function]): Store =
+ copy(
+ funStore=funStore.addFunctions(functions),
+ functionNames=functions.foldLeft(functionNames)((namesMap, fun)=>
+ namesMap + (fun.id.name -> (namesMap.getOrElse(fun.id.name, Seq()) :+ fun.id))
+ ))
+ def addSort(sort: SimpleADTs.Sort): Store =
+ copy(
+ adtStore=adtStore.addSort(sort),
+ typeNames=typeNames ++ sort.optName.map((_, sort.id)),
+ names=names ++ sort.constructors.flatMap(c => c.optName.map((_, c.id))))
+ def addSorts(sorts: Seq[SimpleADTs.Sort]): Store =
+ copy(
+ adtStore=adtStore.addSorts(sorts),
+ typeNames=typeNames ++ sorts.flatMap(sort => sort.optName.map((_, sort.id))),
+ names=names ++ sorts.flatMap(sort => sort.constructors.flatMap(c => c.optName.map((_, c.id)))))
+ }
+
+ def createStore(symbols: trees.Symbols, args: Seq[Any]): Store = {
+
+ val adtStore: ADTStore = ADTStore(Map(), Map(), Map(), Map(), Map(), Map())
+
+ val funStore: FunctionStore = FunctionStore(Map())
+
+ Store(symbols, Map(), Map(), Map(), Map(), adtStore, funStore, args)
+ .addFunctions(symbols.functions.values.flatMap(SimpleFunctions.fromInox(_)).toSeq)
+ .addSorts(symbols.sorts.values.flatMap(SimpleADTs.fromInox(_)).toSeq)
+ }
+
+ trait Elaborator[-A, +R] {
+ def elaborate(template: A)(implicit store: Store): Constrained[R]
+ }
+
+ abstract class HSeqE[-A <: IR, H: Manifest, +R](underlying: String) extends Elaborator[HSeq[A], Seq[R]] {
+ val elaborator: Elaborator[A, R]
+
+ def wrap(value: H, where: IR)(implicit store: Store): Constrained[R]
+
+ override def elaborate(template: HSeq[A])(implicit store: Store): Constrained[Seq[R]] = {
+ val elems = template.elems
+
+ Constrained.sequence(elems.map {
+ case Left(r) => store.getHole[Seq[H]](r.index) match {
+ case None => Constrained.fail(invalidHoleType("Seq[" + underlying + "]")(r.pos))
+ case Some(xs) => Constrained.sequence(xs.map(wrap(_, r)))
+ }
+ case Right(t) => elaborator.elaborate(t).map(Seq(_))
+ }).map(_.flatten)
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/Errors.scala b/src/main/scala/inox/parser/Errors.scala
new file mode 100644
index 000000000..c6aef89e0
--- /dev/null
+++ b/src/main/scala/inox/parser/Errors.scala
@@ -0,0 +1,152 @@
+package inox
+package parser
+
+import scala.util.parsing.input._
+
+trait Errors {
+ def withPosition(error: String): Position => String = {
+ case NoPosition => error
+ case pos => error + "\n" + pos.longString
+ }
+
+ def withPositions(error: String): Seq[Position] => String =
+ (positions: Seq[Position]) => error + positions.filter(_ != NoPosition).map("\n" + _.longString).mkString("")
+
+ def unsupportedHoleTypeForElaboration(tpe: String): Position => String =
+ withPosition("Holes of type " + tpe + " are not supported for elaboration.")
+}
+
+trait ElaborationErrors extends Errors { self: Elaborators =>
+
+ /* Elaboration errors: */
+
+ import TypeClasses._
+ import SimpleTypes._
+
+ def invalidHoleType(tpe: String): Position => String =
+ withPosition("Invalid argument passed to hole. Expected a value of type " + tpe + ".")
+
+ def invalidInoxType(tpe: trees.Type): Position => String =
+ withPosition("Invalid Type " + tpe + ".")
+
+ def noTypeInScope(name: String): Position => String =
+ withPosition("No type named " + name + " is available in scope.")
+
+ def noExprInScope(name: String): Position => String =
+ withPosition("No expression named " + name + " is available in scope.")
+
+ def typeConstructorUsedAsTypeVariable(name: String): Position => String =
+ withPosition(name + " is a type constructor, not a type.")
+
+ def typeVariableUsedAsTypeConstructor(name: String): Position => String =
+ withPosition(name + " is a type, not a type constructor.")
+
+ def wrongNumberOfArguments(callee: String, expected: Int, actual: Int): Position => String =
+ withPosition("Wrong number of arguments for " + callee + ", expected " + expected + ", got " + actual + ".")
+
+ def wrongNumberOfTypeArguments(callee: String, expected: Int, actual: Int): Position => String =
+ withPosition("Wrong number of type arguments for " + callee + ", expected " + expected + ", got " + actual + ".")
+
+ def invalidInoxValDef(vd: trees.ValDef): Position => String =
+ withPosition("Invalid ValDef " + vd + ".")
+
+ def functionUsedAsVariable(name: String): Position => String =
+ withPosition(name + " is a function or a constructor, not a variable.")
+
+ def identifierNotCallable(name: String): Position => String =
+ withPosition(name + " is not callable.")
+
+ def functionValuesCanNotHaveTypeParameters(name: String): Position => String =
+ withPosition(name + " is a function value and therefore can not accept type parameters.")
+
+ def identifierNotConstructor(name: String): Position => String =
+ withPosition(name + " is not a constructor.")
+
+ def invalidInoxExpr(expr: trees.Expr): Position => String =
+ withPosition("Invalid Expr " + expr + ".")
+
+ def noFieldNamed(name: String): Position => String =
+ withPosition(name + " is not a known field.")
+
+ def invalidADTConstructor(c: trees.ADTConstructor): Position => String =
+ withPosition(c + " is not a valid ADTConstructor.")
+
+ def unificationImpossible(tpe1: SimpleTypes.Type, tpe2: SimpleTypes.Type): Seq[Position] => String =
+ withPositions("The type " + typeName(tpe1) + " can not be unified with the type " + typeName(tpe2) + ".")
+
+ val ambiguousTypes: Seq[Position] => String =
+ withPositions("The following positions have ambiguous types.")
+
+ val fieldsNotDistincts: Position => String =
+ withPosition("The fields of the various constructors should have unique names.")
+
+ def incompatibleTypeClasses(tc1: TypeClass, tc2: TypeClass): Seq[Position] => String = (tc1, tc2) match {
+ case (WithFields(fs1, _), WithFields(fs2, _)) => withPositions("No existing class has all the following fields: " + (fs1 union fs2).toSeq.mkString(", ") + ".")
+ case (WithFields(_, _), _) => withPositions("Classes can not be " + typeClassName(tc2) + ".")
+ case (_, WithFields(_, _)) => withPositions("Classes can not be " + typeClassName(tc1) + ".")
+ case (WithIndices(_), _) => withPositions("Tuples can not be " + typeClassName(tc2) + ".")
+ case (_, WithIndices(_)) => withPositions("Tuples can not be " + typeClassName(tc1) + ".")
+ case (Bits(_, _), Bits(_, _)) => withPositions("Incompatible bit vector types.")
+ case _ => withPositions("Incompatible kind of types: " + typeClassName(tc1) + " and " + typeClassName(tc2) + ".")
+ }
+
+ def notMemberOfTypeClasses(tpe: Type, tc: TypeClass): Seq[Position] => String =
+ withPositions("Values of type " + typeName(tpe) + " are not " + typeClassName(tc) + ".")
+
+ def typeClassName(tc: TypeClass): String = tc match {
+ case WithFields(fs1, _) => "classes with fields " + fs1.toSeq.mkString(", ")
+ case WithIndices(_) => "tuples"
+ case Bits(true, _) => "signed bit vectors"
+ case Bits(false, _) => "unsigned bit vectors"
+ case Integral => "integral"
+ case Numeric => "numeric"
+ case Comparable => "comparable"
+ }
+
+ def typeName(tpe: Type): String = tpe match {
+ case UnitType() => "Unit"
+ case BooleanType() => "Boolean"
+ case BitVectorType(true, s) => "Int" + s.toString
+ case BitVectorType(false, s) => "UInt" + s.toString
+ case IntegerType() => "BigInt"
+ case StringType() => "String"
+ case CharType() => "Char"
+ case RealType() => "Real"
+ case MapType(f, t) => "Map[" + typeName(f) + ", " + typeName(t) + "]"
+ case SetType(t) => "Set[" + typeName(t) + "]"
+ case BagType(t) => "Bag[" + typeName(t) + "]"
+ case ADTType(i, tpes) => i.name + "[" + tpes.map(typeName(_)).mkString(", ") + "]"
+ case TypeParameter(i) => i.name
+ case _ => "Unknown"
+ }
+
+ /* Misc: */
+
+ val filterError: String =
+ "Filter error."
+}
+
+trait ParsingErrors extends Errors { self: IRs =>
+
+ /* Parsing errors: */
+
+ def expected(string: String): Position => String =
+ withPosition("Expected " + string + ".")
+
+ def expectedString(string: String): Position => String =
+ expected("\"" + string + "\"")
+
+ def expectedOneOf(strings: String*): Position => String = {
+ assert(strings.size >= 1)
+
+ if (strings.size == 1) {
+ expectedString(strings.head)
+ }
+ else {
+ withPosition("Expected either " + strings.init.mkString(", ") + " or " + strings.last + ".")
+ }
+ }
+
+ def expectedOneOfStrings(strings: String*): Position => String =
+ expectedOneOf(strings.map(x => "\"" + x + "\""): _*)
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/Exceptions.scala b/src/main/scala/inox/parser/Exceptions.scala
new file mode 100644
index 000000000..193614653
--- /dev/null
+++ b/src/main/scala/inox/parser/Exceptions.scala
@@ -0,0 +1,4 @@
+package inox
+package parser
+
+case class InterpolatorException(message: String) extends Exception(message)
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/Extractors.scala b/src/main/scala/inox/parser/Extractors.scala
new file mode 100644
index 000000000..eef41cabf
--- /dev/null
+++ b/src/main/scala/inox/parser/Extractors.scala
@@ -0,0 +1,62 @@
+package inox
+package parser
+
+import extraction._
+import extractors._
+
+trait Extractors
+ extends Trees
+ with IRs
+ with Matchings
+ with IdentifierExtractors
+ with TypeExtractors
+ with BindingExtractors
+ with ExprExtractors
+ with FunctionExtractors
+ with ADTsExtractors
+ with NumberUtils {
+
+ trait Extractor[-A, -B, +R] {
+ def extract(template: A, scrutinee: B): Matching[R]
+ }
+
+ class HSeqX[-A <: IR, -B, +R](extractor: Extractor[A, B, R], default: R) extends Extractor[HSeq[A], Seq[B], Seq[R]] {
+ override def extract(template: HSeq[A], scrutinee: Seq[B]): Matching[Seq[R]] = {
+
+ val elems = template.elems
+ val minSize = elems.count(_.isRight)
+ val isRigid = minSize == elems.size
+ if (scrutinee.size < minSize || (isRigid && scrutinee.size != elems.size)) {
+ Matching.fail
+ }
+ else {
+ val (prefix, suffix) = elems.span(_.isRight)
+ val (prefixParts, suffixParts) = scrutinee.splitAt(prefix.size)
+
+ val prefixMatchings = prefix.zip(prefixParts).map {
+ case (elem, part) => extractor.extract(elem.right.get, part)
+ }
+
+ val matchings = if (suffix.isEmpty) {
+ prefixMatchings
+ }
+ else {
+ val firstIndex = suffix.head.left.get.index
+ val rest = suffix.tail
+
+ val (firstParts, restParts) = suffixParts.splitAt(scrutinee.size - minSize)
+
+ val (restMatchings, Seq()) = rest.foldLeft((Seq[Matching[R]](), restParts)) {
+ case ((acc, rest), Left(r)) => (acc :+ Matching(r.index -> Seq()).withValue(default), rest)
+ case ((acc, rest), Right(elem)) => (acc :+ extractor.extract(elem, rest.head), rest.tail)
+ }
+
+ prefixMatchings ++ (Matching(firstIndex -> firstParts).withValue(default) +: restMatchings)
+ }
+
+
+ Matching.sequence(matchings)
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/IRs.scala b/src/main/scala/inox/parser/IRs.scala
new file mode 100644
index 000000000..a4eb9f3ce
--- /dev/null
+++ b/src/main/scala/inox/parser/IRs.scala
@@ -0,0 +1,61 @@
+package inox
+package parser
+
+import scala.util.parsing.input.Positional
+
+import irs._
+
+trait IRs
+ extends Exprs
+ with Identifiers
+ with Bindings
+ with Types
+ with Functions
+ with ADTs
+ with Programs {
+
+ type ErrorMessage = String
+
+ trait HoleType
+ object HoleTypes {
+ case object Identifier extends HoleType
+ case object Type extends HoleType
+ case object Expr extends HoleType
+ case object ValDef extends HoleType
+ case object Constructor extends HoleType
+ case class Pair(lhs: HoleType, rhs: HoleType) extends HoleType
+ case class Sequence(inner: HoleType) extends HoleType
+ }
+
+ case class Hole(index: Int, holeType: HoleType)
+
+ trait IR extends Positional with Product {
+ def getHoles: Seq[Hole]
+
+ override def toString: String = {
+ productPrefix + "(" + productIterator.map(_.toString).mkString(",") + ")@" + pos.toString
+ }
+ }
+
+ trait HoleTypable[-A <: IR] {
+ val holeType: HoleType
+ }
+
+ case class RepHole[+A <: IR : HoleTypable](index: Int) extends IR {
+ override def getHoles = Seq(Hole(index, HoleTypes.Sequence(implicitly[HoleTypable[A]].holeType)))
+ }
+
+ case class HSeq[+A <: IR : HoleTypable](elems: Seq[Either[RepHole[A], A]]) extends IR {
+
+ def size = elems.size
+
+ override def getHoles: Seq[Hole] = elems.flatMap {
+ case Left(r) => r.getHoles
+ case Right(x) => x.getHoles
+ }
+ }
+
+ object HSeq {
+ def fromSeq[A <: IR : HoleTypable](xs: Seq[A]): HSeq[A] = HSeq(xs.map(Right(_)))
+ }
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/Interpolators.scala b/src/main/scala/inox/parser/Interpolators.scala
new file mode 100644
index 000000000..484e96f36
--- /dev/null
+++ b/src/main/scala/inox/parser/Interpolators.scala
@@ -0,0 +1,216 @@
+package inox
+package parser
+
+import scala.reflect.macros.whitebox.Context
+
+import scala.language.experimental.macros
+import scala.language.implicitConversions
+
+trait Interpolators extends Trees
+
+trait MacrosInterpolators extends Interpolators { self =>
+
+ import trees._
+
+ object Factory
+ extends Elaborators
+ with Extractors {
+ override val trees: self.trees.type = self.trees
+ }
+
+ class Interpolator(sc: StringContext)(implicit val symbols: trees.Symbols) {
+
+ class TypeExtractor {
+ def apply(args: Any*): Type = macro Macros.t_apply
+ def unapply(arg: Type): Option[Any] = macro Macros.t_unapply
+ }
+
+ val t = new TypeExtractor
+
+ class ExprExtractor {
+ def apply(args: Any*): Expr = macro Macros.e_apply
+ def unapply(arg: Expr): Option[Any] = macro Macros.e_unapply
+ }
+
+ val e = new ExprExtractor
+
+ class ValDefExtractor {
+ def apply(args: Any*): ValDef = macro Macros.vd_apply
+ def unapply(arg: ValDef): Option[Any] = macro Macros.vd_unapply
+ }
+
+ val vd = new ValDefExtractor
+
+ class FunDefExtractor {
+ def apply(args: Any*): FunDef = macro Macros.fd_apply
+ def unapply(arg: FunDef): Option[Any] = macro Macros.fd_unapply
+ }
+
+ val fd = new FunDefExtractor
+
+ class TypeDefExtractor {
+ def apply(args: Any*): ADTSort = macro Macros.td_apply
+ def unapply(arg: ADTSort): Option[Any] = macro Macros.td_unapply
+ }
+
+ val td = new TypeDefExtractor
+
+ class ProgramExtractor {
+ def apply(args: Any*): Seq[Definition] = macro Macros.p_apply
+ }
+
+ val p = new ProgramExtractor
+ }
+
+ implicit def Interpolator(sc: StringContext)(implicit symbols: trees.Symbols = trees.NoSymbols): Interpolator = new Interpolator(sc)
+}
+
+trait RunTimeInterpolators
+ extends Interpolators
+ with Elaborators
+ with Extractors
+ with Parsers {
+
+ import trees._
+
+ implicit class Interpolator(sc: StringContext)(implicit symbols: trees.Symbols = trees.NoSymbols) {
+
+ object e {
+ def apply(args: Any*): Expr = {
+ parseSC(sc)(phrase(exprParser)) match {
+ case Left(err) => throw new InterpolatorException(err)
+ case Right(ir) => ExprE.elaborate(ir)(createStore(symbols, args)).get match {
+ case Left(err) => throw new InterpolatorException(err)
+ case Right(((_, ev), cs)) => solve(cs) match {
+ case Left(err) => throw new InterpolatorException(err)
+ case Right(u) => ev.get(u)
+ }
+ }
+ }
+ }
+
+ def unapplySeq(arg: Expr): Option[Seq[Any]] = {
+ parseSC(sc)(phrase(exprParser)) match {
+ case Left(err) => None
+ case Right(ir) => ExprX.extract(ir, arg).getMatches(symbols) match {
+ case None => None
+ case Some(mapping) => Some(Seq.tabulate(mapping.size) { i => mapping(i) })
+ }
+ }
+ }
+ }
+
+ object t {
+ def apply(args: Any*): Type = {
+ parseSC(sc)(phrase(typeParser)) match {
+ case Left(err) => throw new InterpolatorException(err)
+ case Right(ir) => TypeE.elaborate(ir)(createStore(symbols, args)).get match {
+ case Left(err) => throw new InterpolatorException(err)
+ case Right(((_, ev), cs)) => solve(cs) match {
+ case Left(err) => throw new InterpolatorException(err)
+ case Right(u) => ev.get(u)
+ }
+ }
+ }
+ }
+
+ def unapplySeq(arg: Type): Option[Seq[Any]] = {
+ parseSC(sc)(phrase(typeParser)) match {
+ case Left(err) => None
+ case Right(ir) => TypeX.extract(ir, arg).getMatches(symbols) match {
+ case None => None
+ case Some(mapping) => Some(Seq.tabulate(mapping.size) { i => mapping(i) })
+ }
+ }
+ }
+ }
+
+ object vd {
+ def apply(args: Any*): ValDef = {
+ parseSC(sc)(phrase(bindingParser(explicitOnly=true))) match {
+ case Left(err) => throw new InterpolatorException(err)
+ case Right(ir) => BindingE.elaborate(ir)(createStore(symbols, args)).get match {
+ case Left(err) => throw new InterpolatorException(err)
+ case Right((sb, cs)) => solve(cs) match {
+ case Left(err) => throw new InterpolatorException(err)
+ case Right(u) => sb.evValDef.get(u)
+ }
+ }
+ }
+ }
+
+ def unapplySeq(arg: ValDef): Option[Seq[Any]] = {
+ parseSC(sc)(phrase(bindingParser(explicitOnly=false))) match {
+ case Left(err) => None
+ case Right(ir) => BindingX.extract(ir, arg).getMatches(symbols) match {
+ case None => None
+ case Some(mapping) => Some(Seq.tabulate(mapping.size) { i => mapping(i) })
+ }
+ }
+ }
+ }
+
+ object fd {
+ def apply(args: Any*): FunDef = {
+ parseSC(sc)(phrase(functionDefinitionParser)) match {
+ case Left(err) => throw new InterpolatorException(err)
+ case Right(ir) => SingleFunctionE.elaborate(ir)(createStore(symbols, args)).get match {
+ case Left(err) => throw new InterpolatorException(err)
+ case Right((ev, cs)) => solve(cs) match {
+ case Left(err) => throw new InterpolatorException(err)
+ case Right(u) => ev.get(u)
+ }
+ }
+ }
+ }
+
+ def unapplySeq(arg: FunDef): Option[Seq[Any]] = {
+ parseSC(sc)(phrase(functionDefinitionParser)) match {
+ case Left(err) => None
+ case Right(ir) => FunctionX.extract(ir, arg).getMatches(symbols) match {
+ case None => None
+ case Some(mapping) => Some(Seq.tabulate(mapping.size) { i => mapping(i) })
+ }
+ }
+ }
+ }
+
+ object td {
+ def apply(args: Any*): ADTSort = {
+ parseSC(sc)(phrase(adtDefinitionParser)) match {
+ case Left(err) => throw new InterpolatorException(err)
+ case Right(ir) => SortE.elaborate(ir)(createStore(symbols, args)).get match {
+ case Left(err) => throw new InterpolatorException(err)
+ case Right(((_, ev), cs)) => solve(cs) match {
+ case Left(err) => throw new InterpolatorException(err)
+ case Right(u) => ev.get(u)
+ }
+ }
+ }
+ }
+
+ def unapplySeq(arg: ADTSort): Option[Seq[Any]] = {
+ parseSC(sc)(phrase(adtDefinitionParser)) match {
+ case Left(err) => None
+ case Right(ir) => SortX.extract(ir, arg).getMatches(symbols) match {
+ case None => None
+ case Some(mapping) => Some(Seq.tabulate(mapping.size) { i => mapping(i) })
+ }
+ }
+ }
+ }
+
+ def p(args: Any*): Seq[Definition] = {
+ parseSC(sc)(phrase(programParser)) match {
+ case Left(err) => throw new InterpolatorException(err)
+ case Right(ir) => ProgramE.elaborate(ir)(createStore(symbols, args)).get match {
+ case Left(err) => throw new InterpolatorException(err)
+ case Right((evs, cs)) => solve(cs) match {
+ case Left(err) => throw new InterpolatorException(err)
+ case Right(u) => evs.map(_.get(u))
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/src/main/scala/inox/parsing/Lexer.scala b/src/main/scala/inox/parser/Lexer.scala
similarity index 62%
rename from src/main/scala/inox/parsing/Lexer.scala
rename to src/main/scala/inox/parser/Lexer.scala
index ddfe7937c..4c5a7c8a6 100644
--- a/src/main/scala/inox/parsing/Lexer.scala
+++ b/src/main/scala/inox/parser/Lexer.scala
@@ -1,37 +1,38 @@
/* Copyright 2017 EPFL, Lausanne */
package inox
-package parsing
+package parser
import scala.util.parsing.combinator.lexical._
import scala.util.parsing.combinator.syntactical._
import scala.util.parsing.combinator.token._
-import inox.InoxProgram
+import inox.parser.sc.StringContextLexer
-trait Lexers {
- object Lexer extends StdLexical with StringContextLexer {
+trait Lexers extends Operators {
- reserved ++= Seq("true", "false", "if", "else", "exists", "forall", "lambda", "choose", "let", "in", "assume", "def", "type")
-
- val unaryOps: Seq[String] = Operators.unaries
- val opTable: Seq[Level] = Operators.binaries
- val operators = (opTable.flatMap(_.ops) ++ unaryOps).distinct
+ class InoxLexer extends StdLexical with StringContextLexer {
case class CharLit(char: Char) extends Token { def chars = char.toString }
case class DecimalLit(whole: String, trailing: String, repeating: String) extends Token { def chars = whole + "." + trailing + "(" + repeating + ")" }
case class Parenthesis(parenthesis: Char) extends Token { def chars = parenthesis.toString }
case class Punctuation(punctuation: Char) extends Token { def chars = punctuation.toString }
- case class Quantifier(quantifier: String) extends Token { def chars = quantifier }
case class Operator(operator: String) extends Token { def chars = operator }
- case class Embedded(value: Any) extends Token { def chars = value.toString }
case class Hole(pos: Int) extends Token { def chars = "$" + pos }
+ case class Primitive(name: String) extends Token { def chars = name }
+
+ reserved ++= Seq("true", "false", "if", "else", "forall", "lambda", "choose", "let", "assume", "def", "type", "is", "as", "Pi", "Sigma")
+
+ val operators = (binaries.flatMap(_.ops) ++ unaries).distinct
override def token: Parser[Token] =
- char | number | operator | keywords | punctuation | parens | quantifier | super.token
+ char | number | priorityKeywords | operator | keywords | punctuation | parens | super.token
- val keywords =
+ val priorityKeywords =
+ acceptSeq("->") ^^^ Keyword("->")
+
+ val keywords =
acceptSeq("@") ^^^ Keyword("@") |
acceptSeq("=>") ^^^ Keyword("=>") |
acceptSeq("...") ^^^ Keyword("...") |
@@ -42,19 +43,30 @@ trait Lexers {
acceptSeq("if") <~ not(identChar | digit) ^^^ Keyword("if") |
acceptSeq("else") <~ not(identChar | digit) ^^^ Keyword("else") |
acceptSeq("let") <~ not(identChar | digit) ^^^ Keyword("let") |
- acceptSeq("in") <~ not(identChar | digit) ^^^ Keyword("in") |
acceptSeq("assume") <~ not(identChar | digit) ^^^ Keyword("assume") |
acceptSeq("=") ^^^ Keyword("=") |
acceptSeq("def") ^^^ Keyword("def") |
- acceptSeq("type") ^^^ Keyword("type")
+ acceptSeq("type") ^^^ Keyword("type") |
+ acceptSeq("is") ^^^ Keyword("is") |
+ acceptSeq("as") ^^^ Keyword("as") |
+ acceptSeq("choose") ^^^ Keyword("choose") |
+ acceptSeq("lambda") ^^^ Keyword("lambda") |
+ acceptSeq("forall") ^^^ Keyword("forall") |
+ '∀' ^^^ Keyword("forall") |
+ 'λ' ^^^ Keyword("lambda") |
+ acceptSeq("Pi") ^^^ Keyword("Pi") |
+ acceptSeq("Sigma") ^^^ Keyword("Sigma") |
+ 'Π' ^^^ Keyword("Pi") |
+ 'Σ' ^^^ Keyword("Sigma")
val comma: Parser[Token] = ',' ^^^ Punctuation(',')
val dot: Parser[Token] = '.' ^^^ Punctuation('.')
val colon: Parser[Token] = ':' ^^^ Punctuation(':')
- val punctuation: Parser[Token] = comma | dot | colon
+ val semicolon: Parser[Token] = ';' ^^^ Punctuation(';')
+ val punctuation: Parser[Token] = comma | dot | colon | semicolon
val number = opt('-') ~ rep1(digit) ~ opt('.' ~> afterDot) ^^ {
- case s ~ ds ~ None => NumericLit(s.map(x => "-").getOrElse("") + ds.mkString)
+ case s ~ ds ~ None => NumericLit(s.map(x => "-").getOrElse("") + ds.mkString)
case s ~ ds ~ Some((ts, rs)) => DecimalLit(s.map(x => "-").getOrElse("") + ds.mkString, ts, rs.getOrElse(""))
}
@@ -66,16 +78,6 @@ trait Lexers {
CharLit(_)
}
- val quantifier: Parser[Token] =
- '∀' ^^^ Quantifier("forall") |
- '∃' ^^^ Quantifier("exists") |
- 'λ' ^^^ Quantifier("lambda") |
- '\\' ^^^ Quantifier("lambda") |
- acceptSeq("forall") ^^^ Quantifier("forall") |
- acceptSeq("exists") ^^^ Quantifier("exists") |
- acceptSeq("lambda") ^^^ Quantifier("lambda") |
- acceptSeq("choose") ^^^ Quantifier("choose")
-
val operator: Parser[Token] =
operators.sortBy(-_.length).map(acceptSeq(_)).reduce(_ | _) ^^ { (xs: List[Char]) =>
@@ -86,9 +88,6 @@ trait Lexers {
case c @ ('[' | ']' | '(' | ')' | '{' | '}') => Parenthesis(c)
})
- override def argToToken(x: Any): Token = x match {
- case MatchPosition(i) => Hole(i)
- case _ => Embedded(x)
- }
+ override def toHole(index: Int): Token = Hole(index)
}
}
diff --git a/src/main/scala/inox/parser/Macros.scala b/src/main/scala/inox/parser/Macros.scala
new file mode 100644
index 000000000..f2f677b74
--- /dev/null
+++ b/src/main/scala/inox/parser/Macros.scala
@@ -0,0 +1,736 @@
+package inox
+package parser
+
+import scala.util.parsing._
+import scala.reflect.macros.whitebox.Context
+import scala.language.experimental.macros
+
+import inox.parser.sc._
+
+class Macros(final val c: Context) extends Parsers with IRs with Errors {
+ import c.universe.{Type => _, Function => _, Expr => _, If => _, _}
+
+ private val self = {
+ val Select(self, _) = c.prefix.tree
+ self
+ }
+
+ private def getString(expr: c.Tree): String = expr match {
+ case Literal(Constant(s : String)) => s
+ }
+
+ private val (pckg, sc) = self match {
+ case Block(ValDef(_, _, _, Apply(_, ls)) :: _, Apply(Apply(Select(pckg, _), _), _)) => {
+ // TODO: Should we issue a warning?
+ // c.warning(c.enclosingPosition, "No implicit Symbols in scope. Using NoSymbols by default.")
+ (pckg, StringContext(ls.map(getString): _*)) // In case of default symbols.
+ }
+ case Apply(Apply(Select(pckg, _), Apply(_, ls) :: _), _) => (pckg, StringContext(ls.map(getString): _*)) // In case of implicit symbols.
+ case _ => c.abort(c.enclosingPosition, "Unexpected macro use.")
+ }
+
+ private lazy val longTargetTrees = q"$interpolator.trees"
+ protected lazy val interpolator: c.Tree = q"$pckg.Factory"
+ protected lazy val targetTrees: c.Tree = pckg match {
+ case Select(candidateTrees, _) => {
+
+ val longTargetTreesType = c.typecheck(longTargetTrees).tpe
+ val candidateTreesType = c.typecheck(candidateTrees).tpe
+
+ if (longTargetTreesType =:= candidateTreesType) {
+ candidateTrees
+ }
+ else {
+ longTargetTrees
+ }
+ }
+ case _ => longTargetTrees
+ }
+
+
+ import Identifiers._
+ import Bindings._
+ import ADTs._
+ import Functions._
+ import Types._
+ import Exprs._
+ import Programs._
+
+ implicit lazy val stringContextLiftable: Liftable[StringContext] = Liftable[StringContext] {
+ case sc =>
+ q"_root_.scala.StringContext(..${sc.parts})"
+ }
+
+ implicit lazy val positionLiftable: Liftable[input.Position] = Liftable[input.Position] {
+ case input.NoPosition =>
+ q"_root_.scala.util.parsing.input.NoPosition"
+ case input.OffsetPosition(source, offset) =>
+ q"_root_.scala.util.parsing.input.OffsetPosition(${source.toString}, $offset)"
+ case InArgumentPosition(arg, context) =>
+ q"_root_.inox.parser.sc.InArgumentPosition($arg, $context)"
+ case InPartPosition(part, context, partLine, partColumn) =>
+ q"_root_.inox.parser.sc.InPartPosition($part, $context, $partLine, $partColumn)"
+ }
+
+ implicit lazy val bigIntLiftable: Liftable[BigInt] = Liftable[BigInt] {
+ case n => q"_root_.scala.math.BigInt(${n.toString})"
+ }
+
+ implicit def repHoleLiftable[A <: IR : TypeTag] = Liftable[RepHole[A]] {
+ case ir@RepHole(index) => {
+ val tpe = typeOf[A] match {
+ case TypeRef(SingleType(_, o), t, _) => c.typecheck(tq"$interpolator.$o.$t", c.TYPEmode).tpe
+ }
+
+ q"$interpolator.RepHole[$tpe]($index).setPos(${ir.pos})"
+ }
+ }
+
+ implicit def hseqLiftable[A <: IR : TypeTag](implicit ev: Liftable[A]) = Liftable[HSeq[A]] {
+ case ir@HSeq(es) => {
+ val tpe = typeOf[A] match {
+ case TypeRef(SingleType(_, o), t, _) => c.typecheck(tq"$interpolator.$o.$t", c.TYPEmode).tpe
+ }
+ val elems: Seq[Either[RepHole[A], A]] = es
+
+ q"$interpolator.HSeq[$tpe](_root_.scala.collection.Seq(..$elems)).setPos(${ir.pos})"
+ }
+ }
+
+ implicit lazy val identifiersLiftable: Liftable[Identifier] = Liftable[Identifier] {
+ case ir@IdentifierName(name) =>
+ q"$interpolator.Identifiers.IdentifierName($name).setPos(${ir.pos})"
+ case ir@IdentifierHole(index) =>
+ q"$interpolator.Identifiers.IdentifierHole($index).setPos(${ir.pos})"
+ }
+
+ implicit lazy val bindingsLiftable: Liftable[Binding] = Liftable[Binding] {
+ case ir@InferredValDef(id) =>
+ q"$interpolator.Bindings.InferredValDef($id).setPos(${ir.pos})"
+ case ir@ExplicitValDef(id, tpe) =>
+ q"$interpolator.Bindings.ExplicitValDef($id, $tpe).setPos(${ir.pos})"
+ case ir@BindingHole(index) =>
+ q"$interpolator.Bindings.BindingHole($index).setPos(${ir.pos})"
+ }
+
+ implicit lazy val sortsLiftable: Liftable[Sort] = Liftable[Sort] {
+ case ir@Sort(id, tps, cs) =>
+ q"$interpolator.ADTs.Sort($id, $tps, $cs).setPos(${ir.pos})"
+ }
+
+ implicit lazy val constructorsLiftable: Liftable[Constructor] = Liftable[Constructor] {
+ case ir@ConstructorValue(id, ps) =>
+ q"$interpolator.ADTs.ConstructorValue($id, $ps).setPos(${ir.pos})"
+ case ir@ConstructorHole(index) =>
+ q"$interpolator.ADTs.ConstructorHole($index).setPos(${ir.pos})"
+ }
+
+ implicit lazy val functionsLiftable: Liftable[Function] = Liftable[Function] {
+ case ir@Function(id, tps, ps, rt, b) =>
+ q"$interpolator.Functions.Function($id, $tps, $ps, $rt, $b).setPos(${ir.pos})"
+ }
+
+ implicit lazy val typesLiftable: Liftable[Type] = Liftable[Type] {
+ case ir@TypeHole(index) =>
+ q"$interpolator.Types.TypeHole($index).setPos(${ir.pos})"
+ case ir@Types.Primitive(prim) =>
+ q"$interpolator.Types.Primitive($prim).setPos(${ir.pos})"
+ case ir@Operation(op, elems) =>
+ q"$interpolator.Types.Operation($op, $elems).setPos(${ir.pos})"
+ case ir@FunctionType(froms, to) =>
+ q"$interpolator.Types.FunctionType($froms, $to).setPos(${ir.pos})"
+ case ir@TupleType(elems) =>
+ q"$interpolator.Types.TupleType($elems).setPos(${ir.pos})"
+ case ir@Types.Invocation(id, args) =>
+ q"$interpolator.Types.Invocation($id, $args).setPos(${ir.pos})"
+ case ir@Types.Variable(id) =>
+ q"$interpolator.Types.Variable($id).setPos(${ir.pos})"
+ case ir@RefinementType(b, p) =>
+ q"$interpolator.Types.RefinementType($b, $p).setPos(${ir.pos})"
+ case ir@PiType(bs, to) =>
+ q"$interpolator.Types.PiType($bs, $to).setPos(${ir.pos})"
+ case ir@SigmaType(bs, to) =>
+ q"$interpolator.Types.SigmaType($bs, $to).setPos(${ir.pos})"
+ }
+
+ implicit lazy val typePrimitivesLiftable: Liftable[Types.Primitives.Type] = Liftable[Types.Primitives.Type] {
+ case Primitives.BVType(signed, size) =>
+ q"$interpolator.Types.Primitives.BVType($signed, $size)"
+ case Primitives.IntegerType =>
+ q"$interpolator.Types.Primitives.IntegerType"
+ case Primitives.StringType =>
+ q"$interpolator.Types.Primitives.StringType"
+ case Primitives.CharType =>
+ q"$interpolator.Types.Primitives.CharType"
+ case Primitives.BooleanType =>
+ q"$interpolator.Types.Primitives.BooleanType"
+ case Primitives.UnitType =>
+ q"$interpolator.Types.Primitives.UnitType"
+ case Primitives.RealType =>
+ q"$interpolator.Types.Primitives.RealType"
+ }
+
+ implicit lazy val typeOperatorsLiftable: Liftable[Types.Operators.Operator] = Liftable[Types.Operators.Operator] {
+ case Operators.Set =>
+ q"$interpolator.Types.Operators.Set"
+ case Operators.Map =>
+ q"$interpolator.Types.Operators.Map"
+ case Operators.Bag =>
+ q"$interpolator.Types.Operators.Bag"
+ }
+
+ implicit lazy val exprsLiftable: Liftable[Expr] = Liftable[Expr] {
+ case ir@ExprHole(index) =>
+ q"$interpolator.Exprs.ExprHole($index).setPos(${ir.pos})"
+ case ir@UnitLiteral() =>
+ q"$interpolator.Exprs.UnitLiteral().setPos(${ir.pos})"
+ case ir@BooleanLiteral(value) =>
+ q"$interpolator.Exprs.BooleanLiteral($value).setPos(${ir.pos})"
+ case ir@IntegerLiteral(value) =>
+ q"$interpolator.Exprs.IntegerLiteral($value).setPos(${ir.pos})"
+ case ir@FractionLiteral(num, denom) =>
+ q"$interpolator.Exprs.FractionLiteral($num, $denom).setPos(${ir.pos})"
+ case ir@StringLiteral(value) =>
+ q"$interpolator.Exprs.StringLiteral($value).setPos(${ir.pos})"
+ case ir@CharLiteral(value) =>
+ q"$interpolator.Exprs.CharLiteral($value).setPos(${ir.pos})"
+ case ir@SetConstruction(t, es) =>
+ q"$interpolator.Exprs.SetConstruction($t, $es).setPos(${ir.pos})"
+ case ir@BagConstruction(t, ps) =>
+ q"$interpolator.Exprs.BagConstruction($t, $ps).setPos(${ir.pos})"
+ case ir@MapConstruction(ts, ps, d) =>
+ q"$interpolator.Exprs.MapConstruction($ts, $ps, $d).setPos(${ir.pos})"
+ case ir@Exprs.Variable(id) =>
+ q"$interpolator.Exprs.Variable($id).setPos(${ir.pos})"
+ case ir@UnaryOperation(op, expr) =>
+ q"$interpolator.Exprs.UnaryOperation($op, $expr).setPos(${ir.pos})"
+ case ir@BinaryOperation(op, lhs, rhs) =>
+ q"$interpolator.Exprs.BinaryOperation($op, $lhs, $rhs).setPos(${ir.pos})"
+ case ir@NaryOperation(op, args) =>
+ q"$interpolator.Exprs.NaryOperation($op, $args).setPos(${ir.pos})"
+ case ir@Exprs.Invocation(id, tps, args) =>
+ q"$interpolator.Exprs.Invocation($id, $tps, $args).setPos(${ir.pos})"
+ case ir@PrimitiveInvocation(id, tps, args) =>
+ q"$interpolator.Exprs.PrimitiveInvocation($id, $tps, $args).setPos(${ir.pos})"
+ case ir@Application(callee, args) =>
+ q"$interpolator.Exprs.Application($callee, $args).setPos(${ir.pos})"
+ case ir@Abstraction(q, bs, b) =>
+ q"$interpolator.Exprs.Abstraction($q, $bs, $b).setPos(${ir.pos})"
+ case ir@Let(b, v, e) =>
+ q"$interpolator.Exprs.Let($b, $v, $e).setPos(${ir.pos})"
+ case ir@If(c, t, e) =>
+ q"$interpolator.Exprs.If($c, $t, $e).setPos(${ir.pos})"
+ case ir@Selection(s, f) =>
+ q"$interpolator.Exprs.Selection($s, $f).setPos(${ir.pos})"
+ case ir@Tuple(es) =>
+ q"$interpolator.Exprs.Tuple($es).setPos(${ir.pos})"
+ case ir@TupleSelection(t, index) =>
+ q"$interpolator.Exprs.TupleSelection($t, $index).setPos(${ir.pos})"
+ case ir@TypeAnnotation(e, t) =>
+ q"$interpolator.Exprs.TypeAnnotation($e, $t).setPos(${ir.pos})"
+ case ir@Choose(b, p) =>
+ q"$interpolator.Exprs.Choose($b, $p).setPos(${ir.pos})"
+ case ir@Assume(p, b) =>
+ q"$interpolator.Exprs.Assume($p, $b).setPos(${ir.pos})"
+ case ir@IsConstructor(e, c) =>
+ q"$interpolator.Exprs.IsConstructor($e, $c).setPos(${ir.pos})"
+ case ir@Cast(m, e, t) =>
+ q"$interpolator.Exprs.Cast($m, $e, $t).setPos(${ir.pos})"
+ }
+
+ implicit lazy val exprPairsLiftable: Liftable[ExprPair] = Liftable[ExprPair] {
+ case ir@Pair(lhs, rhs) =>
+ q"$interpolator.Exprs.Pair($lhs, $rhs).setPos(${ir.pos})"
+ case ir@PairHole(index) =>
+ q"$interpolator.Exprs.PairHole($index).setPos(${ir.pos})"
+ }
+
+ implicit lazy val exprCastsLiftable: Liftable[Casts.Mode] = Liftable[Casts.Mode] {
+ case Casts.Widen =>
+ q"$interpolator.Exprs.Casts.Widen"
+ case Casts.Narrow =>
+ q"$interpolator.Exprs.Casts.Narrow"
+ }
+
+ implicit lazy val exprUnaryLiftable: Liftable[Unary.Operator] = Liftable[Unary.Operator] {
+ case Unary.Minus =>
+ q"$interpolator.Exprs.Unary.Minus"
+ case Unary.Not =>
+ q"$interpolator.Exprs.Unary.Not"
+ case Unary.BVNot =>
+ q"$interpolator.Exprs.Unary.BVNot"
+ }
+
+ implicit lazy val exprBinaryLiftable: Liftable[Binary.Operator] = Liftable[Binary.Operator] {
+ case Binary.Plus =>
+ q"$interpolator.Exprs.Binary.Plus"
+ case Binary.Minus =>
+ q"$interpolator.Exprs.Binary.Minus"
+ case Binary.Times =>
+ q"$interpolator.Exprs.Binary.Times"
+ case Binary.Division =>
+ q"$interpolator.Exprs.Binary.Division"
+ case Binary.Modulo =>
+ q"$interpolator.Exprs.Binary.Modulo"
+ case Binary.Remainder =>
+ q"$interpolator.Exprs.Binary.Remainder"
+ case Binary.Implies =>
+ q"$interpolator.Exprs.Binary.Implies"
+ case Binary.Equals =>
+ q"$interpolator.Exprs.Binary.Equals"
+ case Binary.LessEquals =>
+ q"$interpolator.Exprs.Binary.LessEquals"
+ case Binary.LessThan =>
+ q"$interpolator.Exprs.Binary.LessThan"
+ case Binary.GreaterEquals =>
+ q"$interpolator.Exprs.Binary.GreaterEquals"
+ case Binary.GreaterThan =>
+ q"$interpolator.Exprs.Binary.GreaterThan"
+ case Binary.BVAnd =>
+ q"$interpolator.Exprs.Binary.BVAnd"
+ case Binary.BVOr =>
+ q"$interpolator.Exprs.Binary.BVOr"
+ case Binary.BVXor =>
+ q"$interpolator.Exprs.Binary.BVXor"
+ case Binary.BVShiftLeft =>
+ q"$interpolator.Exprs.Binary.BVShiftLeft"
+ case Binary.BVAShiftRight =>
+ q"$interpolator.Exprs.Binary.BVAShiftRight"
+ case Binary.BVLShiftRight =>
+ q"$interpolator.Exprs.Binary.BVLShiftRight"
+ }
+
+ implicit lazy val exprNAryLiftable: Liftable[NAry.Operator] = Liftable[NAry.Operator] {
+ case NAry.And =>
+ q"$interpolator.Exprs.NAry.And"
+ case NAry.Or =>
+ q"$interpolator.Exprs.NAry.Or"
+ }
+
+ implicit lazy val exprPrimitivesLiftable: Liftable[Exprs.Primitive.Function] = Liftable[Exprs.Primitive.Function] {
+ case Exprs.Primitive.SetAdd =>
+ q"$interpolator.Exprs.Primitive.SetAdd"
+ case Exprs.Primitive.ElementOfSet =>
+ q"$interpolator.Exprs.Primitive.ElementOfSet"
+ case Exprs.Primitive.SetIntersection =>
+ q"$interpolator.Exprs.Primitive.SetIntersection"
+ case Exprs.Primitive.SetUnion =>
+ q"$interpolator.Exprs.Primitive.SetUnion"
+ case Exprs.Primitive.SetDifference =>
+ q"$interpolator.Exprs.Primitive.SetDifference"
+ case Exprs.Primitive.Subset =>
+ q"$interpolator.Exprs.Primitive.Subset"
+ case Exprs.Primitive.BagAdd =>
+ q"$interpolator.Exprs.Primitive.BagAdd"
+ case Exprs.Primitive.MultiplicityInBag =>
+ q"$interpolator.Exprs.Primitive.MultiplicityInBag"
+ case Exprs.Primitive.BagIntersection =>
+ q"$interpolator.Exprs.Primitive.BagIntersection"
+ case Exprs.Primitive.BagUnion =>
+ q"$interpolator.Exprs.Primitive.BagUnion"
+ case Exprs.Primitive.BagDifference =>
+ q"$interpolator.Exprs.Primitive.BagDifference"
+ case Exprs.Primitive.MapApply =>
+ q"$interpolator.Exprs.Primitive.MapApply"
+ case Exprs.Primitive.MapUpdated =>
+ q"$interpolator.Exprs.Primitive.MapUpdated"
+ case Exprs.Primitive.StringConcat =>
+ q"$interpolator.Exprs.Primitive.StringConcat"
+ case Exprs.Primitive.SubString =>
+ q"$interpolator.Exprs.Primitive.SubString"
+ case Exprs.Primitive.StringLength =>
+ q"$interpolator.Exprs.Primitive.StringLength"
+ }
+
+ implicit lazy val quantifiersLiftable: Liftable[Quantifier] = Liftable[Quantifier] {
+ case Forall =>
+ q"$interpolator.Exprs.Forall"
+ case Lambda =>
+ q"$interpolator.Exprs.Lambda"
+ }
+
+ implicit lazy val programLiftable: Liftable[Program] = Liftable[Program] {
+ case ir@Program(es) =>
+ q"$interpolator.Programs.Program(_root_.scala.collection.Seq(..$es)).setPos(${ir.pos})"
+ }
+
+ private def parse[A](p: Parser[A]): A = {
+ parseSC(sc)(phrase(p)) match {
+ case Right(v) => v
+ case Left(e) => c.abort(c.enclosingPosition, "Parsing error in quasiquoted inox expression:\n" + e)
+ }
+ }
+
+ private lazy val identType = typeOf[inox.Identifier]
+ private lazy val exprType = c.typecheck(tq"$targetTrees.Expr", c.TYPEmode).tpe
+ private lazy val typeType = c.typecheck(tq"$targetTrees.Type", c.TYPEmode).tpe
+ private lazy val valDefType = c.typecheck(tq"$targetTrees.ValDef", c.TYPEmode).tpe
+ private lazy val funDefType = c.typecheck(tq"$targetTrees.FunDef", c.TYPEmode).tpe
+ private lazy val adtSortType = c.typecheck(tq"$targetTrees.ADTSort", c.TYPEmode).tpe
+ private lazy val constructorType = c.typecheck(tq"$targetTrees.ADTConstructor", c.TYPEmode).tpe
+ private lazy val defSeqType = c.typecheck(tq"_root_.scala.collection.Seq[$targetTrees.Definition]", c.TYPEmode).tpe
+
+ private def tupleType(types: Seq[c.Type]): c.Tree = tq"(..$types)"
+
+ private def accessAll(types: Seq[c.Type]): c.Tree = {
+ val elems = types.zipWithIndex.map {
+ case (tpe, i) => q"x($i).asInstanceOf[$tpe]"
+ }
+ q"(x: Map[Int, Any]) => (..$elems)"
+ }
+
+ private def verifyElaborationHoleTypes(holes: Seq[Hole]) {
+ if (holes.exists(_.holeType == HoleTypes.Constructor)) {
+ c.error(c.enclosingPosition, unsupportedHoleTypeForElaboration("ADTConstructor")(scala.util.parsing.input.NoPosition))
+ }
+ }
+
+ private def getTypes(holes: Seq[Hole]): Seq[c.Type] = {
+
+ def holeTypeToType(holeType: HoleType): c.Type = holeType match {
+ case HoleTypes.Identifier => identType
+ case HoleTypes.Expr => exprType
+ case HoleTypes.Type => typeType
+ case HoleTypes.ValDef => valDefType
+ case HoleTypes.Constructor => constructorType
+ case HoleTypes.Pair(lhs, rhs) => c.typecheck(tq"(${holeTypeToType(lhs)}, ${holeTypeToType(rhs)})", c.TYPEmode).tpe
+ case HoleTypes.Sequence(holeType) => c.typecheck(tq"_root_.scala.collection.Seq[${holeTypeToType(holeType)}]", c.TYPEmode).tpe
+ }
+
+ val holeTypes = holes.map(h => h.index -> h.holeType).toMap
+ Seq.tabulate(holeTypes.size) { (i: Int) => holeTypeToType(holeTypes(i)) }
+ }
+
+ private def verifyArgTypes(args: Seq[c.Expr[Any]], types: Seq[c.Type]) {
+ assert(args.size == types.size)
+
+ for ((arg, expectedType) <- args.zip(types)) {
+ val actualType = arg.actualType
+ if (!(actualType <:< expectedType)) {
+ c.error(arg.tree.pos, s"Invalid argument of type $actualType. Expected an argument of type $expectedType.")
+ }
+ }
+ }
+
+ def t_apply(args: c.Expr[Any]*): c.Tree = {
+
+ val ir = parse(typeParser)
+ val holes = ir.getHoles
+
+ verifyElaborationHoleTypes(holes)
+
+ val types = getTypes(holes)
+
+ verifyArgTypes(args, types)
+
+ q"""
+ {
+ val ir = $ir
+ val self = $self
+ val res: $typeType = $interpolator.TypeE.elaborate(ir)($interpolator.createStore(self.symbols, _root_.scala.collection.Seq(..$args))).get match {
+ case _root_.scala.util.Left(err) => throw _root_.inox.parser.InterpolatorException(err)
+ case _root_.scala.util.Right(((_, ev), cs)) => $interpolator.solve(cs) match {
+ case _root_.scala.util.Left(err) => throw _root_.inox.parser.InterpolatorException(err)
+ case _root_.scala.util.Right(u) => ev.get(u)
+ }
+ }
+ res
+ }
+ """
+ }
+
+ def t_unapply(arg: c.Tree): c.Tree = {
+
+ val ir = parse(typeParser)
+ val holes = ir.getHoles
+
+ if (holes.size >= 1) {
+ val types = getTypes(holes)
+
+ q"""
+ new {
+ val ir = $ir
+ val self = $self
+
+ def unapply(t: $typeType): _root_.scala.Option[${tupleType(types)}] = {
+ $interpolator.TypeX.extract(ir, t).getMatches(self.symbols).map(${accessAll(types)})
+ }
+ }.unapply($arg)
+ """
+ } else {
+ q"""
+ new {
+ val ir = $ir
+ val self = $self
+
+ def unapplySeq(t: $typeType): _root_.scala.Option[_root_.scala.collection.Seq[_root_.scala.Nothing]] = {
+ $interpolator.TypeX.extract(ir, t).getMatches(self.symbols).map(_ => _root_.scala.collection.Seq[_root_.scala.Nothing]())
+ }
+ }.unapplySeq($arg)
+ """
+ }
+ }
+
+ def e_apply(args: c.Expr[Any]*): c.Tree = {
+
+ val ir = parse(exprParser)
+ val holes = ir.getHoles
+
+ verifyElaborationHoleTypes(holes)
+
+ val types = getTypes(holes)
+
+ verifyArgTypes(args, types)
+
+ q"""
+ {
+ val ir = $ir
+ val self = $self
+ val res: $exprType = $interpolator.ExprE.elaborate(ir)($interpolator.createStore(self.symbols, _root_.scala.collection.Seq(..$args))).get match {
+ case _root_.scala.util.Left(err) => throw _root_.inox.parser.InterpolatorException(err)
+ case _root_.scala.util.Right(((_, ev), cs)) => $interpolator.solve(cs) match {
+ case _root_.scala.util.Left(err) => throw _root_.inox.parser.InterpolatorException(err)
+ case _root_.scala.util.Right(u) => ev.get(u)
+ }
+ }
+ res
+ }
+ """
+ }
+
+ def e_unapply(arg: c.Tree): c.Tree = {
+
+ val ir = parse(exprParser)
+ val holes = ir.getHoles
+
+ if (holes.size >= 1) {
+ val types = getTypes(holes)
+
+ q"""
+ new {
+ val ir = $ir
+ val self = $self
+
+ def unapply(t: $exprType): _root_.scala.Option[${tupleType(types)}] = {
+ $interpolator.ExprX.extract(ir, t).getMatches(self.symbols).map(${accessAll(types)})
+ }
+ }.unapply($arg)
+ """
+ } else {
+ q"""
+ new {
+ val ir = $ir
+ val self = $self
+
+ def unapplySeq(t: $exprType): _root_.scala.Option[_root_.scala.collection.Seq[_root_.scala.Nothing]] = {
+ $interpolator.ExprX.extract(ir, t).getMatches(self.symbols).map(_ => _root_.scala.collection.Seq[_root_.scala.Nothing]())
+ }
+ }.unapplySeq($arg)
+ """
+ }
+ }
+
+ def vd_apply(args: c.Expr[Any]*): c.Tree = {
+
+ val ir = parse(bindingParser(explicitOnly=true))
+ val holes = ir.getHoles
+
+ verifyElaborationHoleTypes(holes)
+
+ val types = getTypes(holes)
+
+ verifyArgTypes(args, types)
+
+ q"""
+ {
+ val ir = $ir
+ val self = $self
+ val res: $valDefType = $interpolator.BindingE.elaborate(ir)($interpolator.createStore(self.symbols, _root_.scala.collection.Seq(..$args))).get match {
+ case _root_.scala.util.Left(err) => throw _root_.inox.parser.InterpolatorException(err)
+ case _root_.scala.util.Right((ev, cs)) => $interpolator.solve(cs) match {
+ case _root_.scala.util.Left(err) => throw _root_.inox.parser.InterpolatorException(err)
+ case _root_.scala.util.Right(u) => ev.evValDef.get(u)
+ }
+ }
+ res
+ }
+ """
+ }
+
+ def vd_unapply(arg: c.Tree): c.Tree = {
+
+ val ir = parse(bindingParser(explicitOnly=true))
+ val holes = ir.getHoles
+
+ if (holes.size >= 1) {
+ val types = getTypes(holes)
+
+ q"""
+ new {
+ val ir = $ir
+ val self = $self
+
+ def unapply(t: $valDefType): _root_.scala.Option[${tupleType(types)}] = {
+ $interpolator.BindingX.extract(ir, t).getMatches(self.symbols).map(${accessAll(types)})
+ }
+ }.unapply($arg)
+ """
+ } else {
+ q"""
+ new {
+ val ir = $ir
+ val self = $self
+
+ def unapplySeq(t: $valDefType): _root_.scala.Option[_root_.scala.collection.Seq[_root_.scala.Nothing]] = {
+ $interpolator.BindingX.extract(ir, t).getMatches(self.symbols).map(_ => _root_.scala.collection.Seq[_root_.scala.Nothing]())
+ }
+ }.unapplySeq($arg)
+ """
+ }
+ }
+
+ def fd_apply(args: c.Expr[Any]*): c.Tree = {
+
+ val ir = parse(functionDefinitionParser)
+ val holes = ir.getHoles
+
+ verifyElaborationHoleTypes(holes)
+
+ val types = getTypes(holes)
+
+ verifyArgTypes(args, types)
+
+ q"""
+ {
+ val ir = $ir
+ val self = $self
+ val res: $funDefType = $interpolator.SingleFunctionE.elaborate(ir)($interpolator.createStore(self.symbols, _root_.scala.collection.Seq(..$args))).get match {
+ case _root_.scala.util.Left(err) => throw _root_.inox.parser.InterpolatorException(err)
+ case _root_.scala.util.Right((ev, cs)) => $interpolator.solve(cs) match {
+ case _root_.scala.util.Left(err) => throw _root_.inox.parser.InterpolatorException(err)
+ case _root_.scala.util.Right(u) => ev.get(u)
+ }
+ }
+ res
+ }
+ """
+ }
+
+ def fd_unapply(arg: c.Tree): c.Tree = {
+
+ val ir = parse(functionDefinitionParser)
+ val holes = ir.getHoles
+
+ if (holes.size >= 1) {
+ val types = getTypes(holes)
+
+ q"""
+ new {
+ val ir = $ir
+ val self = $self
+
+ def unapply(t: $funDefType): _root_.scala.Option[${tupleType(types)}] = {
+ $interpolator.FunctionX.extract(ir, t).getMatches(self.symbols).map(${accessAll(types)})
+ }
+ }.unapply($arg)
+ """
+ } else {
+ q"""
+ new {
+ val ir = $ir
+ val self = $self
+
+ def unapplySeq(t: $funDefType): _root_.scala.Option[_root_.scala.collection.Seq[_root_.scala.Nothing]] = {
+ $interpolator.FunctionX.extract(ir, t).getMatches(self.symbols).map(_ => _root_.scala.collection.Seq[_root_.scala.Nothing]())
+ }
+ }.unapplySeq($arg)
+ """
+ }
+ }
+
+ def td_apply(args: c.Expr[Any]*): c.Tree = {
+
+ val ir = parse(adtDefinitionParser)
+ val holes = ir.getHoles
+
+ verifyElaborationHoleTypes(holes)
+
+ val types = getTypes(holes)
+
+ verifyArgTypes(args, types)
+
+ q"""
+ {
+ val ir = $ir
+ val self = $self
+ val res: $adtSortType = $interpolator.SortE.elaborate(ir)($interpolator.createStore(self.symbols, _root_.scala.collection.Seq(..$args))).get match {
+ case _root_.scala.util.Left(err) => throw _root_.inox.parser.InterpolatorException(err)
+ case _root_.scala.util.Right(((_, ev), cs)) => $interpolator.solve(cs) match {
+ case _root_.scala.util.Left(err) => throw _root_.inox.parser.InterpolatorException(err)
+ case _root_.scala.util.Right(u) => ev.get(u)
+ }
+ }
+ res
+ }
+ """
+ }
+
+ def td_unapply(arg: c.Tree): c.Tree = {
+
+ val ir = parse(adtDefinitionParser)
+ val holes = ir.getHoles
+
+ if (holes.size >= 1) {
+ val types = getTypes(holes)
+
+ q"""
+ new {
+ val ir = $ir
+ val self = $self
+
+ def unapply(t: $adtSortType): _root_.scala.Option[${tupleType(types)}] = {
+ $interpolator.SortX.extract(ir, t).getMatches(self.symbols).map(${accessAll(types)})
+ }
+ }.unapply($arg)
+ """
+ } else {
+ q"""
+ new {
+ val ir = $ir
+ val self = $self
+
+ def unapplySeq(t: $adtSortType): _root_.scala.Option[_root_.scala.collection.Seq[_root_.scala.Nothing]] = {
+ $interpolator.SortX.extract(ir, t).getMatches(self.symbols).map(_ => _root_.scala.collection.Seq[_root_.scala.Nothing]())
+ }
+ }.unapplySeq($arg)
+ """
+ }
+ }
+
+ def p_apply(args: c.Expr[Any]*): c.Tree = {
+
+ val ir = parse(programParser)
+ val holes = ir.getHoles
+
+ verifyElaborationHoleTypes(holes)
+
+ val types = getTypes(holes)
+
+ verifyArgTypes(args, types)
+
+ q"""
+ {
+ val ir = $ir
+ val self = $self
+ val res: $defSeqType = $interpolator.ProgramE.elaborate(ir)($interpolator.createStore(self.symbols, _root_.scala.collection.Seq(..$args))).get match {
+ case _root_.scala.util.Left(err) => throw _root_.inox.parser.InterpolatorException(err)
+ case _root_.scala.util.Right((evs, cs)) => $interpolator.solve(cs) match {
+ case _root_.scala.util.Left(err) => throw _root_.inox.parser.InterpolatorException(err)
+ case _root_.scala.util.Right(u) => evs.map(_.get(u))
+ }
+ }
+ res
+ }
+ """
+ }
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/NumberUtils.scala b/src/main/scala/inox/parser/NumberUtils.scala
new file mode 100644
index 000000000..869aeeafc
--- /dev/null
+++ b/src/main/scala/inox/parser/NumberUtils.scala
@@ -0,0 +1,43 @@
+package inox
+package parser
+
+import scala.collection.BitSet
+
+trait NumberUtils {
+ def toFraction(whole: String, trailing: String, repeating: String): (BigInt, BigInt) = {
+
+ type Fraction = (BigInt, BigInt)
+
+ def add(a: Fraction, b: Fraction): Fraction = {
+ val (na, da) = a
+ val (nb, db) = b
+
+ (na * db + nb * da, da * db)
+ }
+
+ def normalize(a: Fraction): Fraction = {
+ val (na, da) = a
+
+ val gcd = na.gcd(da)
+
+ (na / gcd, da / gcd)
+ }
+
+ val t = BigInt(10).pow(trailing.length)
+
+ val nonRepeatingPart: Fraction = (BigInt(whole + trailing), t)
+ if (repeating.length == 0) {
+ normalize(nonRepeatingPart)
+ }
+ else {
+ val r = BigInt(10).pow(repeating.length)
+ val sign = if (whole.startsWith("-")) -1 else 1
+ val repeatingPart: Fraction = (sign * BigInt(repeating), (r - 1) * t)
+
+ normalize(add(nonRepeatingPart, repeatingPart))
+ }
+ }
+
+ def toBitSet(value: BigInt, base: Int): BitSet =
+ inox.trees.BVLiteral(true, value, base).value // Reuses BVLiteral's signed two's-complement conversion to a BitSet; ugly, but works...
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parsing/Operators.scala b/src/main/scala/inox/parser/Operators.scala
similarity index 79%
rename from src/main/scala/inox/parsing/Operators.scala
rename to src/main/scala/inox/parser/Operators.scala
index 9982cebdd..3f4437f8d 100644
--- a/src/main/scala/inox/parsing/Operators.scala
+++ b/src/main/scala/inox/parser/Operators.scala
@@ -1,7 +1,6 @@
-/* Copyright 2017 EPFL, Lausanne */
package inox
-package parsing
+package parser
sealed abstract class Level {
val ops: Seq[String]
@@ -12,10 +11,9 @@ case class AnyAssoc(op: String) extends Level {
override val ops = Seq(op)
}
-object Operators {
+trait Operators {
val unaries: Seq[String] = Seq("-", "+", "!", "~")
val binaries: Seq[Level] = Seq(
- AnyAssoc("is"),
LeftAssoc(Seq("*", "/", "%", "mod")),
@@ -23,7 +21,7 @@ object Operators {
LeftAssoc(Seq("++")),
- LeftAssoc(Seq("∪", "∩", "∖")),
+ LeftAssoc(Seq("∪", "∩", "\\")),
LeftAssoc(Seq("⊆", "∈")),
@@ -43,8 +41,6 @@ object Operators {
AnyAssoc("||"),
- RightAssoc(Seq("==>")),
-
- RightAssoc(Seq("->"))
+ RightAssoc(Seq("==>"))
)
}
diff --git a/src/main/scala/inox/parser/Parsers.scala b/src/main/scala/inox/parser/Parsers.scala
new file mode 100644
index 000000000..7e1adac81
--- /dev/null
+++ b/src/main/scala/inox/parser/Parsers.scala
@@ -0,0 +1,590 @@
+package inox
+package parser
+
+import scala.util.parsing.combinator._
+import scala.util.parsing.combinator.syntactical._
+import scala.util.parsing.combinator.token._
+import scala.util.parsing.input._
+
+import inox.parser.sc.StringContextParsers
+
+trait Parsers extends StringContextParsers
+ with StdTokenParsers
+ with PackratParsers
+ with Operators
+ with NumberUtils
+ with ParsingErrors
+ with Lexers { self: IRs =>
+
+ implicit class PositionalErrorsDecorator[A](parser: Parser[A]) {
+
+ def withError(onError: Position => String): Parser[A] = new Parser[A] {
+ override def apply(input: Input) = parser(input) match {
+ case s @ Success(_, _) => s
+ case e @ Error(_, rest) => if (input.pos < rest.pos) {
+ e
+ } else {
+ Error(onError(input.pos), rest)
+ }
+ case f @ Failure(_, rest) => if (input.pos < rest.pos) {
+ f
+ } else {
+ Failure(onError(input.pos), rest)
+ }
+ }
+ }
+ }
+
+ override val lexical = new InoxLexer
+ type Tokens = InoxLexer
+
+ import Identifiers._
+ import Bindings._
+ import Exprs.{Primitive => PrimitiveFunctions, _}
+ import Types.{Invocation => TypeInvocation, Operators => TypeOperators, _}
+ import Functions._
+ import ADTs._
+ import Programs._
+
+ def p(c: Char): Parser[lexical.Token] =
+ (elem(lexical.Parenthesis(c)) | elem(lexical.Punctuation(c))).withError(expectedString(c.toString))
+
+ def kw(s: String): Parser[lexical.Token] = elem(lexical.Keyword(s)).withError(expectedString(s))
+
+ def kws(ss: String*): Parser[lexical.Token] = ss.map(s => elem(lexical.Keyword(s))).reduce(_ | _).withError(expectedOneOfStrings(ss : _*))
+
+ def operator(s: String): Parser[lexical.Token] = elem(lexical.Operator(s)).withError(expectedString(s))
+
+ def hseqParser[A <: IR](rep: Parser[A], sep: Parser[Any], allowEmpty: Boolean=false)(implicit ev: HoleTypable[A]): Parser[HSeq[A]] = {
+ val holeSeq: Parser[Either[RepHole[A], A]] = positioned(acceptMatch("hole", {
+ case lexical.Hole(index) => RepHole[A](index)
+ }) <~ elem(lexical.Keyword("..."))).map(Left(_))
+
+ val nonEmpty = rep1sep(holeSeq | rep.map(Right(_)), sep).map(HSeq[A](_))
+ positioned(if (allowEmpty) {
+ opt(nonEmpty).map(_.getOrElse(HSeq[A](Seq())))
+ }
+ else {
+ nonEmpty
+ })
+ }
+
+ lazy val identifierParser: PackratParser[Identifier] = positioned(acceptMatch("identifier", {
+ case lexical.Identifier(name) => IdentifierName(name)
+ case lexical.Hole(index) => IdentifierHole(index)
+ })).withError(expected("an identifier"))
+
+ lazy val holeParser: PackratParser[Int] = acceptMatch("hole", {
+ case lexical.Hole(index) => index
+ })
+
+ lazy val typeParser: PackratParser[Type] = contextTypeParser(None)
+
+ def contextTypeParser(context: Option[String]): Parser[Type] = {
+
+ import Primitives.{Type => _, _}
+ import TypeOperators._
+
+ val typeHoleParser: Parser[Type] =
+ holeParser ^^ { (index: Int) => TypeHole(index) }
+
+ object PrimitiveType {
+ def unapply(arg: String): Option[Primitives.Type] = arg match {
+ case "Int" => Some(BVType(true, 32))
+ case "Integer" => Some(IntegerType)
+ case "Real" => Some(RealType)
+ case "Boolean" => Some(BooleanType)
+ case "String" => Some(StringType)
+ case "Char" => Some(CharType)
+ case "Unit" => Some(UnitType)
+ case _ if arg.startsWith("Int") => scala.util.Try {
+ val size = BigInt(arg.drop(3))
+ BVType(true, size.toInt)
+ }.toOption
+ case _ if arg.startsWith("UInt") => scala.util.Try {
+ val size = BigInt(arg.drop(4))
+ BVType(false, size.toInt)
+ }.toOption
+ case _ => None
+ }
+ }
+
+ val primitiveParser = acceptMatch("type primitive", {
+ case lexical.Identifier(PrimitiveType(tpe)) => Primitive(tpe)
+ })
+
+ object TypeOperator {
+ def unapply(arg: String): Option[Operator] = arg match {
+ case "Map" => Some(Map)
+ case "Set" => Some(Set)
+ case "Bag" => Some(Bag)
+ case _ => None
+ }
+ }
+
+ val typeOperatorParser: Parser[Operator] = acceptMatch("type operator", {
+ case lexical.Identifier(TypeOperator(op)) => op
+ })
+
+ val operationParser: Parser[Type] =
+ typeOperatorParser ~ (p('[') ~> hseqParser(typeParser, p(',')) <~ p(']')) ^^ {
+ case op ~ args => Operation(op, args)
+ }
+
+ val invocationParser: Parser[Type] =
+ identifierParser ~ (p('[') ~> hseqParser(typeParser, p(',')) <~ p(']')) ^^ {
+ case id ~ args => TypeInvocation(id, args)
+ }
+
+ val inParensParser: Parser[Type] =
+ p('(') ~> hseqParser(typeParser, p(',')) <~ p(')') ^^ {
+ xs => if (xs.elems.size == 1 && xs.elems.head.isRight) xs.elems.head.right.get else TupleType(xs)
+ }
+
+ val variableParser: Parser[Type] = identifierParser ^^ {
+ case i => Types.Variable(i)
+ }
+
+ lazy val defaultNamedBinding: Parser[Binding] = context match {
+ case None => failure("no default names available").withError(expected("a binding"))
+ case Some(name) => ret.map(tpe => ExplicitValDef(IdentifierName(name), tpe)).withError(expected("a binding or a type"))
+ }
+
+ lazy val refinementTypeParser: Parser[Type] =
+ p('{') ~> (bindingParser(explicitOnly=true) | defaultNamedBinding) ~ (operator("|") ~> exprParser <~ p('}')) ^^ {
+ case binding ~ expr => RefinementType(binding, expr)
+ }
+
+ lazy val singleTypeParser: Parser[Type] = positioned(
+ typeHoleParser |
+ primitiveParser |
+ operationParser |
+ invocationParser |
+ variableParser |
+ refinementTypeParser |
+ inParensParser) withError(expected("a type"))
+
+ lazy val typesGroup: Parser[HSeq[Type]] =
+ (p('(') ~> hseqParser(typeParser, p(','), allowEmpty=true) <~ (p(')'))) |
+ singleTypeParser ^^ { tpe => HSeq[Type](Seq(Right(tpe))) }
+
+ lazy val depTypesGroup: Parser[HSeq[Binding]] =
+ (p('(') ~> hseqParser(bindingParser(explicitOnly=true), p(','), allowEmpty=true) <~ (p(')')))
+
+ lazy val arrowLeft: Parser[Type => Type] =
+ (typesGroup ^^ (lhs => (rhs: Type) => FunctionType(lhs, rhs))) |
+ (kw("Pi") ~> depTypesGroup ^^ (lhs => (rhs: Type) => PiType(lhs, rhs))) |
+ (kw("Sigma") ~> depTypesGroup ^^ (lhs => (rhs: Type) => SigmaType(lhs, rhs)))
+
+ lazy val ret = positioned(rep(arrowLeft <~ kw("=>")) ~ singleTypeParser ^^ { case fs ~ x =>
+ fs.foldRight(x) {
+ case (f, acc) => f(acc)
+ }
+ })
+
+ ret
+ }
+
+ lazy val exprParser: PackratParser[Expr] = {
+
+ val exprHoleParser: Parser[Expr] =
+ holeParser ^^ { i => ExprHole(i) }
+
+ val ifParser: Parser[Expr] = for {
+ _ <- kw("if")
+ _ <- p('(')
+ c <- exprParser
+ _ <- p(')')
+ t <- exprParser
+ _ <- kw("else")
+ e <- exprParser
+ } yield If(c, t, e)
+
+ val literalParser: Parser[Expr] = acceptMatch("literal expression", {
+ case lexical.DecimalLit(whole, trailing, repeating) => {
+ val (n, d) = toFraction(whole, trailing, repeating)
+ FractionLiteral(n, d)
+ }
+ case lexical.NumericLit(number) =>
+ IntegerLiteral(BigInt(number))
+ case lexical.StringLit(string) =>
+ StringLiteral(string)
+ case lexical.CharLit(character) =>
+ CharLiteral(character)
+ case lexical.Keyword("true") =>
+ BooleanLiteral(true)
+ case lexical.Keyword("false") =>
+ BooleanLiteral(false)
+ })
+
+ val unitLiteralParser: Parser[Expr] = p('(') ~> p(')') ^^^ UnitLiteral()
+
+ val tupleParser: Parser[Expr] = p('(') ~> hseqParser(exprParser, p(',')) <~ p(')') ^^ { xs =>
+ if (xs.elems.size == 1 && xs.elems.head.isRight) xs.elems.head.right.get else Tuple(xs)
+ }
+
+ val blockParser: Parser[Expr] = p('{') ~> exprParser <~ p('}')
+
+ val letParser: Parser[Expr] = for {
+ _ <- kw("let")
+ b <- bindingParser(explicitOnly=false)
+ _ <- kw("=")
+ v <- exprParser
+ _ <- p(';')
+ e <- exprParser
+ } yield Let(b, v, e)
+
+ val assumeParser: Parser[Expr] = for {
+ _ <- kw("assume")
+ v <- p('(') ~> exprParser <~ p(')')
+ _ <- p(';')
+ e <- exprParser
+ } yield Assume(v, e)
+
+ val quantifierParameters: Parser[HSeq[Binding]] = {
+
+ val withParens = p('(') ~> hseqParser(bindingParser(explicitOnly=false), p(','), allowEmpty=true) <~ p(')')
+
+ val uniqueUntyped = identifierParser.map((x: Identifier) => HSeq.fromSeq(Seq(InferredValDef(x))))
+
+ positioned(withParens | uniqueUntyped)
+ }
+
+ val lambdaParser: Parser[Expr] = for {
+ _ <- opt(kw("lambda"))
+ ps <- quantifierParameters
+ _ <- kw("=>")
+ e <- exprParser
+ } yield Abstraction(Lambda, ps, e)
+
+ val forallParser: Parser[Expr] = for {
+ _ <- kw("forall")
+ ps <- quantifierParameters
+ _ <- kw("=>")
+ e <- exprParser
+ } yield Abstraction(Forall, ps, e)
+
+ val chooseParser: Parser[Expr] = for {
+ _ <- kw("choose")
+ b <- (p('(') ~> bindingParser(explicitOnly=false) <~ p(')')) |
+ identifierParser.map(InferredValDef(_))
+ _ <- kw("=>")
+ e <- exprParser
+ } yield Choose(b, e)
+
+ val primitiveConstructorParser: Parser[Expr] = {
+
+ val exprPairParser: Parser[ExprPair] = {
+
+ val exprPairHole: Parser[ExprPair] = acceptMatch("expression pair hole", {
+ case lexical.Hole(i) => PairHole(i)
+ })
+
+ lazy val exprPair: Parser[ExprPair] = for {
+ l <- exprParser
+ _ <- kw("->")
+ r <- exprParser
+ } yield Pair(l, r)
+
+ exprPairHole | exprPair
+ }
+
+ val mapConstructorParser: Parser[Expr] = for {
+ _ <- elem(lexical.Identifier("Map"))
+ otps <- opt(p('[') ~> hseqParser(typeParser, p(',')) <~ p(']'))
+ d <- p('(') ~> exprParser <~ p(',')
+ ps <- hseqParser(exprPairParser, p(','), allowEmpty=true) <~ p(')')
+ } yield MapConstruction(otps, ps, d)
+
+ val setConstructorParser: Parser[Expr] = for {
+ _ <- elem(lexical.Identifier("Set"))
+ otps <- opt(p('[') ~> hseqParser(typeParser, p(',')) <~ p(']'))
+ es <- p('(') ~> hseqParser(exprParser, p(','), allowEmpty=true) <~ p(')')
+ } yield SetConstruction(otps, es)
+
+ val bagConstructorParser: Parser[Expr] = for {
+ _ <- elem(lexical.Identifier("Bag"))
+ otps <- opt(p('[') ~> hseqParser(typeParser, p(',')) <~ p(']'))
+ es <- p('(') ~> hseqParser(exprPairParser, p(','), allowEmpty=true) <~ p(')')
+ } yield BagConstruction(otps, es)
+
+ mapConstructorParser | setConstructorParser | bagConstructorParser
+ }
+
+ val primitiveFunctions = Map(
+ "elementOfSet" -> PrimitiveFunctions.ElementOfSet,
+ "setAdd" -> PrimitiveFunctions.SetAdd,
+ "setUnion" -> PrimitiveFunctions.SetUnion,
+ "setIntersection" -> PrimitiveFunctions.SetIntersection,
+ "setDifference" -> PrimitiveFunctions.SetDifference,
+ "subset" -> PrimitiveFunctions.Subset,
+ "bagAdd" -> PrimitiveFunctions.BagAdd,
+ "multiplicity" -> PrimitiveFunctions.MultiplicityInBag,
+ "bagIntersection" -> PrimitiveFunctions.BagIntersection,
+ "bagUnion" -> PrimitiveFunctions.BagUnion,
+ "bagDifference" -> PrimitiveFunctions.BagDifference,
+ "apply" -> PrimitiveFunctions.MapApply,
+ "updated" -> PrimitiveFunctions.MapUpdated,
+ "concatenate" -> PrimitiveFunctions.StringConcat,
+ "substring" -> PrimitiveFunctions.SubString,
+ "length" -> PrimitiveFunctions.StringLength)
+
+ val primitiveNameParser: Parser[PrimitiveFunctions.Function] = acceptMatch("primitive function name", {
+ case lexical.Identifier(name) if primitiveFunctions.contains(name) => primitiveFunctions(name)
+ })
+
+ val primitiveInvocationParser: Parser[Expr] = for {
+ f <- primitiveNameParser
+ tps <- opt(p('[') ~> hseqParser(typeParser, p(',')) <~ p(']'))
+ ps <- (p('(') ~> hseqParser(exprParser, p(','), allowEmpty=true) <~ p(')'))
+ } yield PrimitiveInvocation(f, tps, ps)
+
+
+ val castName: Parser[(Casts.Mode, Int)] = {
+
+ class CastName(name: String) {
+ def unapply(x: String): Option[Int] = for {
+ t <- Some(x.startsWith(name))
+ if t
+ bi <- scala.util.Try(BigInt(x.drop(name.size))).toOption
+ if bi.isValidInt
+ } yield bi.intValue
+ }
+
+ object NarrowName extends CastName("narrow")
+ object WidenName extends CastName("widen")
+
+ acceptMatch("cast function name", {
+ case lexical.Identifier(NarrowName(size)) => (Casts.Narrow, size)
+ case lexical.Identifier(WidenName(size)) => (Casts.Widen, size)
+ })
+ }
+
+ val castParser: Parser[Expr] = for {
+ (mode, size) <- castName
+ arg <- p('(') ~> exprParser <~ p(')')
+ } yield Cast(mode, arg, size)
+
+ val invocationParser: Parser[Expr] = for {
+ i <- identifierParser
+ tps <- opt(p('[') ~> hseqParser(typeParser, p(',')) <~ p(']'))
+ ps <- p('(') ~> hseqParser(exprParser, p(','), allowEmpty=true) <~ p(')')
+ } yield Invocation(i, tps, ps)
+
+ val variableParser: Parser[Expr] = identifierParser ^^ {
+ i => Exprs.Variable(i)
+ }
+
+ val nonOperatorParser: Parser[Expr] = positioned(
+ lambdaParser |
+ literalParser |
+ unitLiteralParser |
+ primitiveConstructorParser |
+ primitiveInvocationParser |
+ castParser |
+ invocationParser |
+ exprHoleParser |
+ variableParser |
+ tupleParser |
+ blockParser |
+ ifParser |
+ letParser |
+ assumeParser |
+ forallParser |
+ chooseParser).withError(expected("an expression"))
+
+ val postfixedParser: Parser[Expr] = positioned({
+
+ object TupleSelector {
+ def unapply(s: String): Option[Int] =
+ if (s.startsWith("_")) scala.util.Try {
+ BigInt(s.drop(1)).toInt
+ }.toOption
+ else None
+ }
+
+ val tupleSelectorParser: Parser[Int] = acceptMatch("tuple selector", {
+ case lexical.Identifier(TupleSelector(i)) => i
+ })
+
+ val fieldParser = kw(".") ~> (tupleSelectorParser.map(Left(_)) | identifierParser.map(Right(_)))
+
+ val argsParser = p('(') ~> hseqParser(exprParser, p(','), allowEmpty=true) <~p(')')
+
+ val asParser = kw("as") ~> typeParser
+
+ val isParser = kw("is") ~> identifierParser
+
+ val postfixParser =
+ fieldParser.map(i => Left(Left(i))) |
+ argsParser.map(as => Left(Right(as))) |
+ asParser.map(tpe => Right(Left(tpe))) |
+ isParser.map(i => Right(Right(i)))
+
+ nonOperatorParser ~ rep(postfixParser) ^^ {
+ case e ~ fs => fs.foldLeft(e) {
+ case (acc, Left(Left(Left(i)))) => TupleSelection(acc, i)
+ case (acc, Left(Left(Right(i)))) => Selection(acc, i)
+ case (acc, Left(Right(xs))) => Application(acc, xs)
+ case (acc, Right(Left(tpe))) => TypeAnnotation(acc, tpe)
+ case (acc, Right(Right(i))) => IsConstructor(acc, i)
+ }
+ }
+ })
+
+ val operatorParser: Parser[Expr] = {
+
+ val unaryParser: Parser[Expr] = {
+ rep(unaries.map(operator(_)).reduce(_ | _)) ~ postfixedParser ^^ { case os ~ e =>
+ os.foldRight(e) {
+ case (lexical.Operator(o), acc) => o match {
+ case "+" => e
+ case "-" => UnaryOperation(Unary.Minus, acc)
+ case "~" => UnaryOperation(Unary.BVNot, acc)
+ case "!" => UnaryOperation(Unary.Not, acc)
+ case _ => throw new IllegalArgumentException("Unknown operator: " + o)
+ }
+ case (tk, _) => throw new IllegalArgumentException("Unexpected token: " + tk)
+ }
+ }
+ }
+
+ binaries.foldLeft(unaryParser) {
+ case (acc, LeftAssoc(ops)) => acc ~ rep(ops.map(operator(_) ~ acc).reduce(_ | _)) ^^ {
+ case first ~ pairs => {
+ pairs.foldLeft(first) {
+ case (acc, lexical.Operator("+") ~ elem) =>
+ BinaryOperation(Binary.Plus, acc, elem)
+ case (acc, lexical.Operator("-") ~ elem) =>
+ BinaryOperation(Binary.Minus, acc, elem)
+ case (acc, lexical.Operator("*") ~ elem) =>
+ BinaryOperation(Binary.Times, acc, elem)
+ case (acc, lexical.Operator("/") ~ elem) =>
+ BinaryOperation(Binary.Division, acc, elem)
+ case (acc, lexical.Operator("mod") ~ elem) =>
+ BinaryOperation(Binary.Modulo, acc, elem)
+ case (acc, lexical.Operator("%") ~ elem) =>
+ BinaryOperation(Binary.Remainder, acc, elem)
+ case (acc, lexical.Operator("==") ~ elem) =>
+ BinaryOperation(Binary.Equals, acc, elem)
+ case (acc, lexical.Operator("!=") ~ elem) =>
+ UnaryOperation(Unary.Not, BinaryOperation(Binary.Equals, acc, elem))
+ case (acc, lexical.Operator("<=") ~ elem) =>
+ BinaryOperation(Binary.LessEquals, acc, elem)
+ case (acc, lexical.Operator("<") ~ elem) =>
+ BinaryOperation(Binary.LessThan, acc, elem)
+ case (acc, lexical.Operator(">=") ~ elem) =>
+ BinaryOperation(Binary.GreaterEquals, acc, elem)
+ case (acc, lexical.Operator(">") ~ elem) =>
+ BinaryOperation(Binary.GreaterThan, acc, elem)
+ case (acc, lexical.Operator("&") ~ elem) =>
+ BinaryOperation(Binary.BVAnd, acc, elem)
+ case (acc, lexical.Operator("|") ~ elem) =>
+ BinaryOperation(Binary.BVOr, acc, elem)
+ case (acc, lexical.Operator("^") ~ elem) =>
+ BinaryOperation(Binary.BVXor, acc, elem)
+ case (acc, lexical.Operator("<<") ~ elem) =>
+ BinaryOperation(Binary.BVShiftLeft, acc, elem)
+ case (acc, lexical.Operator(">>") ~ elem) =>
+ BinaryOperation(Binary.BVAShiftRight, acc, elem)
+ case (acc, lexical.Operator(">>>") ~ elem) =>
+ BinaryOperation(Binary.BVLShiftRight, acc, elem)
+ case (acc, lexical.Operator("++") ~ elem) =>
+ PrimitiveInvocation(PrimitiveFunctions.StringConcat, None, HSeq.fromSeq(Seq(acc, elem)))
+ case (acc, lexical.Operator("∪") ~ elem) =>
+ PrimitiveInvocation(PrimitiveFunctions.SetUnion, None, HSeq.fromSeq(Seq(acc, elem)))
+ case (acc, lexical.Operator("∩") ~ elem) =>
+ PrimitiveInvocation(PrimitiveFunctions.SetIntersection, None, HSeq.fromSeq(Seq(acc, elem)))
+ case (acc, lexical.Operator("\\") ~ elem) =>
+ PrimitiveInvocation(PrimitiveFunctions.SetDifference, None, HSeq.fromSeq(Seq(acc, elem)))
+ case (acc, lexical.Operator("⊆") ~ elem) =>
+ PrimitiveInvocation(PrimitiveFunctions.Subset, None, HSeq.fromSeq(Seq(acc, elem)))
+ case (acc, lexical.Operator("∈") ~ elem) =>
+ PrimitiveInvocation(PrimitiveFunctions.ElementOfSet, None, HSeq.fromSeq(Seq(acc, elem)))
+ case (_, op ~ _) => throw new IllegalArgumentException("Unknown operator: " + op)
+ }
+ }
+ }
+ case (acc, RightAssoc(ops)) => acc ~ rep(ops.map(operator(_) ~ acc).reduce(_ | _)) ^^ {
+ case first ~ pairs => {
+ val os = pairs.map(_._1)
+ val es = first +: pairs.map(_._2)
+ (es.init.zip(os)).foldRight(es.last) {
+ case ((elem, lexical.Operator("==>")), acc) => BinaryOperation(Binary.Implies, elem, acc)
+ case ((_, op), _) => throw new IllegalArgumentException("Unknown operator: " + op)
+ }
+ }
+ }
+ case (acc, AnyAssoc(op)) => rep1sep(acc, operator(op)) ^^ { xs =>
+ val nary = op match {
+ case "&&" => NAry.And
+ case "||" => NAry.Or
+ case _ => throw new IllegalArgumentException("Unknown operator: " + op)
+ }
+ if (xs.length == 1) {
+ xs.head
+ }
+ else {
+ NaryOperation(nary, HSeq.fromSeq(xs))
+ }
+ }
+ }
+ }
+
+ positioned(operatorParser) withError(expected("an expression"))
+ }
+
+ lazy val functionDefinitionParser: PackratParser[Function] = positioned(for {
+ _ <- kw("def")
+ i <- identifierParser
+ ts <- opt(p('[') ~> hseqParser(identifierParser, p(',')) <~ p(']'))
+ ps <- p('(') ~> hseqParser(bindingParser(explicitOnly=true), p(','), allowEmpty=true) <~ p(')')
+ ot <- opt(p(':') ~> typeParser)
+ _ <- kw("=")
+ b <- exprParser
+ } yield Function(i, ts.getOrElse(HSeq.fromSeq(Seq[Identifier]())), ps, ot, b))
+
+ lazy val adtDefinitionParser: PackratParser[Sort] = positioned({
+ val constructorParser: Parser[Constructor] = positioned((for {
+ i <- identifierParser
+ ps <- p('(') ~> hseqParser(bindingParser(explicitOnly=true), p(','), allowEmpty=true) <~ p(')')
+ } yield ConstructorValue(i, ps)) | holeParser.map(ConstructorHole(_)))
+
+ for {
+ _ <- kw("type")
+ i <- identifierParser
+ ts <- opt(p('[') ~> hseqParser(identifierParser, p(',')) <~ p(']'))
+ _ <- kw("=")
+ cs <- hseqParser(constructorParser, operator("|"))
+ } yield Sort(i, ts.getOrElse(HSeq.fromSeq(Seq[Identifier]())), cs)
+ })
+
+ def bindingParser(explicitOnly: Boolean=false): Parser[Binding] = {
+
+ def typeParserOf(id: Identifier): Parser[Type] = id match {
+ case IdentifierHole(_) => typeParser
+ case IdentifierName(name) => contextTypeParser(Some(name))
+ }
+
+ val explicitBinding = for {
+ i <- identifierParser
+ tpe <- p(':') ~> typeParserOf(i)
+ } yield ExplicitValDef(i, tpe)
+
+ val holeBinding = holeParser.map(BindingHole(_))
+
+ val implicitBinding = identifierParser.map(InferredValDef(_))
+
+ positioned({
+ if (explicitOnly) {
+ explicitBinding | holeBinding
+ }
+ else {
+ explicitBinding | holeBinding | implicitBinding
+ }
+ })
+ }.withError(expected("a binding"))
+
+ lazy val programParser: PackratParser[Program] =
+ positioned(rep1(adtDefinitionParser.map(Left(_)) | functionDefinitionParser.map(Right(_))).map(Program(_)))
+}
diff --git a/src/main/scala/inox/parser/Trees.scala b/src/main/scala/inox/parser/Trees.scala
new file mode 100644
index 000000000..07f3bb2c5
--- /dev/null
+++ b/src/main/scala/inox/parser/Trees.scala
@@ -0,0 +1,6 @@
+package inox
+package parser
+
+trait Trees {
+ protected val trees: inox.ast.Trees
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/elaboration/Constraints.scala b/src/main/scala/inox/parser/elaboration/Constraints.scala
new file mode 100644
index 000000000..688a02393
--- /dev/null
+++ b/src/main/scala/inox/parser/elaboration/Constraints.scala
@@ -0,0 +1,384 @@
+package inox
+package parser
+package elaboration
+
+import scala.util.parsing.input.Position
+
+trait Constraints { self: IRs with SimpleTypes with ElaborationErrors =>
+
+ import SimpleTypes._
+ import TypeClasses._
+ import Constraints._
+
+ sealed trait Constraint
+ object Constraints {
+ case class Exists(elem: Type) extends Constraint
+ case class Equals(left: Type, right: Type) extends Constraint
+ case class HasClass(elem: Type, typeClass: TypeClass) extends Constraint
+ case class OneOf(unknown: Unknown, tpe: Type, typeOptions: Seq[Type]) extends Constraint
+ }
+
+ object Constraint {
+ def exist(elem: Unknown): Constraint = Exists(elem)
+ def equal(left: Type, right: Type): Constraint = Equals(left, right)
+ def isNumeric(elem: Type): Constraint = HasClass(elem, Numeric)
+ def isIntegral(elem: Type): Constraint = HasClass(elem, Integral)
+ def isComparable(elem: Type): Constraint = HasClass(elem, Comparable)
+ def isBits(elem: Type, lower: Option[Int] = None, upper: Option[Int] = None, signed: Boolean = true) =
+ HasClass(elem, Bits(signed, (lower, upper) match {
+ case (None, None) => NoSpec
+ case (Some(l), None) => GreaterEquals(l)
+ case (None, Some(u)) => LessEquals(u)
+ case (Some(l), Some(u)) => Between(l, u).validate.getOrElse {
+ throw new IllegalArgumentException("Invalid bounds.")
+ }
+ }))
+ def atIndexIs(scrutinee: Type, index: Int, value: Type): Constraint =
+ HasClass(scrutinee, WithIndices(Map(index -> value)))
+ def hasFields(elem: Type, fields: Set[String], sorts: Seq[(inox.Identifier, Type => Seq[Constraint])]): Constraint = {
+ val mapping = sorts.foldLeft(Map[inox.Identifier, Type => Seq[Constraint]]()) {
+ case (acc, (id, f)) => acc.get(id) match {
+ case None => acc + (id -> f)
+ case Some(f2) => acc + (id -> { (t: Type) => f2(t) ++ f(t) })
+ }
+ }
+ HasClass(elem, WithFields(fields, mapping))
+ }
+ def oneOf(unknown: Unknown, tpe: Type, typeOptions: Seq[Type]): Constraint =
+ OneOf(unknown, tpe, typeOptions)
+ }
+
+ class Eventual[+A] private(private val fun: Unifier => A) {
+ def get(implicit unifier: Unifier): A = fun(unifier)
+
+ def map[B](f: A => B): Eventual[B] =
+ new Eventual(fun andThen f)
+
+ def flatMap[B](f: A => Eventual[B]): Eventual[B] =
+ new Eventual((u: Unifier) => f(fun(u)).fun(u))
+ }
+
+ object Eventual {
+ def pure[A](x: A): Eventual[A] = new Eventual((u: Unifier) => x)
+ def withUnifier[A](f: Unifier => A) = new Eventual(f)
+ def sequence[A](eventuals: Seq[Eventual[A]]): Eventual[Seq[A]] =
+ new Eventual((u: Unifier) => eventuals.map(_.get(u)))
+ def sequence[K, A](eventuals: Map[K, Eventual[A]]): Eventual[Map[K, A]] =
+ new Eventual((u: Unifier) => eventuals.mapValues(_.get(u)))
+ def unify[A](value: A)(implicit ev: Unifiable[A]): Eventual[A] =
+ ev.unify(value)
+ }
+
+ class Unifier private(mapping: Map[Unknown, Type]) {
+ def get(unknown: Unknown): Type = mapping.getOrElse(unknown, unknown)
+
+ def +(pair: (Unknown, Type)): Unifier =
+ new Unifier(Unifier(pair)(mapping) + pair)
+
+ def apply[A](value: A)(implicit unifiable: Unifiable[A]): A =
+ unifiable.unify(value).get(this)
+ }
+ object Unifier {
+ def apply(pair: (Unknown, Type)): Unifier = new Unifier(Map(pair))
+ def empty: Unifier = new Unifier(Map())
+ }
+
+ trait Unifiable[A] {
+ def unify(value: A): Eventual[A]
+ }
+
+ object Unifiable {
+ def apply[A](fun: A => Eventual[A]): Unifiable[A] = new Unifiable[A] {
+ override def unify(value: A): Eventual[A] = fun(value)
+ }
+ }
+
+ implicit lazy val simpleTypeUnifiable: Unifiable[Type] = Unifiable {
+ case u: Unknown => Eventual.withUnifier(_.get(u))
+ case FunctionType(froms, to) => for {
+ fs <- Eventual.unify(froms)
+ t <- Eventual.unify(to)
+ } yield FunctionType(fs, t)
+ case MapType(from, to) => for {
+ f <- Eventual.unify(from)
+ t <- Eventual.unify(to)
+ } yield MapType(f, t)
+ case SetType(elem) => for {
+ e <- Eventual.unify(elem)
+ } yield SetType(e)
+ case BagType(elem) => for {
+ e <- Eventual.unify(elem)
+ } yield BagType(e)
+ case TupleType(elems) => for {
+ es <- Eventual.unify(elems)
+ } yield TupleType(es)
+ case ADTType(identifier, args) => for {
+ as <- Eventual.unify(args)
+ } yield ADTType(identifier, as)
+ case tpe => Eventual.pure(tpe)
+ }
+
+ implicit lazy val constraintUnifiable: Unifiable[Constraint] = Unifiable {
+ case Constraints.Exists(elem) => for {
+ e <- Eventual.unify(elem)
+ } yield Constraints.Exists(e)
+ case Constraints.Equals(left, right) => for {
+ l <- Eventual.unify(left)
+ r <- Eventual.unify(right)
+ } yield Constraints.Equals(l, r)
+ case Constraints.HasClass(elem, typeClass) => for {
+ e <- Eventual.unify(elem)
+ t <- Eventual.unify(typeClass)
+ } yield Constraints.HasClass(e, t)
+ case OneOf(unknown, tpe, typeOptions) =>
+ for {
+ t <- Eventual.unify(tpe)
+ goal <- Eventual.sequence(typeOptions.map(Eventual.unify(_)))
+ } yield Constraints.OneOf(unknown, t, goal)
+ }
+
+
+ implicit lazy val typeClassUnifiable: Unifiable[TypeClass] = Unifiable {
+ case WithFields(fields, sorts) => for {
+ ss <- Eventual.sequence(sorts.mapValues { (function: Type => Seq[Constraint]) =>
+ Eventual.withUnifier { (unifier: Unifier) =>
+ function andThen (_.map(unifier(_)))
+ }
+ }.view.force)
+ } yield WithFields(fields, ss)
+ case WithIndices(indices) => for {
+ is <- Eventual.sequence(indices.mapValues(Eventual.unify(_)).view.force)
+ } yield WithIndices(is)
+ case x => Eventual.pure(x)
+ }
+
+
+ implicit def pairUnifiable[A: Unifiable, B: Unifiable]: Unifiable[(A, B)] = Unifiable {
+ a: (A, B) => for {
+ first <- Eventual.unify(a._1)
+ second <- Eventual.unify(a._2)
+ } yield (first, second)
+ }
+
+ implicit def seqUnifiable[A](implicit inner: Unifiable[A]): Unifiable[Seq[A]] = Unifiable { xs: Seq[A] =>
+ Eventual.sequence(xs.map(inner.unify(_)))
+ }
+
+ implicit def mapUnifiable[K, A](implicit inner: Unifiable[A]): Unifiable[Map[K, A]] = Unifiable { xs: Map[K, A] =>
+ Eventual.sequence(xs.mapValues(inner.unify(_)).view.force)
+ }
+
+ class Constrained[+A] private(val get: Either[ErrorMessage, (A, Seq[Constraint])]) {
+ def map[B](f: A => B): Constrained[B] =
+ new Constrained(get.right.map { case (v, cs) => (f(v), cs) })
+
+ def flatMap[B](f: A => Constrained[B]): Constrained[B] =
+ new Constrained(get.right.flatMap { case (v1, cs1) =>
+ val other = f(v1).get
+ other.right.map { case (v2, cs2) => (v2, cs1 ++ cs2) }
+ })
+
+ def addConstraint(constraint: Constraint): Constrained[A] =
+ new Constrained(get.right.map { case (v, cs) => (v, cs :+ constraint) })
+
+ def addConstraints(constraints: Seq[Constraint]): Constrained[A] =
+ constraints.foldLeft(this) { case (acc, c) => acc.addConstraint(c) }
+
+ def checkImmediate(condition: Boolean, error: => ErrorMessage): Constrained[A] =
+ if (condition) this else Constrained.fail(error)
+
+ def checkImmediate(condition: A => Boolean, error: => ErrorMessage): Constrained[A] =
+ flatMap { x =>
+ if (condition(x)) Constrained.pure(x) else Constrained.fail(error)
+ }
+
+ def checkImmediate(condition: Boolean, where: IR, error: Position => String): Constrained[A] =
+ if (condition) this else Constrained.fail(error(where.pos))
+
+ def checkImmediate(condition: A => Boolean, where: IR, error: A => Position => String): Constrained[A] =
+ flatMap { x =>
+ if (condition(x)) Constrained.pure(x) else Constrained.fail(error(x)(where.pos))
+ }
+
+ def withFilter(pred: A => Boolean): Constrained[A] = new Constrained(get match {
+ case Right((a, _)) if !pred(a) => Left(filterError)
+ case _ => get
+ })
+ }
+
+ object Constrained {
+ def apply(constraints: Constraint*): Constrained[Unit] = {
+ constraints.foldLeft(pure(())) {
+ case (acc, constraint) => acc.addConstraint(constraint)
+ }
+ }
+
+ def pure[A](x: A): Constrained[A] = {
+ new Constrained(Right((x, Seq())))
+ }
+ def fail(error: ErrorMessage): Constrained[Nothing] =
+ new Constrained(Left(error))
+
+ def sequence[A](constraineds: Seq[Constrained[A]]): Constrained[Seq[A]] = {
+ constraineds.foldLeft(Constrained.pure(Seq[A]())) {
+ case (acc, constrained) => for {
+ xs <- acc
+ x <- constrained
+ } yield xs :+ x
+ }
+ }
+
+ def attempt[A](opt: Option[A], error: => ErrorMessage): Constrained[A] = opt match {
+ case Some(x) => Constrained.pure(x)
+ case None => Constrained.fail(error)
+ }
+
+ def attempt[A](opt: Option[A], where: IR, error: Position => ErrorMessage): Constrained[A] = opt match {
+ case Some(x) => Constrained.pure(x)
+ case None => Constrained.fail(error(where.pos))
+ }
+
+ def checkImmediate(condition: Boolean, error: => ErrorMessage): Constrained[Unit] =
+ if (condition) Constrained.pure(()) else Constrained.fail(error)
+
+ def checkImmediate(condition: Boolean, where: IR, error: Position => String): Constrained[Unit] =
+ if (condition) Constrained.pure(()) else Constrained.fail(error(where.pos))
+ }
+
+ object TypeClasses {
+
+ sealed abstract class SizeSpec {
+ def combine(that: SizeSpec): Option[SizeSpec]
+
+ def accepts(value: Int) = this match {
+ case LessEquals(upper) => value <= upper
+ case GreaterEquals(lower) => value >= lower
+ case Between(lower, upper) => value >= lower && value <= upper
+ case NoSpec => true
+ }
+ }
+ case object NoSpec extends SizeSpec {
+ override def combine(that: SizeSpec): Option[SizeSpec] = Some(that)
+ }
+ case class LessEquals(value: Int) extends SizeSpec {
+ override def combine(that: SizeSpec): Option[SizeSpec] = that match {
+ case LessEquals(other) => Some(LessEquals(Math.min(value, other)))
+ case GreaterEquals(other) => Between(value, other).validate
+ case Between(low, high) => Between(low, Math.min(value, high)).validate
+ case NoSpec => Some(this)
+ }
+ }
+ case class GreaterEquals(value: Int) extends SizeSpec {
+ override def combine(that: SizeSpec): Option[SizeSpec] = that match {
+ case LessEquals(other) => Between(other, value).validate
+ case GreaterEquals(other) => Some(GreaterEquals(Math.max(value, other)))
+ case Between(low, high) => Between(Math.max(value, low), high).validate
+ case NoSpec => Some(this)
+ }
+ }
+ case class Between(low: Int, high: Int) extends SizeSpec {
+ def validate: Option[Between] = if (high >= low) Some(this) else None
+
+ override def combine(that: SizeSpec): Option[SizeSpec] = that match {
+ case LessEquals(other) => Between(low, Math.min(high, other)).validate
+ case GreaterEquals(other) => Between(Math.max(low, other), high).validate
+ case Between(otherLow, otherHigh) => Between(Math.max(low, otherLow), Math.min(high, otherHigh)).validate
+ case NoSpec => Some(this)
+ }
+ }
+
+ sealed abstract class TypeClass {
+
+ def combine(that: TypeClass)(tpe: Type): Option[Seq[Constraint]] = (this, that) match {
+ case (WithFields(fs1, s1), WithFields(fs2, s2)) => {
+ val intersect: Seq[inox.Identifier] = s1.keySet.intersect(s2.keySet).toSeq
+
+ val size = intersect.size
+ if (size == 0) {
+ None
+ }
+ else if (size == 1) {
+ val id = intersect.head
+ Some(s1(id)(tpe) ++ s2(id)(tpe))
+ }
+ else {
+ Some(Seq(HasClass(tpe, WithFields(fs1 union fs2, intersect.map { id =>
+ id -> ((t: Type) => s1(id)(t) ++ s2(id)(t))
+ }.toMap))))
+ }
+ }
+ case (_, WithFields(_, _)) => None
+ case (WithFields(_, _), _) => None
+ case (WithIndices(is1), WithIndices(is2)) => {
+ val union: Seq[Int] = is1.keySet.union(is2.keySet).toSeq
+ val intersect: Seq[Int] = is1.keySet.intersect(is2.keySet).toSeq
+
+ val is3 = union.map { k =>
+ k -> is1.getOrElse(k, is2(k))
+ }.toMap
+
+ Some(HasClass(tpe, WithIndices(is3)) +: intersect.map { (i: Int) => Equals(is1(i), is2(i)) })
+ }
+ case (_, WithIndices(_)) => None
+ case (WithIndices(_), _) => None
+ case (Bits(signed1, _), Bits(signed2, _)) if signed1 != signed2 => None
+ case (Bits(signed, size1), Bits(_, size2)) => size1.combine(size2).map {
+ case Between(low, high) if low == high => Seq(Equals(tpe, BitVectorType(signed, low)))
+ case s3 => Seq(HasClass(tpe, Bits(signed, s3)))
+ }
+ case (b: Bits, _) => Some(Seq(HasClass(tpe, b)))
+ case (_, b: Bits) => Some(Seq(HasClass(tpe, b)))
+ case (Integral, _) => Some(Seq(HasClass(tpe, Integral)))
+ case (_, Integral) => Some(Seq(HasClass(tpe, Integral)))
+ case (Numeric, _) => Some(Seq(HasClass(tpe, Numeric)))
+ case (_, Numeric) => Some(Seq(HasClass(tpe, Numeric)))
+ case _ => Some(Seq(HasClass(tpe, Comparable)))
+ }
+
+ def accepts(tpe: Type): Option[Seq[Constraint]]
+ }
+
+ case class WithFields(fields: Set[String], sorts: Map[inox.Identifier, Type => Seq[Constraint]]) extends TypeClass {
+ override def accepts(tpe: Type) = tpe match {
+ case ADTType(id, _) => sorts.get(id).map{_.apply(tpe)}
+ case _ => None
+ }
+ }
+
+ case class WithIndices(indices: Map[Int, Type]) extends TypeClass {
+ override def accepts(tpe: Type) = tpe match {
+ case TupleType(es) => indices.toSeq.map { case (k, v) =>
+ if (es.size < k) None else Some(Equals(es(k - 1), v))
+ }.foldLeft(Option(Seq[Constraint]())) {
+ case (acc, constraint) => acc.flatMap { xs => constraint.map { x => xs :+ x }}
+ }
+ case _ => None
+ }
+ }
+
+ case object Comparable extends TypeClass {
+ override def accepts(tpe: Type) = tpe match {
+ case CharType() => Some(Seq())
+ case _ => Numeric.accepts(tpe)
+ }
+ }
+ case object Numeric extends TypeClass {
+ override def accepts(tpe: Type) = tpe match {
+ case RealType() => Some(Seq())
+ case _ => Integral.accepts(tpe)
+ }
+ }
+ case object Integral extends TypeClass {
+ override def accepts(tpe: Type) = tpe match {
+ case IntegerType() => Some(Seq())
+ case _ => Bits(true, NoSpec).accepts(tpe).orElse(Bits(false, NoSpec).accepts(tpe))
+ }
+ }
+ case class Bits(signed: Boolean, size: SizeSpec) extends TypeClass {
+ override def accepts(tpe: Type) = tpe match {
+ case BitVectorType(`signed`, value) => if (size.accepts(value)) Some(Seq()) else None
+ case _ => None
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/elaboration/SimpleADTs.scala b/src/main/scala/inox/parser/elaboration/SimpleADTs.scala
new file mode 100644
index 000000000..6d00da819
--- /dev/null
+++ b/src/main/scala/inox/parser/elaboration/SimpleADTs.scala
@@ -0,0 +1,46 @@
+package inox
+package parser
+package elaboration
+
+trait SimpleADTs { self: SimpleBindings with SimpleTypes with Trees with Constraints =>
+
+ object SimpleADTs {
+
+ case class Sort(
+ id: inox.Identifier,
+ optName: Option[String],
+ typeParams: Seq[SimpleBindings.TypeBinding],
+ constructors: Seq[Constructor])
+
+ def fromInox(s: trees.ADTSort): Option[Sort] =
+ try {
+ Some(Sort(
+ s.id,
+ Some(s.id.name),
+ s.tparams.map(SimpleBindings.fromInox(_)),
+ s.constructors.map(fromInox(_).get)))
+ }
+ catch {
+ case _: Exception => None
+ }
+
+
+ case class Constructor(
+ id: inox.Identifier,
+ optName: Option[String],
+ params: Seq[SimpleBindings.Binding],
+ sort: inox.Identifier)
+
+ def fromInox(c: trees.ADTConstructor): Option[Constructor] =
+ try {
+ Some(Constructor(
+ c.id,
+ Some(c.id.name),
+ c.fields.map(SimpleBindings.fromInox(_).get),
+ c.sort))
+ }
+ catch {
+ case _: Exception => None
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/elaboration/SimpleBindings.scala b/src/main/scala/inox/parser/elaboration/SimpleBindings.scala
new file mode 100644
index 000000000..722db2e96
--- /dev/null
+++ b/src/main/scala/inox/parser/elaboration/SimpleBindings.scala
@@ -0,0 +1,23 @@
+package inox
+package parser
+package elaboration
+
+trait SimpleBindings { self: SimpleTypes with Trees with Constraints =>
+
+ object SimpleBindings {
+ case class TypeBinding(id: Identifier, tpe: SimpleTypes.Type, evTpe: Eventual[trees.Type], name: Option[String])
+
+ def fromInox(tp: trees.TypeParameterDef): TypeBinding =
+ TypeBinding(tp.id, SimpleTypes.TypeParameter(tp.id), Eventual.pure(tp.tp), None)
+
+ case class Binding(id: inox.Identifier, tpe: SimpleTypes.Type, evValDef: Eventual[trees.ValDef], name: Option[String]) {
+ val evTpe = evValDef.map(_.tpe)
+
+ def forgetName: Binding = copy(name=None)
+ }
+
+ def fromInox(vd: trees.ValDef): Option[Binding] = SimpleTypes.fromInox(vd.tpe).map { st =>
+ Binding(vd.id, st, Eventual.pure(vd), Some(vd.id.name))
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/elaboration/SimpleFunctions.scala b/src/main/scala/inox/parser/elaboration/SimpleFunctions.scala
new file mode 100644
index 000000000..dc5a890d6
--- /dev/null
+++ b/src/main/scala/inox/parser/elaboration/SimpleFunctions.scala
@@ -0,0 +1,31 @@
+package inox
+package parser
+package elaboration
+
+trait SimpleFunctions { self: SimpleBindings with SimpleTypes with Trees with Constraints =>
+
+ object SimpleFunctions {
+ case class Function(
+ id: inox.Identifier,
+ optName: Option[String],
+ typeParams: Seq[SimpleBindings.TypeBinding],
+ params: Seq[SimpleBindings.Binding],
+ retTpe: SimpleTypes.Type,
+ evRetTpe: Eventual[trees.Type])
+
+
+ def fromInox(fd: trees.FunDef): Option[Function] =
+ try {
+ Some(Function(
+ fd.id,
+ Some(fd.id.name),
+ fd.tparams.map(SimpleBindings.fromInox(_)),
+ fd.params.map(SimpleBindings.fromInox(_).get),
+ SimpleTypes.fromInox(fd.returnType).get,
+ Eventual.pure(fd.returnType)))
+ }
+ catch {
+ case _: Exception => None
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/elaboration/SimpleTypes.scala b/src/main/scala/inox/parser/elaboration/SimpleTypes.scala
new file mode 100644
index 000000000..34073bac4
--- /dev/null
+++ b/src/main/scala/inox/parser/elaboration/SimpleTypes.scala
@@ -0,0 +1,149 @@
+package inox
+package parser
+package elaboration
+
+import scala.util.parsing.input._
+
+trait SimpleTypes { self: Trees =>
+
+ object SimpleTypes {
+
+ sealed abstract class Type extends Positional {
+
+ def withPos(pos: Position): Type = this match {
+ case UnitType() => UnitType().setPos(pos)
+ case BooleanType() => BooleanType().setPos(pos)
+ case BitVectorType(signed, size) => BitVectorType(signed, size).setPos(pos)
+ case IntegerType() => IntegerType().setPos(pos)
+ case StringType() => StringType().setPos(pos)
+ case CharType() => CharType().setPos(pos)
+ case RealType() => RealType().setPos(pos)
+ case FunctionType(f, t) => FunctionType(f, t).setPos(pos)
+ case SetType(t) => SetType(t).setPos(pos)
+ case BagType(t) => BagType(t).setPos(pos)
+ case MapType(f, t) => MapType(f, t).setPos(pos)
+ case TupleType(ts) => TupleType(ts).setPos(pos)
+ case ADTType(i, as) => ADTType(i, as).setPos(pos)
+ case TypeParameter(id) => TypeParameter(id).setPos(pos)
+ case u: Unknown => u.copy().setPos(pos)
+ case _ => this
+ }
+
+ def contains(unknown: Unknown): Boolean = this match {
+ case other: Unknown => unknown == other
+ case FunctionType(froms, to) => froms.exists(_.contains(unknown)) || to.contains(unknown)
+ case MapType(from, to) => from.contains(unknown) || to.contains(unknown)
+ case SetType(elem) => elem.contains(unknown)
+ case BagType(elem) => elem.contains(unknown)
+ case TupleType(elems) => elems.exists(_.contains(unknown))
+ case ADTType(_, args) => args.exists(_.contains(unknown))
+ case _ => false
+ }
+
+ def unknowns: Set[Unknown] = this match {
+ case unknown: Unknown => Set(unknown)
+ case FunctionType(froms, to) => froms.map(_.unknowns).fold(to.unknowns)(_ union _)
+ case MapType(from, to) => from.unknowns union to.unknowns
+ case SetType(elem) => elem.unknowns
+ case BagType(elem) => elem.unknowns
+ case TupleType(elems) => elems.map(_.unknowns).fold(Set[Unknown]())(_ union _)
+ case ADTType(_, args) => args.map(_.unknowns).fold(Set[Unknown]())(_ union _)
+ case _ => Set()
+ }
+
+ def replaceTypeParams(mapping: Map[inox.Identifier, Type]): Type = this match {
+ case TypeParameter(id) => mapping.getOrElse(id, this)
+ case FunctionType(froms, to) => FunctionType(froms.map(_.replaceTypeParams(mapping)), to.replaceTypeParams(mapping))
+ case MapType(from, to) => MapType(from.replaceTypeParams(mapping), to.replaceTypeParams(mapping))
+ case SetType(elem) => SetType(elem.replaceTypeParams(mapping))
+ case BagType(elem) => BagType(elem.replaceTypeParams(mapping))
+ case TupleType(elems) => TupleType(elems.map(_.replaceTypeParams(mapping)))
+ case ADTType(id, elems) => ADTType(id, elems.map(_.replaceTypeParams(mapping)))
+ case _ => this
+ }
+ }
+ case class UnitType() extends Type
+ case class BooleanType() extends Type
+ case class BitVectorType(signed: Boolean, size: Int) extends Type
+ case class IntegerType() extends Type
+ case class StringType() extends Type
+ case class CharType() extends Type
+ case class RealType() extends Type
+ case class FunctionType(froms: Seq[Type], to: Type) extends Type
+ case class MapType(from: Type, to: Type) extends Type
+ case class SetType(elem: Type) extends Type
+ case class BagType(elem: Type) extends Type
+ case class TupleType(elems: Seq[Type]) extends Type
+ case class ADTType(identifier: inox.Identifier, args: Seq[Type]) extends Type
+ case class TypeParameter(identifier: inox.Identifier) extends Type
+
+ final class Unknown private(private val identifier: Int) extends Type {
+ override def equals(that: Any): Boolean =
+ that.isInstanceOf[Unknown] && that.asInstanceOf[Unknown].identifier == identifier
+ override def hashCode(): Int = identifier
+ override def toString: String = "Unknown(" + identifier + ")"
+
+ def copy(): Unknown = new Unknown(identifier)
+ }
+
+ object Unknown {
+ private var next: Int = 0
+
+ def fresh: Unknown = synchronized {
+ val ret = next
+ next += 1
+ new Unknown(ret)
+ }
+ }
+
+ def fromInox(tpe: trees.Type): Option[Type] = tpe match {
+ case trees.Untyped => None
+ case trees.BooleanType() => Some(BooleanType())
+ case trees.UnitType() => Some(UnitType())
+ case trees.CharType() => Some(CharType())
+ case trees.IntegerType() => Some(IntegerType())
+ case trees.RealType() => Some(RealType())
+ case trees.StringType() => Some(StringType())
+ case trees.BVType(signed, size) => Some(BitVectorType(signed, size))
+ case trees.TypeParameter(id, _) => Some(TypeParameter(id))
+ case trees.TupleType(ts) => ts.foldLeft(Option(Seq[Type]())) {
+ case (acc, t) => acc.flatMap(xs => fromInox(t).map(x => xs :+ x))
+ }.map(TupleType(_))
+ case trees.SetType(t) => fromInox(t).map(SetType(_))
+ case trees.BagType(t) => fromInox(t).map(BagType(_))
+ case trees.MapType(f, t) => fromInox(f).flatMap(sf => fromInox(t).map(st => MapType(sf, st)))
+ case trees.FunctionType(fs, t) => fs.foldLeft(Option(Seq[Type]())) {
+ case (acc, f) => acc.flatMap(xs => fromInox(f).map(x => xs :+ x))
+ }.flatMap(sfs => fromInox(t).map(st => FunctionType(sfs, st)))
+ case trees.ADTType(id, args) => args.foldLeft(Option(Seq[Type]())) {
+ case (acc, f) => acc.flatMap(xs => fromInox(f).map(x => xs :+ x))
+ }.map(ADTType(id, _))
+ case trees.PiType(vds, t) => vds.foldLeft(Option(Seq[Type]())) {
+ case (acc, vd) => acc.flatMap(xs => fromInox(vd.tpe).map(x => xs :+ x))
+ }.flatMap(sfs => fromInox(t).map(st => FunctionType(sfs, st)))
+ case trees.SigmaType(vds, t) => vds.foldLeft(Option(Seq[Type]())) {
+ case (acc, vd) => acc.flatMap(xs => fromInox(vd.tpe).map(x => xs :+ x))
+ }.flatMap(sfs => fromInox(t).map(st => TupleType(sfs :+ st)))
+ case trees.RefinementType(vd, _) => fromInox(vd.tpe)
+ case _ => None
+ }
+
+ def toInox(tpe: Type): trees.Type = tpe match {
+ case u: Unknown => throw new IllegalArgumentException("Unexpected Unknown.")
+ case UnitType() => trees.UnitType()
+ case BooleanType() => trees.BooleanType()
+ case BitVectorType(signed, size) => trees.BVType(signed, size)
+ case IntegerType() => trees.IntegerType()
+ case StringType() => trees.StringType()
+ case CharType() => trees.CharType()
+ case RealType() => trees.RealType()
+ case FunctionType(froms, to) => trees.FunctionType(froms.map(toInox), toInox(to))
+ case MapType(from, to) => trees.MapType(toInox(from), toInox(to))
+ case SetType(elem) => trees.SetType(toInox(elem))
+ case BagType(elem) => trees.BagType(toInox(elem))
+ case TupleType(elems) => trees.TupleType(elems.map(toInox))
+ case ADTType(id, args) => trees.ADTType(id, args.map(toInox))
+ case TypeParameter(id) => trees.TypeParameter(id, Seq())
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/elaboration/Solvers.scala b/src/main/scala/inox/parser/elaboration/Solvers.scala
new file mode 100644
index 000000000..d51c0377f
--- /dev/null
+++ b/src/main/scala/inox/parser/elaboration/Solvers.scala
@@ -0,0 +1,286 @@
+package inox.parser.elaboration
+
+import inox.parser.{ElaborationErrors, IRs}
+import scala.util.parsing.input.Position
+
+trait Solvers {
+ self: Constraints with SimpleTypes with IRs with ElaborationErrors =>
+
+ import SimpleTypes._
+ import TypeClasses._
+ import Constraints.{HasClass, Exists, Equals, OneOf}
+
+ /**
+ * Checks if two types are compatible in the type option sense
+ *
+ * @param ltpe left hand side type
+ * @param rtpe right hand side type
+   * @return true if the two types are compatible, false otherwise
+ */
+ private def isCompatible(ltpe: SimpleTypes.Type, rtpe: SimpleTypes.Type): Boolean = (ltpe, rtpe) match {
+ case (_: Unknown, _) => true
+ case (_, _: Unknown) => true
+ case (UnitType(), UnitType()) => true
+ case (IntegerType(), IntegerType()) => true
+ case (BitVectorType(signed1, size1), BitVectorType(signed2, size2)) if signed1 == signed2 && size1 == size2 => true
+ case (BooleanType(), BooleanType()) => true
+ case (StringType(), StringType()) => true
+ case (CharType(), CharType()) => true
+ case (RealType(), RealType()) => true
+ case (FunctionType(fs1, t1), FunctionType(fs2, t2)) if fs1.size == fs2.size =>
+ fs1.zip(fs2).forall(a => isCompatible(a._1, a._2)) && isCompatible(t1, t2)
+ case (TupleType(es1), TupleType(es2)) if es1.size == es2.size =>
+ es1.zip(es2).forall(a => isCompatible(a._1, a._2))
+ case (MapType(f1, t1), MapType(f2, t2)) => isCompatible(f1, f2) && isCompatible(t1, t2)
+ case (SetType(e1), SetType(e2)) => isCompatible(e1, e2)
+ case (BagType(e1), BagType(e2)) => isCompatible(e1, e2)
+ case (ADTType(i1, as1), ADTType(i2, as2)) if i1 == i2 && as1.size == as2.size =>
+ as1.zip(as2).forall(a => isCompatible(a._1, a._2))
+ case (TypeParameter(i1), TypeParameter(i2)) if i1 == i2 => true
+ case _ => false
+ }
+
+
+ def noUnknown(tpe: SimpleTypes.Type): Boolean = tpe match {
+ case _: Unknown =>
+ false
+ case FunctionType(fs1, t1) =>
+ fs1.forall(a => noUnknown(a)) && noUnknown(t1)
+ case TupleType(es1) =>
+ es1.forall(noUnknown)
+ case MapType(f1, t1) => noUnknown(f1) && noUnknown(t1)
+ case SetType(e1) => noUnknown(e1)
+ case BagType(e1) => noUnknown(e1)
+ case ADTType(_, as1) =>
+ as1.forall(noUnknown)
+ case _ => true
+ }
+
+ def solve(constraints: Seq[Constraint]): Either[ErrorMessage, Unifier] = {
+
+ case class UnificationError(message: Seq[Position] => ErrorMessage, positions: Seq[Position]) extends Exception(message(positions))
+
+ var unknowns: Set[Unknown] = Set()
+ var remaining: Seq[Constraint] = constraints
+ var typeClasses: Map[Unknown, TypeClass] = Map()
+ var unifier: Unifier = Unifier.empty
+    // maps an unknown to its option sequence; if a mapping exists, the sequence size is greater than 1
+ var typeOptionsMap: Map[SimpleTypes.Unknown, (SimpleTypes.Type, Seq[SimpleTypes.Type])] = Map()
+
+ def unify(unknown: Unknown, value: Type) {
+
+ typeClasses.get(unknown).foreach { tc =>
+ remaining :+= HasClass(value, tc)
+ }
+ typeClasses -= unknown
+
+ typeOptionsMap.get(unknown).foreach { options =>
+ simplifyTypeOptions(unknown, value, None, Some(options))
+ }
+
+
+ val singleton = Unifier(unknown -> value)
+
+ typeClasses = singleton(typeClasses)
+ remaining = singleton(remaining)
+ typeOptionsMap = singleton(typeOptionsMap)
+
+ unknowns -= unknown
+
+ unifier += (unknown -> value)
+ }
+
+
+ /**
+      * Tries to simplify the currently processed OneOf as much as possible, possibly using the fact that there already
+ * exists a mapping for a type
+ * @param unknown type for which we have a OneOf constraint
+ * @param value new template which the unknown should have
+      * @param newOptions type options of the new OneOf
+ * @param existing mapping inside the typeOptionsMap
+ */
+ def simplifyTypeOptions(unknown: SimpleTypes.Unknown, value: SimpleTypes.Type, newOptions: Option[Seq[SimpleTypes.Type]],
+ existing: Option[(SimpleTypes.Type, Seq[SimpleTypes.Type])]): Unit = {
+ var mappings: Map[Unknown, Seq[SimpleTypes.Type]] = Map()
+
+ def collectOptions(tpe: Type, typeOption: Type): Unit = (tpe, typeOption) match {
+ case (u1: Unknown, u2: Unknown) =>
+ mappings += (u1 -> (mappings.getOrElse(u1, Seq.empty) :+ u2))
+ mappings += (u2 -> (mappings.getOrElse(u2, Seq.empty) :+ u1))
+ case (u: Unknown, _) => mappings += (u -> (mappings.getOrElse(u, Seq.empty) :+ typeOption))
+ case (_, u: Unknown) => mappings += (u -> (mappings.getOrElse(u, Seq.empty) :+ tpe))
+ case (UnitType(), UnitType()) => ()
+ case (IntegerType(), IntegerType()) => ()
+ case (StringType(), StringType()) => ()
+ case (RealType(), RealType()) => ()
+ case (BooleanType(), BooleanType()) => ()
+ case (BitVectorType(signed1, size1), BitVectorType(signed2, size2)) if signed1 == signed2 && size1 == size2 => ()
+ case (CharType(), CharType()) => ()
+ case (FunctionType(fs1, t1), FunctionType(fs2, t2)) if fs1.size == fs2.size =>
+ fs1.zip(fs2).foreach(pair => collectOptions(pair._1, pair._2))
+ collectOptions(t1, t2)
+ case (TupleType(es1), TupleType(es2)) =>
+ es1.zip(es2).foreach(pair => collectOptions(pair._1, pair._2))
+ case (MapType(f1, t1), MapType(f2, t2)) =>
+ collectOptions(f1, f2)
+ collectOptions(t1, t2)
+ case (SetType(t1), SetType(t2)) =>
+ collectOptions(t1, t2)
+ case (BagType(t1), BagType(t2)) =>
+ collectOptions(t1, t2)
+ case (ADTType(i1, as1), ADTType(i2, as2)) if i1 == i2 && as1.size == as2.size =>
+ as1.zip(as2).foreach { pair => collectOptions(pair._1, pair._2) }
+ case (TypeParameter(i1), TypeParameter(i2)) if i1 == i2 => ()
+ case _ => throw new Exception("Two types are not compatible!!! Should never happen")
+ }
+
+
+ val possibleOptions = existing match {
+ case None =>
+ val allOptions = newOptions.getOrElse(Seq.empty)
+ .filter(a => isCompatible(value, a))
+
+ allOptions.distinct
+ case Some((tpe, options)) =>
+ if (!isCompatible(value, tpe))
+ unificationImpossible(tpe, value)
+ val allOptions = newOptions.getOrElse(Seq.empty)
+ .filter(a => isCompatible(value, a) && isCompatible(tpe, a)) ++
+ options.filter(a => isCompatible(value, a) && isCompatible(tpe, a))
+
+ allOptions.distinct
+ }
+
+
+ if (possibleOptions.isEmpty)
+ throw UnificationError(unificationImpossible(unknown, value), Seq(unknown.pos))
+ else if (possibleOptions.size == 1) {
+ remaining :+= Constraint.equal(value, possibleOptions.head)
+ remaining :+= Constraint.equal(unknown, value)
+ // remaining :+= Constraint.equal(unknown, value)
+ typeOptionsMap = typeOptionsMap - unknown
+ } else {
+ typeOptionsMap = typeOptionsMap - unknown
+ typeOptionsMap = typeOptionsMap + (unknown -> (value, possibleOptions))
+ }
+
+ }
+
+ def handle(constraint: Constraint): Unit = constraint match {
+ case Exists(tpe) => tpe match {
+ case u: Unknown => unknowns += u
+ case _ => remaining ++= tpe.unknowns.map(Exists(_))
+ }
+ case Equals(tpe1, tpe2) => (tpe1, tpe2) match {
+ case (u1: Unknown, u2: Unknown) => if (u1 != u2) unify(u1, u2) else unknowns += u1
+ case (u1: Unknown, _) => if (!tpe2.contains(u1)) unify(u1, tpe2) else throw UnificationError(unificationImpossible(tpe1, tpe2), Seq(tpe1.pos, tpe2.pos))
+ case (_, u2: Unknown) => if (!tpe1.contains(u2)) unify(u2, tpe1) else throw UnificationError(unificationImpossible(tpe1, tpe2), Seq(tpe1.pos, tpe2.pos))
+ case (UnitType(), UnitType()) => ()
+ case (IntegerType(), IntegerType()) => ()
+ case (BitVectorType(signed1, size1), BitVectorType(signed2, size2)) if signed1 == signed2 && size1 == size2 => ()
+ case (BooleanType(), BooleanType()) => ()
+ case (StringType(), StringType()) => ()
+ case (CharType(), CharType()) => ()
+ case (RealType(), RealType()) => ()
+ case (FunctionType(fs1, t1), FunctionType(fs2, t2)) if fs1.size == fs2.size => {
+ remaining ++= fs1.zip(fs2).map { case (f1, f2) => Equals(f1, f2) }
+ remaining :+= Equals(t1, t2)
+ }
+ case (TupleType(es1), TupleType(es2)) if es1.size == es2.size =>
+ remaining ++= es1.zip(es2).map { case (e1, e2) => Equals(e1, e2) }
+ case (MapType(f1, t1), MapType(f2, t2)) => {
+ remaining :+= Equals(f1, f2)
+ remaining :+= Equals(t1, t2)
+ }
+ case (SetType(e1), SetType(e2)) => remaining :+= Equals(e1, e2)
+ case (BagType(e1), BagType(e2)) => remaining :+= Equals(e1, e2)
+ case (ADTType(i1, as1), ADTType(i2, as2)) if i1 == i2 && as1.size == as2.size =>
+ remaining ++= as1.zip(as2).map { case (a1, a2) => Equals(a1, a2) }
+ case (TypeParameter(i1), TypeParameter(i2)) if i1 == i2 => ()
+ case _ => throw UnificationError(unificationImpossible(tpe1, tpe2), Seq(tpe1.pos, tpe2.pos))
+ }
+ case HasClass(tpe, tc) => tpe match {
+ case u: Unknown => {
+ unknowns += u
+ typeClasses.get(u) match {
+ case None => typeClasses += (u -> tc)
+ case Some(tc2) => tc.combine(tc2)(tpe) match {
+ case None => throw UnificationError(incompatibleTypeClasses(tc, tc2), Seq(tpe.pos))
+ case Some(cs) => {
+ typeClasses -= u
+ remaining ++= cs
+ }
+ }
+ }
+ }
+ case _ => tc.accepts(tpe) match {
+ case None => throw UnificationError(notMemberOfTypeClasses(tpe, tc), Seq(tpe.pos))
+ case Some(cs) => remaining ++= cs
+ }
+ }
+
+ case OneOf(unknown, tpe, typeOptions) => tpe match {
+ case _: Unknown if typeOptions.size > 1 =>
+ typeOptionsMap += (unknown -> (tpe, typeOptions))
+ case _: Unknown =>
+ remaining :+= Equals(unknown, tpe)
+ remaining :+= Equals(unknown, typeOptions.head)
+ case _ =>
+ simplifyTypeOptions(unknown, tpe, None, Some((tpe, typeOptions)))
+ }
+ }
+
+
+ try {
+ while (unknowns.nonEmpty || remaining.nonEmpty) {
+
+ while (remaining.nonEmpty) {
+ val constraint = remaining.head
+ remaining = remaining.tail
+
+ handle(constraint)
+ }
+
+ if (typeOptionsMap.nonEmpty) {
+ typeOptionsMap.foreach {
+ case (key: SimpleTypes.Unknown, (tpe: SimpleTypes.Type, options: Seq[SimpleTypes.Type])) =>
+ val opt = options.filter(isCompatible(tpe, _))
+ typeOptionsMap = typeOptionsMap - key
+ if (opt.isEmpty) {
+ throw UnificationError(unificationImpossible(key, tpe), Seq(key.pos, tpe.pos))
+ } else if (opt.size == 1) {
+ remaining :+= Equals(tpe, opt.head)
+ remaining :+= Equals(key, tpe)
+ } else {
+ typeOptionsMap = typeOptionsMap.updated(key, (tpe, opt))
+ }
+ }
+ }
+
+ if (remaining.isEmpty && unknowns.nonEmpty) {
+ val defaults = typeClasses.collect {
+ case (u, Integral) => u -> IntegerType()
+ case (u, Numeric) => u -> IntegerType()
+ }.toSeq
+
+ defaults.foreach {
+ case (u, t) => remaining :+= Equals(u, t)
+ }
+
+ if (defaults.isEmpty) {
+ throw UnificationError(ambiguousTypes, unknowns.toSeq.map(_.pos))
+ }
+ }
+ }
+
+ if (typeOptionsMap.nonEmpty) {
+ throw UnificationError(ambiguousTypes, typeOptionsMap.keySet.toSeq.map(_.pos))
+ }
+
+ Right(unifier)
+ }
+ catch {
+ case UnificationError(error, positions) => Left(error(positions))
+ }
+ }
+}
diff --git a/src/main/scala/inox/parser/elaboration/elaborators/ADTsElaborators.scala b/src/main/scala/inox/parser/elaboration/elaborators/ADTsElaborators.scala
new file mode 100644
index 000000000..d9f9fa976
--- /dev/null
+++ b/src/main/scala/inox/parser/elaboration/elaborators/ADTsElaborators.scala
@@ -0,0 +1,54 @@
+package inox
+package parser
+package elaboration
+package elaborators
+
+trait ADTsElaborators { self: Elaborators =>
+
+ import ADTs._
+
+ class EmptySortE extends Elaborator[Sort, SimpleADTs.Sort] {
+ override def elaborate(sort: Sort)(implicit store: Store): Constrained[SimpleADTs.Sort] = for {
+ (i, optName) <- DefIdE.elaborate(sort.identifier)
+ typeBindings <- DefIdSeqE.elaborate(sort.typeParams).map(_.map({
+ case (varId, optVarName) => SimpleBindings.TypeBinding(
+ varId, SimpleTypes.TypeParameter(varId), Eventual.pure(trees.TypeParameter(varId, Seq())), optVarName)
+ }))
+ } yield SimpleADTs.Sort(i, optName, typeBindings, Seq())
+ }
+ val EmptySortE = new EmptySortE
+
+ class SortE extends Elaborator[Sort, (SimpleADTs.Sort, Eventual[trees.ADTSort])] {
+ override def elaborate(sort: Sort)(implicit store: Store): Constrained[(SimpleADTs.Sort, Eventual[trees.ADTSort])] = for {
+ s <- EmptySortE.elaborate(sort)
+ (scs, ecs) <- new ConstructorSeqE(s.id).elaborate(sort.constructors)({
+ store
+ .addSort(s)
+ .addTypeBindings(s.typeParams)
+ }).map(_.unzip)
+ fieldNames = scs.flatMap(_.params.flatMap(_.name))
+ _ <- Constrained.checkImmediate(fieldNames.toSet.size == fieldNames.size, sort, fieldsNotDistincts)
+ } yield (s.copy(constructors=scs), Eventual.withUnifier { implicit unifier =>
+ new trees.ADTSort(s.id, s.typeParams.map(tb => trees.TypeParameterDef(tb.id, Seq())), ecs.map(_.get), Seq()) })
+ }
+ val SortE = new SortE
+
+ class ConstructorE(sortId: inox.Identifier) extends Elaborator[Constructor, (SimpleADTs.Constructor, Eventual[trees.ADTConstructor])] {
+ override def elaborate(constructor: Constructor)(implicit store: Store): Constrained[(SimpleADTs.Constructor, Eventual[trees.ADTConstructor])] = constructor match {
+ case ConstructorValue(identifier, parameters) => for {
+ (id, optName) <- DefIdE.elaborate(identifier)
+ params <- BindingSeqE.elaborate(parameters)
+ } yield (SimpleADTs.Constructor(id, optName, params, sortId), Eventual.withUnifier { implicit unifier =>
+ new trees.ADTConstructor(id, sortId, params.map(_.evValDef.get)) })
+ case ConstructorHole(index) => Constrained.fail(unsupportedHoleTypeForElaboration("ADTConstructor")(constructor.pos))
+ }
+ }
+
+ class ConstructorSeqE(sortId: inox.Identifier) extends HSeqE[Constructor, trees.ADTConstructor, (SimpleADTs.Constructor, Eventual[trees.ADTConstructor])]("ADTConstructor") {
+
+ override val elaborator = new ConstructorE(sortId)
+
+ def wrap(c: trees.ADTConstructor, where: IR)(implicit store: Store): Constrained[(SimpleADTs.Constructor, Eventual[trees.ADTConstructor])] =
+ Constrained.fail(unsupportedHoleTypeForElaboration("ADTConstructor")(where.pos))
+ }
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/elaboration/elaborators/BindingElaborators.scala b/src/main/scala/inox/parser/elaboration/elaborators/BindingElaborators.scala
new file mode 100644
index 000000000..56ab76947
--- /dev/null
+++ b/src/main/scala/inox/parser/elaboration/elaborators/BindingElaborators.scala
@@ -0,0 +1,72 @@
+package inox
+package parser
+package elaboration
+package elaborators
+
+trait BindingElaborators { self: Elaborators =>
+
+ import Bindings._
+
+ class BindingE extends Elaborator[Binding, SimpleBindings.Binding] {
+
+ override def elaborate(template: Binding)(implicit store: Store): Constrained[SimpleBindings.Binding] = template match {
+ case BindingHole(index) => store.getHole[trees.ValDef](index) match {
+ case None => Constrained.fail(invalidHoleType("ValDef")(template.pos))
+ case Some(vd) => Constrained.attempt(SimpleBindings.fromInox(vd).map(_.forgetName), template, invalidInoxValDef(vd))
+ }
+ case ExplicitValDef(id, tpe) => for {
+ (i, on) <- DefIdE.elaborate(id)
+ (st, et) <- TypeE.elaborate(tpe)
+ } yield SimpleBindings.Binding(i, st, Eventual.withUnifier { implicit unifier =>
+ trees.ValDef(i, et.get)
+ }, on)
+ case InferredValDef(id) => {
+ val u = SimpleTypes.Unknown.fresh.setPos(template.pos)
+ for {
+ (i, on) <- DefIdE.elaborate(id).addConstraint(Constraint.exist(u))
+ } yield {
+
+ val vd = Eventual.withUnifier { unifier =>
+ trees.ValDef(i, SimpleTypes.toInox(unifier.get(u)))
+ }
+ val sb = SimpleBindings.Binding(i, u, vd, on)
+
+ sb
+ }
+ }
+ }
+ }
+ val BindingE = new BindingE
+
+ class BindingSeqE extends HSeqE[Binding, trees.ValDef, SimpleBindings.Binding]("ValDef") {
+
+ override val elaborator = BindingE
+ override def wrap(vd: trees.ValDef, where: IR)(implicit store: Store): Constrained[SimpleBindings.Binding] =
+ Constrained.attempt(SimpleBindings.fromInox(vd).map(_.forgetName), where, invalidInoxValDef(vd))
+
+ override def elaborate(template: HSeq[Binding])(implicit store: Store): Constrained[Seq[SimpleBindings.Binding]] = {
+ val elems = template.elems
+
+ val sequence = elems.foldRight((store: Store) => Constrained.pure(Seq[SimpleBindings.Binding]())) {
+ case (x, f) => (store: Store) => x match {
+ case Left(r) => {
+ val c: Constrained[Seq[SimpleBindings.Binding]] = store.getHole[Seq[trees.ValDef]](r.index) match {
+ case None => Constrained.fail(invalidHoleType("Seq[ValDef]")(r.pos))
+ case Some(xs) => Constrained.sequence(xs.map(wrap(_, r)(store)))
+ }
+
+ c.flatMap { (as: Seq[SimpleBindings.Binding]) =>
+ f(store).map((bs: Seq[SimpleBindings.Binding]) => as ++ bs)
+ }
+ }
+ case Right(t) => elaborator.elaborate(t)(store).flatMap { (b: SimpleBindings.Binding) =>
+ f(store.addBinding(b)).map((bs: Seq[SimpleBindings.Binding]) => b +: bs)
+ }
+ }
+ }
+
+ sequence(store)
+ }
+ }
+ val BindingSeqE = new BindingSeqE
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/elaboration/elaborators/ExprElaborators.scala b/src/main/scala/inox/parser/elaboration/elaborators/ExprElaborators.scala
new file mode 100644
index 000000000..11a8d9979
--- /dev/null
+++ b/src/main/scala/inox/parser/elaboration/elaborators/ExprElaborators.scala
@@ -0,0 +1,600 @@
+package inox
+package parser
+package elaboration
+package elaborators
+
+trait ExprElaborators { self: Elaborators =>
+
+ import Exprs._
+
+ class ExprE extends Elaborator[Expr, (SimpleTypes.Type, Eventual[trees.Expr])] {
+ override def elaborate(template: Expr)(implicit store: Store): Constrained[(SimpleTypes.Type, Eventual[trees.Expr])] = template match {
+ case ExprHole(index) => Constrained.attempt(store.getHole[trees.Expr](index), template, invalidHoleType("Expr")).flatMap { expr =>
+ Constrained.attempt(SimpleTypes.fromInox(expr.getType(store.getSymbols)).map(_.setPos(template.pos)), template, invalidInoxExpr(expr)).map { st =>
+ (st, Eventual.pure(expr))
+ }
+ }
+ case Variable(id) => for {
+ i <- ExprUseIdE.elaborate(id)
+ (st, et) <- Constrained.attempt(store.getVariable(i), template, functionUsedAsVariable(i.name))
+ } yield (st.withPos(template.pos), et.map(trees.Variable(i, _, Seq())))
+ case UnitLiteral() =>
+ Constrained.pure((SimpleTypes.UnitType().setPos(template.pos), Eventual.pure(trees.UnitLiteral())))
+ case BooleanLiteral(value) =>
+ Constrained.pure((SimpleTypes.BooleanType().setPos(template.pos), Eventual.pure(trees.BooleanLiteral(value))))
+ case IntegerLiteral(value) => {
+ val u = SimpleTypes.Unknown.fresh.setPos(template.pos)
+ val v = Eventual.withUnifier { unifier =>
+ unifier.get(u) match {
+ case SimpleTypes.BitVectorType(true, size) => trees.BVLiteral(true, value, size)
+ case SimpleTypes.BitVectorType(false, size) => {
+ if (value >= 0) {
+ trees.BVLiteral(false, value, size)
+ }
+ else {
+ val complement = value.mod(BigInt(2).pow(size))
+ trees.BVLiteral(false, complement, size)
+ }
+ }
+ case SimpleTypes.IntegerType() => trees.IntegerLiteral(value)
+ case SimpleTypes.RealType() => trees.FractionLiteral(value, 1)
+ case _ => throw new IllegalStateException("Unifier returned unexpected value.")
+ }
+ }
+ Constrained.pure((u, v)).addConstraint(Constraint.isNumeric(u))
+ }
+ case FractionLiteral(numerator, denominator) =>
+ Constrained.pure((SimpleTypes.RealType().setPos(template.pos), Eventual.pure(trees.FractionLiteral(numerator, denominator))))
+ case StringLiteral(string) =>
+ Constrained.pure((SimpleTypes.StringType().setPos(template.pos), Eventual.pure(trees.StringLiteral(string))))
+ case CharLiteral(value) =>
+ Constrained.pure((SimpleTypes.CharType().setPos(template.pos), Eventual.pure(trees.CharLiteral(value))))
+ case SetConstruction(optTypes, elems) => for {
+ (st, et) <- optTypes
+ .map(TypeSeqE.elaborate(_))
+ .getOrElse(Constrained.sequence(Seq.fill(1) {
+ OptTypeE.elaborate(Left(template.pos))
+ }))
+ .checkImmediate(_.size == 1, template, xs => wrongNumberOfTypeArguments("Set", 1, xs.size))
+ .map(_.head)
+ (sts, evs) <- ExprSeqE.elaborate(elems).map(_.unzip)
+ _ <- Constrained(sts.map(Constraint.equal(_, st)) : _*)
+ } yield (SimpleTypes.SetType(st).setPos(template.pos), Eventual.withUnifier { implicit unifier =>
+ trees.FiniteSet(evs.map(_.get), et.get)
+ })
+ case BagConstruction(optTypes, elems) => for {
+ (st, et) <- optTypes
+ .map(TypeSeqE.elaborate(_))
+ .getOrElse(Constrained.sequence(Seq.fill(1) {
+ OptTypeE.elaborate(Left(template.pos))
+ }))
+ .checkImmediate(_.size == 1, template, xs => wrongNumberOfTypeArguments("Bag", 1, xs.size))
+ .map(_.head)
+ (stps, evps) <- ExprPairSeqE.elaborate(elems).map(_.unzip)
+ (sks, sts) = stps.unzip
+ _ <- Constrained(sks.map(Constraint.equal(_, st)) : _*)
+ _ <- Constrained(sts.map(Constraint.equal(_, SimpleTypes.IntegerType().setPos(template.pos))) : _*)
+ } yield (SimpleTypes.BagType(st).setPos(template.pos), Eventual.withUnifier { implicit unifier =>
+ trees.FiniteBag(evps.map(_.get), et.get)
+ })
+ case MapConstruction(optTypes, elems, default) => for {
+ (Seq(stf, stt), Seq(etf, ett)) <- optTypes
+ .map(TypeSeqE.elaborate(_))
+ .getOrElse(Constrained.sequence(Seq.fill(2) {
+ OptTypeE.elaborate(Left(template.pos))
+ }))
+ .checkImmediate(_.size == 2, template, xs => wrongNumberOfTypeArguments("Map", 2, xs.size))
+ .map(_.unzip)
+ (stps, evps) <- ExprPairSeqE.elaborate(elems).map(_.unzip)
+ (sks, sts) = stps.unzip
+ (std, ed) <- ExprE.elaborate(default)
+ .addConstraints(sks.map(Constraint.equal(_, stf)))
+ .addConstraints(sts.map(Constraint.equal(_, stt)))
+ _ <- Constrained(Constraint.equal(std, stt))
+ } yield (SimpleTypes.MapType(stf, stt).setPos(template.pos), Eventual.withUnifier { implicit unifier =>
+ trees.FiniteMap(evps.map(_.get), ed.get, etf.get, ett.get)
+ })
+ case Abstraction(quantifier, bindings, body) => for {
+ bs <- BindingSeqE.elaborate(bindings)
+ (stb, evb) <- ExprE.elaborate(body)(store.addBindings(bs))
+ } yield quantifier match {
+ case Lambda =>
+ (SimpleTypes.FunctionType(bs.map(_.tpe), stb).setPos(template.pos), Eventual.withUnifier { implicit unifier =>
+ trees.Lambda(bs.map(_.evValDef.get), evb.get)
+ })
+ case Forall =>
+ (SimpleTypes.BooleanType().setPos(template.pos), Eventual.withUnifier { implicit unifier =>
+ trees.Forall(bs.map(_.evValDef.get), evb.get)
+ })
+ }
+ case Application(callee, args) => {
+ val u = SimpleTypes.Unknown.fresh.setPos(template.pos)
+ for {
+ (stc, evc) <- ExprE.elaborate(callee)
+ (sts, evs) <- ExprSeqE.elaborate(args).map(_.unzip)
+ _ <- Constrained(Constraint.equal(SimpleTypes.FunctionType(sts, u).setPos(template.pos), stc))
+ } yield (u, Eventual.withUnifier { implicit unifier =>
+ trees.Application(evc.get, evs.map(_.get))
+ })
+ }
+ case Assume(condition, body) => for {
+ (stc, evc) <- ExprE.elaborate(condition)
+ (stb, evb) <- ExprE.elaborate(body).addConstraint(Constraint.equal(stc, SimpleTypes.BooleanType().setPos(template.pos)))
+ } yield (stb, Eventual.withUnifier { implicit unifier =>
+ trees.Assume(evc.get, evb.get)
+ })
+ case Cast(Casts.Widen, expr, size) => for {
+ (st, ev) <- ExprE.elaborate(expr)
+ _ <- Constrained(Constraint.isBits(st, upper=Some(size - 1)))
+ } yield (SimpleTypes.BitVectorType(true, size).setPos(template.pos), ev.map(trees.BVWideningCast(_, trees.BVType(true, size))))
+ case Cast(Casts.Narrow, expr, size) => for {
+ (st, ev) <- ExprE.elaborate(expr)
+ _ <- Constrained(Constraint.isBits(st, lower=Some(size + 1)))
+ } yield (SimpleTypes.BitVectorType(true, size).setPos(template.pos), ev.map(trees.BVNarrowingCast(_, trees.BVType(true, size))))
+ case Choose(binding, body) => for {
+ sb <- BindingE.elaborate(binding)
+ (st, evb) <- ExprE.elaborate(body)(store.addBinding(sb))
+ _ <- Constrained(Constraint.equal(st, SimpleTypes.BooleanType().setPos(template.pos)))
+ } yield (sb.tpe.withPos(template.pos), Eventual.withUnifier { implicit unifier =>
+ trees.Choose(sb.evValDef.get, evb.get)
+ })
+ case If(condition, thenn, elze) => for {
+ (stc, evc) <- ExprE.elaborate(condition)
+ (stt, evt) <- ExprE.elaborate(thenn).addConstraint(Constraint.equal(stc, SimpleTypes.BooleanType().setPos(template.pos)))
+ (ste, eve) <- ExprE.elaborate(elze)
+ _ <- Constrained(Constraint.equal(stt, ste))
+ } yield (stt, Eventual.withUnifier { implicit unifier =>
+ trees.IfExpr(evc.get, evt.get, eve.get)
+ })
+
+ case Exprs.Invocation(id, optTypeArgs, args) =>
+ val identUnknownType = SimpleTypes.Unknown.fresh.setPos(id.pos)
+ val resType = SimpleTypes.Unknown.fresh.setPos(template.pos)
+ for {
+ // get a sequence of identifiers
+ identifierSequence: Seq[inox.Identifier] <- ExprUseIDOverloadE.elaborate(id)
+ // if there are any type parameters elaborate them
+ (sts, ets) <- optTypeArgs.map(TypeSeqE.elaborate(_)).getOrElse(
+ Constrained.sequence(Seq.empty[Constrained[(SimpleTypes.Type, Eventual[trees.Type])]])).map(_.unzip)
+ // elaborate arguments
+ (stas, evas) <- ExprSeqE.elaborate(args).map(_.unzip)
+ // for all functions with the identifier name collect the result type, function type of the identifier and the eventual expression
+ mapped: Seq[Constrained[(SimpleTypes.Type, SimpleTypes.Type, Eventual[trees.Expr])]] = identifierSequence.flatMap(ident => {
+ store.getFunction(ident).map(x => Left((x, true)))
+ .orElse(store.getConstructor(ident).map(x => Left((x, false))))
+ .orElse(store.getVariable(ident).map(x => Right(x))).map {
+ // case for functions
+ case Left(((n, f), true)) =>
+ // when there are no type arguments
+ if (sts.isEmpty) {
+ Some(
+ for {
+ (sts, ets) <- Constrained.sequence(Seq.fill(n) {
+ val unknown = SimpleTypes.Unknown.fresh.setPos(template.pos)
+ Constrained.pure(unknown, Eventual.withUnifier { implicit unifier =>
+ SimpleTypes.toInox(unifier.get(unknown))
+ })
+ }).map(_.unzip)
+ (ests, rst) = f(sts)
+ } yield (rst, SimpleTypes.FunctionType(ests, rst), Eventual.withUnifier { implicit unifier =>
+ trees.FunctionInvocation(ident, ets.map(_.get), evas.map(_.get))
+ }))
+ // if the number of type arguments is the same as the number of type parameters
+ } else if (sts.size == n) {
+ val (ests, rst) = f(sts)
+ Some(Constrained.pure((rst, SimpleTypes.FunctionType(ests, rst), Eventual.withUnifier { implicit unifier =>
+ trees.FunctionInvocation(ident, ets.map(_.get), evas.map(_.get))
+ })))
+ // in other cases there result is None
+ } else
+ None
+ // case for constructors
+ case Left(((n, f), false)) =>
+ if (sts.isEmpty) {
+ Some(
+ for {
+ (sts, ets) <- Constrained.sequence(Seq.fill(n) {
+ val unknown = SimpleTypes.Unknown.fresh.setPos(template.pos)
+ Constrained.pure(unknown, Eventual.withUnifier { implicit unifier =>
+ SimpleTypes.toInox(unifier.get(unknown))
+ })
+ }).map(_.unzip)
+ (ests, rst) = f(sts)
+ if stas.size == ests.size
+ } yield (rst, rst, Eventual.withUnifier { implicit unifier =>
+ trees.ADT(ident, ets.map(_.get), evas.map(_.get))
+ }))
+ } else if (sts.size == n) {
+ val (ests, rst) = f(sts)
+ Some(Constrained.pure((rst, rst, Eventual.withUnifier { implicit unifier =>
+ trees.ADT(ident, ets.map(_.get), evas.map(_.get))
+ })))
+ } else
+ None
+ // variable which has a function type
+ case Right((st, et)) =>
+ val retTpe = SimpleTypes.Unknown.fresh.setPos(template.pos)
+ Some(for {
+ _ <- Constrained(Constraint.equal(st, SimpleTypes.FunctionType(stas, retTpe)))
+ .checkImmediate(optTypeArgs.isEmpty, template, functionValuesCanNotHaveTypeParameters(ident.name))
+ } yield (retTpe, SimpleTypes.FunctionType(stas, retTpe), Eventual.withUnifier { implicit unifier =>
+ trees.Application(trees.Variable(ident, et.get, Seq()), evas.map(_.get))
+ }))
+ }
+ }).flatten
+ options <- Constrained.sequence(mapped)
+ resOptions = options.map(_._1).distinct
+ idOptions = options.map(_._2).distinct
+ _ <- Constrained(Constraint.oneOf(resType, resType, resOptions), Constraint.oneOf(identUnknownType, identUnknownType, idOptions),
+ Constraint.equal(identUnknownType, SimpleTypes.FunctionType(stas, resType)), Constraint.exist(resType), Constraint.exist(identUnknownType))
+ } yield (resType, Eventual.withUnifier { implicit unifier =>
+ val unifiedFinal = unifier.get(resType)
+ val unifierIdType = unifier.get(identUnknownType)
+ val possibleOptions = options.count(option => unifier(option._1) == unifiedFinal && unifier(option._2) == unifierIdType)
+ if (possibleOptions > 1)
+ throw new Exception(ambiguousTypes(Seq(id.pos)))
+ val eventualOption = options.find(option => unifier(option._1) == unifiedFinal && unifier(option._2) == unifierIdType)
+ eventualOption match {
+ case None => throw new Exception("Should not happen that unification finished")
+ case Some(eventual) => eventual._3.get
+ }
+ })
+ case PrimitiveInvocation(fun, optTypeArgs, args) => {
+ import Primitive._
+
+ optTypeArgs
+ .map(TypeSeqE.elaborate(_))
+ .getOrElse(Constrained.sequence(Seq.fill(fun.typeArgs) {
+ OptTypeE.elaborate(Left(template.pos))
+ }))
+ .checkImmediate(_.size == fun.typeArgs, template, xs => wrongNumberOfTypeArguments(fun.name, fun.typeArgs, xs.size))
+ .map(_.map(_._1))
+ .flatMap { (typeArgs) =>
+
+ ExprSeqE
+ .elaborate(args)
+ .checkImmediate(_.size == fun.args, template, xs => wrongNumberOfArguments(fun.name, fun.args, xs.size))
+ .map(_.unzip)
+ .flatMap { case (sts, evs) => fun match {
+ case SetAdd =>
+ Constrained
+ .pure((SimpleTypes.SetType(typeArgs(0)).setPos(template.pos), Eventual.withUnifier { implicit unifier =>
+ trees.SetAdd(evs(0).get, evs(1).get)
+ }))
+ .addConstraint(Constraint.equal(sts(0), SimpleTypes.SetType(typeArgs(0)).setPos(template.pos)))
+ .addConstraint(Constraint.equal(sts(1), typeArgs(0)))
+ case ElementOfSet =>
+ Constrained
+ .pure((SimpleTypes.BooleanType().setPos(template.pos), Eventual.withUnifier { implicit unifier =>
+ trees.ElementOfSet(evs(0).get, evs(1).get)
+ }))
+ .addConstraint(Constraint.equal(sts(0), typeArgs(0)))
+ .addConstraint(Constraint.equal(sts(1), SimpleTypes.SetType(typeArgs(0)).setPos(template.pos)))
+ case SetIntersection =>
+ Constrained
+ .pure((SimpleTypes.SetType(typeArgs(0)).setPos(template.pos), Eventual.withUnifier { implicit unifier =>
+ trees.SetIntersection(evs(0).get, evs(1).get)
+ }))
+ .addConstraint(Constraint.equal(sts(0), SimpleTypes.SetType(typeArgs(0)).setPos(template.pos)))
+ .addConstraint(Constraint.equal(sts(1), SimpleTypes.SetType(typeArgs(0)).setPos(template.pos)))
+ case SetUnion =>
+ Constrained
+ .pure((SimpleTypes.SetType(typeArgs(0)).setPos(template.pos), Eventual.withUnifier { implicit unifier =>
+ trees.SetUnion(evs(0).get, evs(1).get)
+ }))
+ .addConstraint(Constraint.equal(sts(0), SimpleTypes.SetType(typeArgs(0)).setPos(template.pos)))
+ .addConstraint(Constraint.equal(sts(1), SimpleTypes.SetType(typeArgs(0)).setPos(template.pos)))
+ case SetDifference =>
+ // Build the actual set-difference node; the original copy-pasted trees.SetUnion here.
+ Constrained
+ .pure((SimpleTypes.SetType(typeArgs(0)).setPos(template.pos), Eventual.withUnifier { implicit unifier =>
+ trees.SetDifference(evs(0).get, evs(1).get)
+ }))
+ .addConstraint(Constraint.equal(sts(0), SimpleTypes.SetType(typeArgs(0)).setPos(template.pos)))
+ .addConstraint(Constraint.equal(sts(1), SimpleTypes.SetType(typeArgs(0)).setPos(template.pos)))
+ case Subset =>
+ Constrained
+ .pure((SimpleTypes.BooleanType().setPos(template.pos), Eventual.withUnifier { implicit unifier =>
+ trees.SubsetOf(evs(0).get, evs(1).get)
+ }))
+ .addConstraint(Constraint.equal(sts(0), SimpleTypes.SetType(typeArgs(0)).setPos(template.pos)))
+ .addConstraint(Constraint.equal(sts(1), SimpleTypes.SetType(typeArgs(0)).setPos(template.pos)))
+ case BagAdd =>
+ Constrained
+ .pure((SimpleTypes.BagType(typeArgs(0)).setPos(template.pos), Eventual.withUnifier { implicit unifier =>
+ trees.BagAdd(evs(0).get, evs(1).get)
+ }))
+ .addConstraint(Constraint.equal(sts(0), SimpleTypes.BagType(typeArgs(0)).setPos(template.pos)))
+ .addConstraint(Constraint.equal(sts(1), typeArgs(0)))
+ case MultiplicityInBag =>
+ Constrained
+ .pure((SimpleTypes.IntegerType().setPos(template.pos), Eventual.withUnifier { implicit unifier =>
+ trees.MultiplicityInBag(evs(0).get, evs(1).get)
+ }))
+ .addConstraint(Constraint.equal(sts(0), typeArgs(0)))
+ .addConstraint(Constraint.equal(sts(1), SimpleTypes.BagType(typeArgs(0)).setPos(template.pos)))
+ case BagIntersection =>
+ Constrained
+ .pure((SimpleTypes.BagType(typeArgs(0)).setPos(template.pos), Eventual.withUnifier { implicit unifier =>
+ trees.BagIntersection(evs(0).get, evs(1).get)
+ }))
+ .addConstraint(Constraint.equal(sts(0), SimpleTypes.BagType(typeArgs(0)).setPos(template.pos)))
+ .addConstraint(Constraint.equal(sts(1), SimpleTypes.BagType(typeArgs(0)).setPos(template.pos)))
+ case BagUnion =>
+ Constrained
+ .pure((SimpleTypes.BagType(typeArgs(0)).setPos(template.pos), Eventual.withUnifier { implicit unifier =>
+ trees.BagUnion(evs(0).get, evs(1).get)
+ }))
+ .addConstraint(Constraint.equal(sts(0), SimpleTypes.BagType(typeArgs(0)).setPos(template.pos)))
+ .addConstraint(Constraint.equal(sts(1), SimpleTypes.BagType(typeArgs(0)).setPos(template.pos)))
+ case BagDifference =>
+ // Build the actual bag-difference node; the original copy-pasted trees.BagUnion here.
+ Constrained
+ .pure((SimpleTypes.BagType(typeArgs(0)).setPos(template.pos), Eventual.withUnifier { implicit unifier =>
+ trees.BagDifference(evs(0).get, evs(1).get)
+ }))
+ .addConstraint(Constraint.equal(sts(0), SimpleTypes.BagType(typeArgs(0)).setPos(template.pos)))
+ .addConstraint(Constraint.equal(sts(1), SimpleTypes.BagType(typeArgs(0)).setPos(template.pos)))
+ case MapApply =>
+ Constrained
+ .pure((typeArgs(1), Eventual.withUnifier { implicit unifier =>
+ trees.MapApply(evs(0).get, evs(1).get)
+ }))
+ .addConstraint(Constraint.equal(sts(0), SimpleTypes.MapType(typeArgs(0), typeArgs(1)).setPos(template.pos)))
+ .addConstraint(Constraint.equal(sts(1), typeArgs(0)))
+ case MapUpdated =>
+ Constrained
+ .pure((SimpleTypes.MapType(typeArgs(0), typeArgs(1)).setPos(template.pos), Eventual.withUnifier { implicit unifier =>
+ trees.MapUpdated(evs(0).get, evs(1).get, evs(2).get)
+ }))
+ .addConstraint(Constraint.equal(sts(0), SimpleTypes.MapType(typeArgs(0), typeArgs(1)).setPos(template.pos)))
+ .addConstraint(Constraint.equal(sts(1), typeArgs(0)))
+ .addConstraint(Constraint.equal(sts(2), typeArgs(1)))
+ case StringConcat =>
+ Constrained
+ .pure((SimpleTypes.StringType().setPos(template.pos), Eventual.withUnifier { implicit unifier =>
+ trees.StringConcat(evs(0).get, evs(1).get)
+ }))
+ .addConstraint(Constraint.equal(sts(0), SimpleTypes.StringType().setPos(template.pos)))
+ .addConstraint(Constraint.equal(sts(1), SimpleTypes.StringType().setPos(template.pos)))
+ case SubString =>
+ Constrained
+ .pure((SimpleTypes.StringType().setPos(template.pos), Eventual.withUnifier { implicit unifier =>
+ trees.SubString(evs(0).get, evs(1).get, evs(2).get)
+ }))
+ .addConstraint(Constraint.equal(sts(0), SimpleTypes.StringType().setPos(template.pos)))
+ .addConstraint(Constraint.equal(sts(1), SimpleTypes.IntegerType().setPos(template.pos)))
+ .addConstraint(Constraint.equal(sts(2), SimpleTypes.IntegerType().setPos(template.pos)))
+ case StringLength =>
+ Constrained
+ .pure((SimpleTypes.IntegerType().setPos(template.pos), evs(0).map(trees.StringLength(_))))
+ .addConstraint(Constraint.equal(sts(0), SimpleTypes.StringType().setPos(template.pos)))
+ }
+ }
+ }
+ }
+ case IsConstructor(expr, id) => for {
+ (st, ev) <- ExprE.elaborate(expr)
+ i <- ExprUseIdE.elaborate(id)
+ s <- Constrained.attempt(store.getSortOfConstructor(i), template, identifierNotConstructor(i.name))
+ n = store.getTypeConstructor(s).getOrElse { throw new IllegalStateException("Inconsistent store.") }
+ _ <- Constrained(Constraint.equal(st, SimpleTypes.ADTType(s, Seq.fill(n)(SimpleTypes.Unknown.fresh.setPos(template.pos))).setPos(template.pos)))
+ } yield (SimpleTypes.BooleanType().setPos(template.pos), ev.map(trees.IsConstructor(_, i)))
+ case Let(binding, value, expr) => for {
+ sb <- BindingE.elaborate(binding)
+ (stv, ev) <- ExprE.elaborate(value)
+ (ste, ee) <- ExprE.elaborate(expr)(store.addBinding(sb))
+ .addConstraint(Constraint.equal(sb.tpe, stv))
+ } yield (ste, Eventual.withUnifier { implicit unifier =>
+ trees.Let(sb.evValDef.get, ev.get, ee.get)
+ })
+ case Tuple(exprs) => for {
+ (sts, evs) <- ExprSeqE.elaborate(exprs).map(_.unzip)
+ } yield (SimpleTypes.TupleType(sts).setPos(template.pos), Eventual.withUnifier { implicit unifier =>
+ trees.Tuple(evs.map(_.get))
+ })
+ case TupleSelection(expr, index) => for {
+ (st, ev) <- ExprE.elaborate(expr)
+ u = SimpleTypes.Unknown.fresh.setPos(template.pos)
+ _ <- Constrained(Constraint.atIndexIs(st, index, u))
+ } yield (u, ev.map(trees.TupleSelect(_, index)))
+ case Selection(expr, id) => for {
+ (st, ev) <- ExprE.elaborate(expr)
+ (name, pis) <- FieldIdE.elaborate(id)
+ _ <- Constrained.checkImmediate(pis.size > 0, id, noFieldNamed(name))
+ adtType = SimpleTypes.Unknown.fresh.setPos(template.pos)
+ retType = SimpleTypes.Unknown.fresh.setPos(template.pos)
+ _ <- Constrained(Constraint.hasFields(st, pis.map(_._2.name).toSet, pis.map {
+ case (sortId, fieldId) => (sortId, (tpe: SimpleTypes.Type) => {
+ val n = store.getTypeConstructor(sortId).getOrElse {
+ throw new IllegalStateException("Inconsistent store.")
+ }
+ val as = Seq.fill(n)(SimpleTypes.Unknown.fresh.setPos(template.pos))
+ val r = store.getTypeOfField(fieldId)(as)
+ Seq(
+ Constraint.equal(adtType, tpe),
+ Constraint.equal(r, retType),
+ Constraint.equal(tpe, SimpleTypes.ADTType(sortId, as).setPos(template.pos)))
+ })
+ }))
+ } yield (retType, Eventual.withUnifier { implicit unifier =>
+ val sortId = unifier.get(adtType) match {
+ case SimpleTypes.ADTType(i, _) => i
+ case _ => throw new IllegalStateException("Unifier returned unexpected value.")
+ }
+ val fieldId = pis.toMap.get(sortId).getOrElse {
+ throw new IllegalStateException("Unifier returned unexpected value.")
+ }
+ trees.ADTSelector(ev.get, fieldId)
+ })
+ case TypeAnnotation(expr, tpe) => for {
+ (ste, ev) <- ExprE.elaborate(expr)
+ (stt, _) <- TypeE.elaborate(tpe)
+ _ <- Constrained(Constraint.equal(ste, stt))
+ } yield (stt, ev)
+ case UnaryOperation(op, arg) => ExprE.elaborate(arg).flatMap { case (st, ev) =>
+ import Unary._
+
+ op match {
+ case Minus =>
+ Constrained
+ .pure((st, ev.map(trees.UMinus(_))))
+ .addConstraint(Constraint.isNumeric(st))
+ case Not =>
+ Constrained
+ .pure((st, ev.map(trees.Not(_))))
+ .addConstraint(Constraint.equal(st, SimpleTypes.BooleanType().setPos(template.pos)))
+ case BVNot =>
+ Constrained
+ .pure((st, ev.map(trees.BVNot(_))))
+ .addConstraint(Constraint.isBits(st))
+ }
+ }
+ case BinaryOperation(op, arg1, arg2) => ExprE.elaborate(arg1).flatMap { case (st1, ev1) =>
+ ExprE.elaborate(arg2).flatMap { case (st2, ev2) =>
+ import Binary._
+ op match {
+ case Plus =>
+ Constrained
+ .pure((st1, Eventual.withUnifier { implicit unifier => trees.Plus(ev1.get, ev2.get) }))
+ .addConstraint(Constraint.equal(st1, st2))
+ .addConstraint(Constraint.isNumeric(st1))
+ case Minus =>
+ Constrained
+ .pure((st1, Eventual.withUnifier { implicit unifier => trees.Minus(ev1.get, ev2.get) }))
+ .addConstraint(Constraint.equal(st1, st2))
+ .addConstraint(Constraint.isNumeric(st1))
+ case Times =>
+ Constrained
+ .pure((st1, Eventual.withUnifier { implicit unifier => trees.Times(ev1.get, ev2.get) }))
+ .addConstraint(Constraint.equal(st1, st2))
+ .addConstraint(Constraint.isNumeric(st1))
+ case Division =>
+ Constrained
+ .pure((st1, Eventual.withUnifier { implicit unifier => trees.Division(ev1.get, ev2.get) }))
+ .addConstraint(Constraint.equal(st1, st2))
+ .addConstraint(Constraint.isNumeric(st1))
+ case Modulo =>
+ Constrained
+ .pure((st1, Eventual.withUnifier { implicit unifier => trees.Modulo(ev1.get, ev2.get) }))
+ .addConstraint(Constraint.equal(st1, st2))
+ .addConstraint(Constraint.isIntegral(st1))
+ case Remainder =>
+ Constrained
+ .pure((st1, Eventual.withUnifier { implicit unifier => trees.Remainder(ev1.get, ev2.get) }))
+ .addConstraint(Constraint.equal(st1, st2))
+ .addConstraint(Constraint.isIntegral(st1))
+ case Implies =>
+ Constrained
+ .pure((SimpleTypes.BooleanType().setPos(template.pos), Eventual.withUnifier { implicit unifier => trees.Implies(ev1.get, ev2.get) }))
+ .addConstraint(Constraint.equal(st1, SimpleTypes.BooleanType().setPos(template.pos)))
+ .addConstraint(Constraint.equal(st2, SimpleTypes.BooleanType().setPos(template.pos)))
+ case Equals =>
+ Constrained
+ .pure((SimpleTypes.BooleanType().setPos(template.pos), Eventual.withUnifier { implicit unifier => trees.Equals(ev1.get, ev2.get) }))
+ .addConstraint(Constraint.equal(st1, st2))
+ case LessEquals =>
+ Constrained
+ .pure((SimpleTypes.BooleanType().setPos(template.pos), Eventual.withUnifier { implicit unifier => trees.LessEquals(ev1.get, ev2.get) }))
+ .addConstraint(Constraint.equal(st1, st2))
+ .addConstraint(Constraint.isComparable(st1))
+ case LessThan =>
+ Constrained
+ .pure((SimpleTypes.BooleanType().setPos(template.pos), Eventual.withUnifier { implicit unifier => trees.LessThan(ev1.get, ev2.get) }))
+ .addConstraint(Constraint.equal(st1, st2))
+ .addConstraint(Constraint.isComparable(st1))
+ case GreaterEquals =>
+ Constrained
+ .pure((SimpleTypes.BooleanType().setPos(template.pos), Eventual.withUnifier { implicit unifier => trees.GreaterEquals(ev1.get, ev2.get) }))
+ .addConstraint(Constraint.equal(st1, st2))
+ .addConstraint(Constraint.isComparable(st1))
+ case GreaterThan =>
+ Constrained
+ .pure((SimpleTypes.BooleanType().setPos(template.pos), Eventual.withUnifier { implicit unifier => trees.GreaterThan(ev1.get, ev2.get) }))
+ .addConstraint(Constraint.equal(st1, st2))
+ .addConstraint(Constraint.isComparable(st1))
+ case BVAnd =>
+ Constrained
+ .pure((st1, Eventual.withUnifier { implicit unifier => trees.BVAnd(ev1.get, ev2.get) }))
+ .addConstraint(Constraint.equal(st1, st2))
+ .addConstraint(Constraint.isBits(st1))
+ case BVOr =>
+ Constrained
+ .pure((st1, Eventual.withUnifier { implicit unifier => trees.BVOr(ev1.get, ev2.get) }))
+ .addConstraint(Constraint.equal(st1, st2))
+ .addConstraint(Constraint.isBits(st1))
+ case BVXor =>
+ Constrained
+ .pure((st1, Eventual.withUnifier { implicit unifier => trees.BVXor(ev1.get, ev2.get) }))
+ .addConstraint(Constraint.equal(st1, st2))
+ .addConstraint(Constraint.isBits(st1))
+ case BVShiftLeft =>
+ Constrained
+ .pure((st1, Eventual.withUnifier { implicit unifier => trees.BVShiftLeft(ev1.get, ev2.get) }))
+ .addConstraint(Constraint.equal(st1, st2))
+ .addConstraint(Constraint.isBits(st1))
+ case BVAShiftRight =>
+ Constrained
+ .pure((st1, Eventual.withUnifier { implicit unifier => trees.BVAShiftRight(ev1.get, ev2.get) }))
+ .addConstraint(Constraint.equal(st1, st2))
+ .addConstraint(Constraint.isBits(st1))
+ case BVLShiftRight =>
+ Constrained
+ .pure((st1, Eventual.withUnifier { implicit unifier => trees.BVLShiftRight(ev1.get, ev2.get) }))
+ .addConstraint(Constraint.equal(st1, st2))
+ .addConstraint(Constraint.isBits(st1))
+ }
+ }
+ }
+ case NaryOperation(op, args) => ExprSeqE.elaborate(args).map(_.unzip).flatMap { case (sts, evs) =>
+ import NAry._
+
+ op match {
+ case And =>
+ Constrained
+ .pure((SimpleTypes.BooleanType().setPos(template.pos), Eventual.sequence(evs).map(trees.And(_))))
+ .addConstraints(sts.map(Constraint.equal(_, SimpleTypes.BooleanType().setPos(template.pos))))
+ case Or =>
+ Constrained
+ .pure((SimpleTypes.BooleanType().setPos(template.pos), Eventual.sequence(evs).map(trees.Or(_))))
+ .addConstraints(sts.map(Constraint.equal(_, SimpleTypes.BooleanType().setPos(template.pos))))
+ }
+ }
+ }
+ }
+ val ExprE = new ExprE
+
+ class ExprSeqE extends HSeqE[Expr, trees.Expr, (SimpleTypes.Type, Eventual[trees.Expr])]("Expr") {
+ override val elaborator = ExprE
+ override def wrap(expr: trees.Expr, where: IR)(implicit store: Store): Constrained[(SimpleTypes.Type, Eventual[trees.Expr])] =
+ Constrained.attempt(SimpleTypes.fromInox(expr.getType(store.getSymbols)).map { st =>
+ (st.setPos(where.pos), Eventual.pure(expr))
+ }, where, invalidInoxExpr(expr))
+ }
+ val ExprSeqE = new ExprSeqE
+
+ class ExprPairE extends Elaborator[ExprPair, ((SimpleTypes.Type, SimpleTypes.Type), Eventual[(trees.Expr, trees.Expr)])] {
+ override def elaborate(pair: ExprPair)(implicit store: Store):
+ Constrained[((SimpleTypes.Type, SimpleTypes.Type), Eventual[(trees.Expr, trees.Expr)])] = pair match {
+ case PairHole(index) => Constrained.attempt(store.getHole[(trees.Expr, trees.Expr)](index), pair, invalidHoleType("(Expr, Expr)")).flatMap {
+ case p@(lhs, rhs) => for {
+ stl <- Constrained.attempt(SimpleTypes.fromInox(lhs.getType(store.getSymbols)).map(_.setPos(pair.pos)), pair, invalidInoxExpr(lhs))
+ str <- Constrained.attempt(SimpleTypes.fromInox(rhs.getType(store.getSymbols)).map(_.setPos(pair.pos)), pair, invalidInoxExpr(rhs))
+ } yield ((stl, str), Eventual.pure(p))
+ }
+ case Pair(lhs, rhs) => for {
+ (stl, evl) <- ExprE.elaborate(lhs)
+ (str, evr) <- ExprE.elaborate(rhs)
+ } yield ((stl, str), Eventual.withUnifier { implicit unifier => (evl.get, evr.get) })
+ }
+ }
+ val ExprPairE = new ExprPairE
+
+ class ExprPairSeqE extends HSeqE[ExprPair, (trees.Expr, trees.Expr), ((SimpleTypes.Type, SimpleTypes.Type), Eventual[(trees.Expr, trees.Expr)])]("(Expr, Expr)") {
+ override val elaborator = ExprPairE
+ override def wrap(pair: (trees.Expr, trees.Expr), where: IR)(implicit store: Store):
+ Constrained[((SimpleTypes.Type, SimpleTypes.Type), Eventual[(trees.Expr, trees.Expr)])] = for {
+ stl <- Constrained.attempt(SimpleTypes.fromInox(pair._1.getType(store.getSymbols)).map(_.setPos(where.pos)), where, invalidInoxExpr(pair._1))
+ str <- Constrained.attempt(SimpleTypes.fromInox(pair._2.getType(store.getSymbols)).map(_.setPos(where.pos)), where, invalidInoxExpr(pair._2))
+ } yield ((stl, str), Eventual.pure(pair))
+ }
+ val ExprPairSeqE = new ExprPairSeqE
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/elaboration/elaborators/FunctionElaborators.scala b/src/main/scala/inox/parser/elaboration/elaborators/FunctionElaborators.scala
new file mode 100644
index 000000000..16a07be5b
--- /dev/null
+++ b/src/main/scala/inox/parser/elaboration/elaborators/FunctionElaborators.scala
@@ -0,0 +1,43 @@
+package inox
+package parser
+package elaboration
+package elaborators
+
+trait FunctionElaborators { self: Elaborators =>
+
+ import Functions._
+
+ class SingleFunctionE extends Elaborator[Function, Eventual[trees.FunDef]] {
+ override def elaborate(function: Function)(implicit store: Store): Constrained[Eventual[trees.FunDef]] = for {
+ sf <- SignatureE.elaborate(function)
+ (st, ev) <- ExprE.elaborate(function.body)(store
+ .addFunction(sf)
+ .addTypeBindings(sf.typeParams)
+ .addBindings(sf.params))
+ _ <- Constrained(Constraint.equal(st, sf.retTpe))
+ } yield Eventual.withUnifier { implicit unifier =>
+ new trees.FunDef(
+ sf.id,
+ sf.typeParams.map(binding => trees.TypeParameterDef(binding.id, Seq())),
+ sf.params.map(_.evValDef.get),
+ sf.evRetTpe.get,
+ ev.get,
+ Seq())
+ }
+ }
+ val SingleFunctionE = new SingleFunctionE
+
+ class SignatureE extends Elaborator[Function, SimpleFunctions.Function] {
+ override def elaborate(function: Function)(implicit store: Store): Constrained[SimpleFunctions.Function] = for {
+ (i, optName) <- DefIdE.elaborate(function.identifier)
+ tpbs <- TypeVarDefSeqE.elaborate(function.typeParams)
+ storeWithTypeParams = store.addTypeBindings(tpbs)
+ bs <- BindingSeqE.elaborate(function.params)(storeWithTypeParams)
+ (stRet, evRet) <- OptTypeE.elaborate(function.returnType match {
+ case Some(tpe) => Right(tpe)
+ case None => Left(function.pos)
+ })(storeWithTypeParams.addBindings(bs))
+ } yield SimpleFunctions.Function(i, optName, tpbs, bs, stRet, evRet)
+ }
+ val SignatureE = new SignatureE
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/elaboration/elaborators/IdentifierElaborators.scala b/src/main/scala/inox/parser/elaboration/elaborators/IdentifierElaborators.scala
new file mode 100644
index 000000000..eadc77a02
--- /dev/null
+++ b/src/main/scala/inox/parser/elaboration/elaborators/IdentifierElaborators.scala
@@ -0,0 +1,111 @@
+package inox
+package parser
+package elaboration
+package elaborators
+
+trait IdentifierElaborators { self: Elaborators => // Elaboration of identifiers: definitions, uses, fields, type variables.
+
+  import Identifiers._
+
+  class DefIdE extends Elaborator[Identifier, (inox.Identifier, Option[String])] { // A *defining* occurrence: always creates/uses a fresh identifier.
+    override def elaborate(template: Identifier)(implicit store: Store): Constrained[(inox.Identifier, Option[String])] = template match {
+      case IdentifierHole(index) => store.getHole[inox.Identifier](index) match {
+        case None => Constrained.fail(invalidHoleType("Identifier")(template.pos))
+        case Some(id) => Constrained.pure((id, None)) // Hole-provided ids carry no source name.
+      }
+      case IdentifierName(name) => Constrained.pure((inox.FreshIdentifier(name), Some(name)))
+    }
+  }
+  val DefIdE = new DefIdE
+
+  class ExprUseIdE extends Elaborator[Identifier, inox.Identifier] { // A *use* occurrence in expression position; must already be in scope.
+    override def elaborate(template: Identifier)(implicit store: Store): Constrained[inox.Identifier] = template match {
+      case IdentifierHole(index) => store.getHole[inox.Identifier](index) match {
+        case None => Constrained.fail(invalidHoleType("Identifier")(template.pos))
+        case Some(id) => Constrained.pure(id)
+      }
+      case IdentifierName(name) => store.getExprIdentifier(name) match {
+        case None => Constrained.fail(noExprInScope(name)(template.pos))
+        case Some(id) => Constrained.pure(id)
+      }
+    }
+  }
+  val ExprUseIdE = new ExprUseIdE
+
+  class ExprUseIDOverloadE extends Elaborator[Identifier, Seq[inox.Identifier]] { // Use occurrence that may resolve to several overloaded functions.
+    override def elaborate(template: Identifier)(implicit store: Store): Constrained[Seq[inox.Identifier]] = template match {
+      case IdentifierHole(index) => store.getHole[inox.Identifier](index) match {
+        case None => Constrained.fail(invalidHoleType("Identifier")(template.pos))
+        case Some(id) => Constrained.pure(Seq(id))
+      }
+      case IdentifierName(name) =>
+        store.getExprIdentifier(name) match { // Local bindings shadow function overloads.
+          case None => store.getFunctions(name) match {
+            case None => Constrained.fail(noExprInScope(name)(template.pos))
+            case Some(identSequence) => Constrained.pure(identSequence) // All overloads; disambiguation happens later.
+          }
+          case Some(a) => Constrained.pure(Seq(a))
+        }
+    }
+  }
+
+  val ExprUseIDOverloadE = new ExprUseIDOverloadE
+
+  class TypeUseIdE extends Elaborator[Identifier, inox.Identifier] { // Use occurrence in type position.
+    override def elaborate(template: Identifier)(implicit store: Store): Constrained[inox.Identifier] = template match {
+      case IdentifierHole(index) => store.getHole[inox.Identifier](index) match {
+        case None => Constrained.fail(invalidHoleType("Identifier")(template.pos))
+        case Some(id) => Constrained.pure(id)
+      }
+      case IdentifierName(name) => store.getTypeIdentifier(name) match {
+        case None => Constrained.fail(noTypeInScope(name)(template.pos))
+        case Some(id) => Constrained.pure(id)
+      }
+    }
+  }
+  val TypeUseIdE = new TypeUseIdE
+
+  class FieldIdE extends Elaborator[Identifier, (String, Seq[(inox.Identifier, inox.Identifier)])] { // ADT field access: returns the name plus all (sort, field) candidates.
+    override def elaborate(template: Identifier)(implicit store: Store): Constrained[(String, Seq[(inox.Identifier, inox.Identifier)])] = template match {
+      case IdentifierHole(index) => store.getHole[inox.Identifier](index) match {
+        case None => Constrained.fail(invalidHoleType("Identifier")(template.pos))
+        case Some(id) => Constrained.pure((id.name, store.getSortByField(id).toSeq.map((_, id))))
+      }
+      case IdentifierName(name) => Constrained.pure((name, store.getFieldByName(name))) // Ambiguity (several sorts sharing a field name) is resolved downstream.
+    }
+  }
+  val FieldIdE = new FieldIdE
+
+  class DefIdSeqE extends HSeqE[Identifier, inox.Identifier, (inox.Identifier, Option[String])]("Identifier") {
+    override val elaborator = DefIdE
+
+    override def wrap(id: inox.Identifier, where: IR)(implicit store: Store): Constrained[(inox.Identifier, Option[String])] =
+      Constrained.pure((id, None)) // Spliced identifiers carry no source name.
+  }
+  val DefIdSeqE = new DefIdSeqE
+
+  class TypeVarDefE extends Elaborator[Identifier, SimpleBindings.TypeBinding] { // Defines a type parameter from an identifier.
+    override def elaborate(template: Identifier)(implicit store: Store): Constrained[SimpleBindings.TypeBinding] = {
+      DefIdE.elaborate(template).map { case (id, optName) =>
+        SimpleBindings.TypeBinding(
+          id,
+          SimpleTypes.TypeParameter(id),
+          Eventual.pure(trees.TypeParameter(id, Seq())),
+          optName)
+      }
+    }
+  }
+  val TypeVarDefE = new TypeVarDefE
+
+  class TypeVarDefSeqE extends HSeqE[Identifier, inox.Identifier, SimpleBindings.TypeBinding]("Identifier") {
+    override val elaborator = TypeVarDefE
+
+    override def wrap(id: inox.Identifier, where: IR)(implicit store: Store): Constrained[SimpleBindings.TypeBinding] =
+      Constrained.pure(SimpleBindings.TypeBinding(
+        id,
+        SimpleTypes.TypeParameter(id),
+        Eventual.pure(trees.TypeParameter(id, Seq())),
+        None))
+  }
+  val TypeVarDefSeqE = new TypeVarDefSeqE
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/elaboration/elaborators/ProgramElaborators.scala b/src/main/scala/inox/parser/elaboration/elaborators/ProgramElaborators.scala
new file mode 100644
index 000000000..fe4bd7b4d
--- /dev/null
+++ b/src/main/scala/inox/parser/elaboration/elaborators/ProgramElaborators.scala
@@ -0,0 +1,58 @@
+package inox
+package parser
+package elaboration
+package elaborators
+
+trait ProgramElaborators { self: Elaborators => // Elaboration of whole programs (mixed sequences of sorts and functions).
+
+  import Programs._
+  import ADTs._
+  import Functions._
+
+  class ProgramE extends Elaborator[Program, Seq[Eventual[trees.Definition]]] {
+    override def elaborate(program: Program)(implicit store: Store): Constrained[Seq[Eventual[trees.Definition]]] = {
+      val sorts = program.defs.filter(_.isLeft).map(_.left.get) // .get is safe: guarded by the preceding filter.
+      val funs = program.defs.filter(_.isRight).map(_.right.get)
+
+      // Re-interleaves elaborated sorts (xs) and functions (ys) back into the original definition order.
+      def rebuild[A](defs: Seq[Either[Sort, Function]], xs: Seq[A], ys: Seq[A]): Seq[A] = defs.foldLeft((Seq[A](), xs, ys)) {
+        case ((acc, x :: xs, ys), Left(_)) => (acc :+ x, xs, ys)
+        case ((acc, xs, y :: ys), Right(_)) => (acc :+ y, xs, ys)
+        case _ => throw new IllegalStateException("Unreachable.") // xs/ys lengths match defs by construction.
+      }._1
+
+      for {
+        // Pass 1: sort signatures only, so mutually recursive sorts can reference each other.
+        emptySimpleSorts <- Constrained.sequence(sorts.map(s => EmptySortE.elaborate(s)))
+        storeWithEmptySorts = store.addSorts(emptySimpleSorts)
+        (simpleSorts, evSorts) <- Constrained.sequence(sorts.zip(emptySimpleSorts).map {
+          case (sort, ss) => for {
+            (scs, ecs) <- new ConstructorSeqE(ss.id)
+              .elaborate(sort.constructors)(storeWithEmptySorts.addTypeBindings(ss.typeParams)).map(_.unzip)
+            fieldNames = scs.flatMap(_.params.flatMap(_.name))
+            _ <- Constrained.checkImmediate(fieldNames.toSet.size == fieldNames.size, sort, fieldsNotDistincts) // Field names must be unique across a sort's constructors.
+          } yield (ss.copy(constructors=scs), Eventual.withUnifier { implicit unifier =>
+            new trees.ADTSort(ss.id, ss.typeParams.map(tb => trees.TypeParameterDef(tb.id, Seq())), ecs.map(_.get), Seq()) })
+        }).map(_.unzip)
+        storeWithSorts = store.addSorts(simpleSorts)
+        // Pass 2: function signatures first, so mutually recursive functions can reference each other.
+        signatures <- Constrained.sequence(funs.map(f => SignatureE.elaborate(f)(storeWithSorts)))
+        storeWithFunSignatures = storeWithSorts.addFunctions(signatures)
+        evFunsDefs <- Constrained.sequence(funs.zip(signatures).map {
+          case (function, sf) => for {
+            (st, ev) <- ExprE.elaborate(function.body)(storeWithFunSignatures
+              .addTypeBindings(sf.typeParams)
+              .addBindings(sf.params))
+            _ <- Constrained(Constraint.equal(st, sf.retTpe)) // Body type must unify with the return type (mirrors SingleFunctionE).
+          } yield Eventual.withUnifier { implicit unifier =>
+            new trees.FunDef(
+              sf.id,
+              sf.typeParams.map(binding => trees.TypeParameterDef(binding.id, Seq())),
+              sf.params.map(_.evValDef.get),
+              sf.evRetTpe.get,
+              ev.get,
+              Seq())
+          }
+        })
+      } yield rebuild(program.defs, evSorts, evFunsDefs)
+    }
+  }
+  val ProgramE = new ProgramE
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/elaboration/elaborators/TypeElaborators.scala b/src/main/scala/inox/parser/elaboration/elaborators/TypeElaborators.scala
new file mode 100644
index 000000000..3b7681d76
--- /dev/null
+++ b/src/main/scala/inox/parser/elaboration/elaborators/TypeElaborators.scala
@@ -0,0 +1,122 @@
+package inox
+package parser
+package elaboration
+package elaborators
+
+import scala.util.parsing.input.Position
+
+trait TypeElaborators { self: Elaborators => // Elaboration of type trees into (SimpleTypes.Type, Eventual[trees.Type]) pairs.
+
+  import Types._
+
+  class TypeE extends Elaborator[Type, (SimpleTypes.Type, Eventual[trees.Type])] {
+    override def elaborate(template: Type)(implicit store: Store): Constrained[(SimpleTypes.Type, Eventual[trees.Type])] = template match {
+      case TypeHole(index) => for {
+        t <- Constrained.attempt(store.getHole[trees.Type](index), template, invalidHoleType("Type"))
+        st <- Constrained.attempt(SimpleTypes.fromInox(t).map(_.setPos(template.pos)), template, invalidInoxType(t))
+      } yield (st, Eventual.pure(t))
+      case Variable(id) => for {
+        i <- TypeUseIdE.elaborate(id)
+        // A bare name is either a type variable or a nullary type constructor (ADT with no type args).
+        (st, et) <- Constrained.attempt(store.getType(i).orElse(store.getTypeConstructor(i).flatMap { (n: Int) =>
+          if (n == 0) Some((SimpleTypes.ADTType(i, Seq()), Eventual.pure(trees.ADTType(i, Seq()))))
+          else None
+        }), template, typeConstructorUsedAsTypeVariable(i.name))
+      } yield (st.withPos(template.pos), et)
+      case Primitive(tpe) => {
+        import Primitives._
+        val st = tpe match {
+          case UnitType => SimpleTypes.UnitType().setPos(template.pos)
+          case BooleanType => SimpleTypes.BooleanType().setPos(template.pos)
+          case BVType(signed, size) => SimpleTypes.BitVectorType(signed, size).setPos(template.pos)
+          case IntegerType => SimpleTypes.IntegerType().setPos(template.pos)
+          case RealType => SimpleTypes.RealType().setPos(template.pos)
+          case StringType => SimpleTypes.StringType().setPos(template.pos)
+          case CharType => SimpleTypes.CharType().setPos(template.pos)
+        }
+        Constrained.pure((st, Eventual.pure(SimpleTypes.toInox(st)))) // Primitives need no unification: convert eagerly.
+      }
+      case Operation(Operators.Set, args) => for {
+        zs <- TypeSeqE.elaborate(args)
+        _ <- Constrained.checkImmediate(zs.size == 1, template, wrongNumberOfTypeArguments("Set", 1, zs.size)) // Set takes exactly one type argument.
+      } yield {
+        val Seq((st, et)) = zs
+        (SimpleTypes.SetType(st).setPos(template.pos), et.map(trees.SetType(_)))
+      }
+      case Operation(Operators.Bag, args) => for {
+        zs <- TypeSeqE.elaborate(args)
+        _ <- Constrained.checkImmediate(zs.size == 1, template, wrongNumberOfTypeArguments("Bag", 1, zs.size))
+      } yield {
+        val Seq((st, et)) = zs
+        (SimpleTypes.BagType(st).setPos(template.pos), et.map(trees.BagType(_)))
+      }
+      case Operation(Operators.Map, args) => for {
+        zs <- TypeSeqE.elaborate(args)
+        _ <- Constrained.checkImmediate(zs.size == 2, template, wrongNumberOfTypeArguments("Map", 2, zs.size)) // Map takes key and value types.
+      } yield {
+        val Seq((sf, ef), (st, et)) = zs
+        (SimpleTypes.MapType(sf, st).setPos(template.pos), Eventual.withUnifier { implicit u =>
+          trees.MapType(ef.get, et.get)
+        })
+      }
+      case Invocation(id, args) => for {
+        i <- TypeUseIdE.elaborate(id)
+        n <- Constrained.attempt(store.getTypeConstructor(i), template, typeVariableUsedAsTypeConstructor(i.name))
+        zas <- TypeSeqE.elaborate(args)
+        _ <- Constrained.checkImmediate(n == zas.size, template, wrongNumberOfTypeArguments(i.name, n, zas.size)) // Arity check against the sort's declared type params.
+        (sas, eas) = zas.unzip
+      } yield (SimpleTypes.ADTType(i, sas).setPos(template.pos), Eventual.withUnifier { implicit u =>
+        trees.ADTType(i, eas.map(_.get))
+      })
+      case TupleType(elems) => for {
+        zes <- TypeSeqE.elaborate(elems)
+        (ses, ees) = zes.unzip
+      } yield (SimpleTypes.TupleType(ses).setPos(template.pos), Eventual.sequence(ees).map(trees.TupleType(_)))
+      case FunctionType(froms, to) => for {
+        zfs <- TypeSeqE.elaborate(froms)
+        (sfs, efs) = zfs.unzip
+        (st, et) <- TypeE.elaborate(to)
+      } yield (SimpleTypes.FunctionType(sfs, st).setPos(template.pos), Eventual.withUnifier(implicit u => trees.FunctionType(efs.map(_.get), et.get)))
+      case RefinementType(binding, pred) => for {
+        sb <- BindingE.elaborate(binding)
+        (pt, ep) <- ExprE.elaborate(pred)(store.addBinding(sb)) // Predicate sees the refined binding.
+        _ <- Constrained(Constraint.equal(pt, SimpleTypes.BooleanType().setPos(template.pos))) // Refinement predicate must be boolean.
+      } yield (sb.tpe, Eventual.withUnifier(implicit u => trees.RefinementType(sb.evValDef.get, ep.get)))
+      case SigmaType(bindings, to) => for {
+        bs <- BindingSeqE.elaborate(bindings)
+        (st, et) <- TypeE.elaborate(to)(store.addBindings(bs)) // Last component may depend on earlier bindings.
+      } yield (SimpleTypes.TupleType(bs.map(_.tpe) :+ st).setPos(template.pos), Eventual.withUnifier { implicit u =>
+        trees.SigmaType(bs.map(_.evValDef.get), et.get)
+      })
+      case PiType(bindings, to) => for {
+        bs <- BindingSeqE.elaborate(bindings)
+        (st, et) <- TypeE.elaborate(to)(store.addBindings(bs)) // Return type may depend on parameter bindings.
+      } yield (SimpleTypes.FunctionType(bs.map(_.tpe), st).setPos(template.pos), Eventual.withUnifier { implicit u =>
+        trees.PiType(bs.map(_.evValDef.get), et.get)
+      })
+    }
+  }
+  val TypeE = new TypeE
+
+  class OptTypeE extends Elaborator[Either[Position, Type], (SimpleTypes.Type, Eventual[trees.Type])] { // Elaborates an optional type: Left(pos) means "infer it".
+    override def elaborate(optType: Either[Position, Type])(implicit store: Store):
+        Constrained[(SimpleTypes.Type, Eventual[trees.Type])] = optType match {
+      case Right(tpe) => TypeE.elaborate(tpe)
+      case Left(pos) => {
+        val u = SimpleTypes.Unknown.fresh.setPos(pos) // Fresh unification variable at the omission site.
+
+        Constrained
+          .pure((u, Eventual.withUnifier { unifier => SimpleTypes.toInox(unifier.get(u)) }))
+          .addConstraint(Constraint.exist(u)) // Require the unknown to be solved eventually.
+      }
+    }
+  }
+  val OptTypeE = new OptTypeE
+
+  class TypeSeqE extends HSeqE[Type, trees.Type, (SimpleTypes.Type, Eventual[trees.Type])]("Type") {
+    override val elaborator = TypeE
+
+    override def wrap(tpe: trees.Type, where: IR)(implicit store: Store): Constrained[(SimpleTypes.Type, Eventual[trees.Type])] =
+      Constrained.attempt(SimpleTypes.fromInox(tpe).map(_.setPos(where.pos)), where, invalidInoxType(tpe)).map { st => (st, Eventual.pure(tpe)) }
+  }
+  val TypeSeqE = new TypeSeqE
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/extraction/Matchings.scala b/src/main/scala/inox/parser/extraction/Matchings.scala
new file mode 100644
index 000000000..53c9c87bb
--- /dev/null
+++ b/src/main/scala/inox/parser/extraction/Matchings.scala
@@ -0,0 +1,178 @@
+package inox
+package parser
+
+trait Matchings { self: Trees => // A small combinator library for matching templates against Inox trees, accumulating hole assignments.
+  sealed abstract class Matching[+A] { self =>
+    // Runs the matching with empty name environments; returns hole index -> matched value on success.
+    final def getMatches(symbols: trees.Symbols): Option[Map[Int, Any]] =
+      getMatches(symbols, Map.empty, Map.empty).map(_._2)
+
+    // Core operation. `global` / `local` map (name, isType) to the identifier that name is committed to.
+    // Returns the (possibly extended) global map, the hole assignments, and the result value; None on mismatch.
+    def getMatches(
+      symbols: trees.Symbols,
+      global: Map[(String, Boolean), inox.Identifier],
+      local: Map[(String, Boolean), inox.Identifier]):
+        Option[(Map[(String, Boolean), inox.Identifier], Map[Int, Any], A)]
+
+    // Runs this matching with additional *local* (scoped) name bindings; they do not escape.
+    def extendLocal(pairs: Seq[(String, inox.Identifier)], isType: Boolean=false): Matching[A] = new Matching[A] {
+      override def getMatches(
+        symbols: trees.Symbols,
+        global: Map[(String, Boolean), inox.Identifier],
+        local: Map[(String, Boolean), inox.Identifier]):
+          Option[(Map[(String, Boolean), inox.Identifier], Map[Int, Any], A)] = {
+
+        self.getMatches(symbols, global, pairs.foldLeft(local) { case (acc, pair) => acc + ((pair._1, isType) -> pair._2) })
+      }
+    }
+
+    // Sequences two matchings, keeping neither result (unit). Hole maps are merged, global env is threaded.
+    def <>[B](that: Matching[B]): Matching[Unit] = new Matching[Unit] {
+      override def getMatches(
+        symbols: trees.Symbols,
+        global: Map[(String, Boolean), inox.Identifier],
+        local: Map[(String, Boolean), inox.Identifier]):
+          Option[(Map[(String, Boolean), inox.Identifier], Map[Int, Any], Unit)] = {
+
+        for {
+          (newGlobal, firstMap, _) <- self.getMatches(symbols, global, local)
+          (finalGlobal, secondMap, _) <- that.getMatches(symbols, newGlobal, local)
+        } yield (finalGlobal, firstMap ++ secondMap, ())
+      }
+    }
+
+    // Sequences two matchings, keeping the second result.
+    def >>[B](that: Matching[B]): Matching[B] = new Matching[B] {
+      override def getMatches(
+        symbols: trees.Symbols,
+        global: Map[(String, Boolean), inox.Identifier],
+        local: Map[(String, Boolean), inox.Identifier]):
+          Option[(Map[(String, Boolean), inox.Identifier], Map[Int, Any], B)] = {
+
+        for {
+          (newGlobal, firstMap, _) <- self.getMatches(symbols, global, local)
+          (finalGlobal, secondMap, v) <- that.getMatches(symbols, newGlobal, local)
+        } yield (finalGlobal, firstMap ++ secondMap, v)
+      }
+    }
+
+    // Sequences two matchings, keeping the first result.
+    def <<[B](that: Matching[B]): Matching[A] = new Matching[A] {
+      override def getMatches(
+        symbols: trees.Symbols,
+        global: Map[(String, Boolean), inox.Identifier],
+        local: Map[(String, Boolean), inox.Identifier]):
+          Option[(Map[(String, Boolean), inox.Identifier], Map[Int, Any], A)] = {
+
+        for {
+          (newGlobal, firstMap, v) <- self.getMatches(symbols, global, local)
+          (finalGlobal, secondMap, _) <- that.getMatches(symbols, newGlobal, local)
+        } yield (finalGlobal, firstMap ++ secondMap, v)
+      }
+    }
+
+    def map[B](f: A => B): Matching[B] = new Matching[B] {
+      override def getMatches(
+        symbols: trees.Symbols,
+        global: Map[(String, Boolean), inox.Identifier],
+        local: Map[(String, Boolean), inox.Identifier]):
+          Option[(Map[(String, Boolean), inox.Identifier], Map[Int, Any], B)] = {
+
+        for {
+          (newGlobal, mapping, v) <- self.getMatches(symbols, global, local)
+        } yield (newGlobal, mapping, f(v))
+      }
+    }
+
+    // Monadic bind: the second matching may depend on the first's result. Enables for-comprehensions.
+    def flatMap[B](that: A => Matching[B]): Matching[B] = new Matching[B] {
+      override def getMatches(
+        symbols: trees.Symbols,
+        global: Map[(String, Boolean), inox.Identifier],
+        local: Map[(String, Boolean), inox.Identifier]):
+          Option[(Map[(String, Boolean), inox.Identifier], Map[Int, Any], B)] = {
+
+        for {
+          (newGlobal, firstMap, v1) <- self.getMatches(symbols, global, local)
+          (finalGlobal, secondMap, v2) <- that(v1).getMatches(symbols, newGlobal, local)
+        } yield (finalGlobal, firstMap ++ secondMap, v2)
+      }
+    }
+
+    def withValue[B](value: B): Matching[B] = this.map(_ => value) // Replaces the result, keeping the matching itself.
+  }
+
+  object Matching {
+    // Succeeds iff `name` is not yet bound, or is bound to the same identifier; records the binding globally.
+    def ensureConsistent(name: String, identifier: inox.Identifier, isType: Boolean=false): Matching[Unit] =
+      new Matching[Unit] {
+        override def getMatches(
+          symbols: trees.Symbols,
+          global: Map[(String, Boolean), inox.Identifier],
+          local: Map[(String, Boolean), inox.Identifier]):
+            Option[(Map[(String, Boolean), inox.Identifier], Map[Int, Any], Unit)] = {
+
+          local.get((name, isType)).orElse(global.get((name, isType))) match {
+            case None => Some((global + ((name, isType) -> identifier), Map(), ())) // First sight of the name: commit it.
+            case Some(otherIdentifier) => if (identifier != otherIdentifier) None else Some((global, Map(), ()))
+          }
+        }
+      }
+
+    // Partial-function dispatch; undefined scrutinees fail the match.
+    def collect[A, B](scrutinee: A)(fun: PartialFunction[A, Matching[B]]): Matching[B] =
+      fun.lift(scrutinee).getOrElse(Matching.fail)
+
+    def conditionally(condition: Boolean): Matching[Unit] =
+      if (condition) success else fail
+
+    // An absent optional matching is vacuously successful.
+    def optionally[A](option: Option[Matching[A]]): Matching[Unit] =
+      option.map(_.withValue(())).getOrElse(Matching.success)
+
+    def optionally[A](option: Option[Matching[A]], default: => A): Matching[A] =
+      option.getOrElse(Matching.pure(default))
+
+    // Succeeds and records the given hole assignments.
+    def apply(pairs: (Int, Any)*): Matching[Unit] = new Matching[Unit] {
+      override def getMatches(
+        symbols: trees.Symbols,
+        global: Map[(String, Boolean), inox.Identifier],
+        local: Map[(String, Boolean), inox.Identifier]):
+          Option[(Map[(String, Boolean), inox.Identifier], Map[Int, Any], Unit)] =
+        Some((global, Map(pairs: _*), ()))
+    }
+
+    def pure[A](x: A): Matching[A] = new Matching[A] {
+      override def getMatches(
+        symbols: trees.Symbols,
+        global: Map[(String, Boolean), inox.Identifier],
+        local: Map[(String, Boolean), inox.Identifier]):
+          Option[(Map[(String, Boolean), inox.Identifier], Map[Int, Any], A)] = Some((global, Map(), x))
+    }
+
+    val success: Matching[Unit] = new Matching[Unit] {
+      override def getMatches(
+        symbols: trees.Symbols,
+        global: Map[(String, Boolean), inox.Identifier],
+        local: Map[(String, Boolean), inox.Identifier]):
+          Option[(Map[(String, Boolean), inox.Identifier], Map[Int, Any], Unit)] = Some((global, Map(), ()))
+    }
+
+    val fail: Matching[Nothing] = new Matching[Nothing] {
+      override def getMatches(
+        symbols: trees.Symbols,
+        global: Map[(String, Boolean), inox.Identifier],
+        local: Map[(String, Boolean), inox.Identifier]):
+          Option[(Map[(String, Boolean), inox.Identifier], Map[Int, Any], Nothing)] = None
+    }
+
+    // Gives the matching access to the symbol table (e.g. for type lookups).
+    def withSymbols[A](f: trees.Symbols => Matching[A]): Matching[A] = new Matching[A] {
+      override def getMatches(
+        symbols: trees.Symbols,
+        global: Map[(String, Boolean), inox.Identifier],
+        local: Map[(String, Boolean), inox.Identifier]):
+          Option[(Map[(String, Boolean), inox.Identifier], Map[Int, Any], A)] =
+        f(symbols).getMatches(symbols, global, local)
+    }
+
+    // All matchings must succeed, left to right; results are collected in order.
+    def sequence[A](matchings: Seq[Matching[A]]): Matching[Seq[A]] = {
+
+      matchings.foldLeft(Matching.pure(Seq[A]())) {
+        case (acc, matching) => for {
+          xs <- acc
+          x <- matching
+        } yield xs :+ x
+      }
+    }
+  }
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/extraction/extractors/ADTsExtractors.scala b/src/main/scala/inox/parser/extraction/extractors/ADTsExtractors.scala
new file mode 100644
index 000000000..8306a51a9
--- /dev/null
+++ b/src/main/scala/inox/parser/extraction/extractors/ADTsExtractors.scala
@@ -0,0 +1,33 @@
+package inox
+package parser
+package extraction
+package extractors
+
+trait ADTsExtractors { self: Extractors => // Matches ADT sort/constructor templates against elaborated Inox definitions.
+
+  import ADTs._
+  class SortX extends Extractor[Sort, trees.ADTSort, Unit] {
+    override def extract(template: Sort, scrutinee: trees.ADTSort): Matching[Unit] =
+      DefIdX.extract(template.identifier, scrutinee.id).flatMap { optPair =>
+        DefIdSeqX.extract(template.typeParams, scrutinee.tparams.map(_.id)).flatMap { optPairs =>
+          ConstructorSeqX.extract(template.constructors, scrutinee.constructors)
+            .extendLocal(optPair.toSeq ++ optPairs.flatten, isType=true) // Sort name and type params scope over the constructors.
+            .withValue(())
+        }
+      }
+  }
+  val SortX = new SortX
+
+  class ConstructorX extends Extractor[Constructor, trees.ADTConstructor, Unit] {
+    override def extract(template: Constructor, scrutinee: trees.ADTConstructor): Matching[Unit] = template match {
+      case ConstructorValue(templateIdentifier, templateParams) =>
+        DefIdX.extract(templateIdentifier, scrutinee.id) <>
+        BindingSeqX.extract(templateParams, scrutinee.fields)
+      case ConstructorHole(index) => Matching(index -> scrutinee) // A hole captures the whole constructor.
+    }
+  }
+  val ConstructorX = new ConstructorX
+
+  class ConstructorSeqX extends HSeqX[Constructor, trees.ADTConstructor, Unit](ConstructorX, ())
+  val ConstructorSeqX = new ConstructorSeqX
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/extraction/extractors/BindingExtractors.scala b/src/main/scala/inox/parser/extraction/extractors/BindingExtractors.scala
new file mode 100644
index 000000000..4de219a5a
--- /dev/null
+++ b/src/main/scala/inox/parser/extraction/extractors/BindingExtractors.scala
@@ -0,0 +1,25 @@
+package inox
+package parser
+package extraction
+package extractors
+
+trait BindingExtractors { self: Extractors => // Matches binding templates against ValDefs; yields the (name, id) pair to scope, if any.
+
+  import Bindings._
+
+  class BindingX extends Extractor[Binding, trees.ValDef, Option[(String, inox.Identifier)]] {
+    override def extract(template: Binding, scrutinee: trees.ValDef): Matching[Option[(String, inox.Identifier)]] = template match {
+      case BindingHole(index) =>
+        Matching(index -> scrutinee).withValue(None) // Hole captures the ValDef; introduces no name binding.
+      case InferredValDef(identifier) =>
+        DefIdX.extract(identifier, scrutinee.id) // Type is not constrained when omitted in the template.
+      case ExplicitValDef(identifier, tpe) =>
+        DefIdX.extract(identifier, scrutinee.id) <<
+        TypeX.extract(tpe, scrutinee.tpe) // Keep the identifier result; also require the declared type to match.
+    }
+  }
+  val BindingX = new BindingX
+
+  class BindingSeqX extends HSeqX[Binding, trees.ValDef, Option[(String, inox.Identifier)]](BindingX, None)
+  val BindingSeqX = new BindingSeqX
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/extraction/extractors/ExprExtractors.scala b/src/main/scala/inox/parser/extraction/extractors/ExprExtractors.scala
new file mode 100644
index 000000000..943e37686
--- /dev/null
+++ b/src/main/scala/inox/parser/extraction/extractors/ExprExtractors.scala
@@ -0,0 +1,268 @@
+package inox
+package parser
+package extraction
+package extractors
+
+trait ExprExtractors { self: Extractors => // Matches expression templates against Inox expressions, filling holes.
+
+  import Exprs._
+
+  class ExprX extends Extractor[Expr, trees.Expr, Unit] {
+    override def extract(template: Expr, scrutinee: trees.Expr): Matching[Unit] = template match {
+      case ExprHole(index) => Matching(index -> scrutinee) // A hole captures any expression.
+      case UnitLiteral() => Matching.collect(scrutinee) {
+        case trees.UnitLiteral() => Matching.success
+      }
+      case Variable(id) => Matching.collect(scrutinee) {
+        case trees.Variable(sId, _, _) => ExprUseIdX.extract(id, sId) // Type and flags are ignored; only the identifier must agree.
+      }
+      case IntegerLiteral(number) => Matching.collect(scrutinee) {
+        // A plain integer template matches bit-vector, integer, and (whole) fraction literals alike.
+        case trees.BVLiteral(true, value, base) =>
+          Matching.conditionally(toBitSet(number, base) == value)
+        case trees.IntegerLiteral(value) =>
+          Matching.conditionally(value == number)
+        case trees.FractionLiteral(numerator, denominator) =>
+          Matching.conditionally(numerator == denominator * number) // Matches fractions equal to the whole number.
+      }
+      case FractionLiteral(numerator, denominator) => Matching.collect(scrutinee) {
+        case trees.FractionLiteral(otherNumerator, otherDenominator) =>
+          Matching.conditionally(otherNumerator * denominator == numerator * otherDenominator) // Cross-multiplication: equality up to normalization.
+      }
+      case StringLiteral(string) => Matching.collect(scrutinee) {
+        case trees.StringLiteral(otherString) =>
+          Matching.conditionally(string == otherString)
+      }
+      case CharLiteral(character) => Matching.collect(scrutinee) {
+        case trees.CharLiteral(otherCharacter) =>
+          Matching.conditionally(character == otherCharacter)
+      }
+      case BooleanLiteral(value) => Matching.collect(scrutinee) {
+        case trees.BooleanLiteral(otherValue) =>
+          Matching.conditionally(value == otherValue)
+      }
+      case Abstraction(quantifier, bindings, body) => Matching.collect(scrutinee) {
+        case trees.Forall(sBindings, sBody) if quantifier == Forall =>
+          BindingSeqX.extract(bindings, sBindings).flatMap { opts =>
+            ExprX.extract(body, sBody).extendLocal(opts.flatten) // Bound names scope over the body only.
+          }
+        case trees.Lambda(sBindings, sBody) if quantifier == Lambda =>
+          BindingSeqX.extract(bindings, sBindings).flatMap { opts =>
+            ExprX.extract(body, sBody).extendLocal(opts.flatten)
+          }
+      }
+      case Application(callee, args) => Matching.collect(scrutinee) {
+        case trees.Application(sCallee, sArgs) =>
+          ExprX.extract(callee, sCallee) <> ExprSeqX.extract(args, sArgs)
+      }
+      case Assume(pred, body) => Matching.collect(scrutinee) {
+        case trees.Assume(sPred, sBody) =>
+          ExprX.extract(pred, sPred) <> ExprX.extract(body, sBody)
+      }
+      case SetConstruction(optTypes, elems) => Matching.collect(scrutinee) {
+        case trees.FiniteSet(sElems, sType) =>
+          Matching.optionally(optTypes.map(TypeSeqX.extract(_, Seq(sType)))) <> // Element type annotation is optional in the template.
+          ExprSeqX.extract(elems, sElems)
+      }
+      case BagConstruction(optTypes, elems) => Matching.collect(scrutinee) {
+        case trees.FiniteBag(sElems, sType) =>
+          Matching.optionally(optTypes.map(TypeSeqX.extract(_, Seq(sType)))) <>
+          ExprPairSeqX.extract(elems, sElems)
+      }
+      case MapConstruction(optTypes, elems, default) => Matching.collect(scrutinee) {
+        case trees.FiniteMap(sElems, sDefault, sFrom, sTo) =>
+          Matching.optionally(optTypes.map(TypeSeqX.extract(_, Seq(sFrom, sTo)))) <>
+          ExprPairSeqX.extract(elems, sElems) <> ExprX.extract(default, sDefault)
+      }
+      case Let(binding, value, body) => Matching.collect(scrutinee) {
+        case trees.Let(sBinding, sValue, sBody) =>
+          BindingX.extract(binding, sBinding).flatMap { opt =>
+            ExprX.extract(value, sValue) <> (ExprX.extract(body, sBody).extendLocal(opt.toSeq)) // The bound name is visible in the body, not the value.
+          }
+      }
+      case UnaryOperation(operator, expr) => {
+        import Unary._
+
+        Matching.collect((operator, scrutinee)) {
+          case (Minus, trees.UMinus(sExpr)) =>
+            ExprX.extract(expr, sExpr)
+          case (Not, trees.Not(sExpr)) =>
+            ExprX.extract(expr, sExpr)
+          case (BVNot, trees.BVNot(sExpr)) =>
+            ExprX.extract(expr, sExpr)
+        }
+      }
+      case BinaryOperation(operator, lhs, rhs) => {
+        import Binary._
+
+        // Exhaustive operator-by-operator dispatch: each template operator matches exactly its tree node.
+        Matching.collect((operator, scrutinee)) {
+          case (Plus, trees.Plus(sLhs, sRhs)) =>
+            ExprX.extract(lhs, sLhs) <> ExprX.extract(rhs, sRhs)
+          case (Minus, trees.Minus(sLhs, sRhs)) =>
+            ExprX.extract(lhs, sLhs) <> ExprX.extract(rhs, sRhs)
+          case (Times, trees.Times(sLhs, sRhs)) =>
+            ExprX.extract(lhs, sLhs) <> ExprX.extract(rhs, sRhs)
+          case (Division, trees.Division(sLhs, sRhs)) =>
+            ExprX.extract(lhs, sLhs) <> ExprX.extract(rhs, sRhs)
+          case (Remainder, trees.Remainder(sLhs, sRhs)) =>
+            ExprX.extract(lhs, sLhs) <> ExprX.extract(rhs, sRhs)
+          case (Modulo, trees.Modulo(sLhs, sRhs)) =>
+            ExprX.extract(lhs, sLhs) <> ExprX.extract(rhs, sRhs)
+          case (Implies, trees.Implies(sLhs, sRhs)) =>
+            ExprX.extract(lhs, sLhs) <> ExprX.extract(rhs, sRhs)
+          case (Equals, trees.Equals(sLhs, sRhs)) =>
+            ExprX.extract(lhs, sLhs) <> ExprX.extract(rhs, sRhs)
+          case (LessEquals, trees.LessEquals(sLhs, sRhs)) =>
+            ExprX.extract(lhs, sLhs) <> ExprX.extract(rhs, sRhs)
+          case (LessThan, trees.LessThan(sLhs, sRhs)) =>
+            ExprX.extract(lhs, sLhs) <> ExprX.extract(rhs, sRhs)
+          case (GreaterEquals, trees.GreaterEquals(sLhs, sRhs)) =>
+            ExprX.extract(lhs, sLhs) <> ExprX.extract(rhs, sRhs)
+          case (GreaterThan, trees.GreaterThan(sLhs, sRhs)) =>
+            ExprX.extract(lhs, sLhs) <> ExprX.extract(rhs, sRhs)
+          case (BVAnd, trees.BVAnd(sLhs, sRhs)) =>
+            ExprX.extract(lhs, sLhs) <> ExprX.extract(rhs, sRhs)
+          case (BVOr, trees.BVOr(sLhs, sRhs)) =>
+            ExprX.extract(lhs, sLhs) <> ExprX.extract(rhs, sRhs)
+          case (BVXor, trees.BVXor(sLhs, sRhs)) =>
+            ExprX.extract(lhs, sLhs) <> ExprX.extract(rhs, sRhs)
+          case (BVShiftLeft, trees.BVShiftLeft(sLhs, sRhs)) =>
+            ExprX.extract(lhs, sLhs) <> ExprX.extract(rhs, sRhs)
+          case (BVAShiftRight, trees.BVAShiftRight(sLhs, sRhs)) =>
+            ExprX.extract(lhs, sLhs) <> ExprX.extract(rhs, sRhs)
+          case (BVLShiftRight, trees.BVLShiftRight(sLhs, sRhs)) =>
+            ExprX.extract(lhs, sLhs) <> ExprX.extract(rhs, sRhs)
+        }
+      }
+      case NaryOperation(operator, args) => {
+        import NAry._
+
+        Matching.collect((operator, scrutinee)) {
+          case (And, trees.And(sArgs)) =>
+            ExprSeqX.extract(args, sArgs)
+          case (Or, trees.Or(sArgs)) =>
+            ExprSeqX.extract(args, sArgs)
+        }.withValue(())
+      }
+      case If(condition, thenn, elze) => Matching.collect(scrutinee) {
+        case trees.IfExpr(sCondition, sThenn, sElze) =>
+          ExprX.extract(condition, sCondition) <>
+          ExprX.extract(thenn, sThenn) <>
+          ExprX.extract(elze, sElze)
+      }
+      case Cast(mode, expr, size) => {
+        import Casts._
+
+        // NOTE(review): only signed bit-vector casts (BVType(true, _)) are matched here — confirm unsigned casts are intentionally excluded.
+        Matching.collect((mode, scrutinee)) {
+          case (Widen, trees.BVWideningCast(sExpr, trees.BVType(true, sSize))) if size == sSize =>
+            ExprX.extract(expr, sExpr)
+          case (Narrow, trees.BVNarrowingCast(sExpr, trees.BVType(true, sSize))) if size == sSize =>
+            ExprX.extract(expr, sExpr)
+        }
+      }
+      case Choose(binding, body) => Matching.collect(scrutinee) {
+        case trees.Choose(sBinding, sBody) =>
+          BindingX.extract(binding, sBinding).flatMap { opt =>
+            ExprX.extract(body, sBody).extendLocal(opt.toSeq)
+          }
+      }
+      case Invocation(id, optTypeArgs, args) => Matching.collect(scrutinee) {
+        // A template invocation may match a function call, an ADT construction, or a lambda application.
+        case trees.FunctionInvocation(sId, sTypeArgs, sArgs) =>
+          ExprUseIdX.extract(id, sId) <>
+          Matching.optionally(optTypeArgs.map(TypeSeqX.extract(_, sTypeArgs))) <>
+          ExprSeqX.extract(args, sArgs)
+        case trees.ADT(sId, sTypeArgs, sArgs) =>
+          ExprUseIdX.extract(id, sId) <>
+          Matching.optionally(optTypeArgs.map(TypeSeqX.extract(_, sTypeArgs))) <>
+          ExprSeqX.extract(args, sArgs)
+        case trees.Application(trees.Variable(sId, _, _), sArgs) =>
+          ExprUseIdX.extract(id, sId) <>
+          Matching.conditionally(optTypeArgs.isEmpty) <> // Lambdas take no type arguments.
+          ExprSeqX.extract(args, sArgs)
+      }
+      case PrimitiveInvocation(fun, optTypeArgs, args) => {
+        import Primitive._
+
+        // Recover the implicit type arguments from the scrutinee's type before matching the primitive call.
+        Matching.withSymbols { symbols =>
+          Matching.collect(scrutinee.getType(symbols)) {
+            case trees.SetType(tpe) => Matching.pure(Seq(tpe))
+            case trees.BagType(tpe) => Matching.pure(Seq(tpe))
+            case trees.MapType(from, to) => Matching.pure(Seq(from, to))
+            case _ => Matching.pure(Seq())
+          }
+        }.flatMap { (sTypeArgs) =>
+          Matching.optionally(optTypeArgs.map(TypeSeqX.extract(_, sTypeArgs))) <>
+          Matching.collect((fun, scrutinee)) {
+            case (SetAdd, trees.SetAdd(sLhs, sRhs)) =>
+              ExprSeqX.extract(args, Seq(sLhs, sRhs))
+            case (ElementOfSet, trees.ElementOfSet(sLhs, sRhs)) =>
+              ExprSeqX.extract(args, Seq(sLhs, sRhs))
+            case (SetIntersection, trees.SetIntersection(sLhs, sRhs)) =>
+              ExprSeqX.extract(args, Seq(sLhs, sRhs))
+            case (SetUnion, trees.SetUnion(sLhs, sRhs)) =>
+              ExprSeqX.extract(args, Seq(sLhs, sRhs))
+            case (SetDifference, trees.SetDifference(sLhs, sRhs)) =>
+              ExprSeqX.extract(args, Seq(sLhs, sRhs))
+            case (BagAdd, trees.BagAdd(sLhs, sRhs)) =>
+              ExprSeqX.extract(args, Seq(sLhs, sRhs))
+            case (MultiplicityInBag, trees.MultiplicityInBag(sLhs, sRhs)) =>
+              ExprSeqX.extract(args, Seq(sLhs, sRhs))
+            case (BagIntersection, trees.BagIntersection(sLhs, sRhs)) =>
+              ExprSeqX.extract(args, Seq(sLhs, sRhs))
+            case (BagUnion, trees.BagUnion(sLhs, sRhs)) =>
+              ExprSeqX.extract(args, Seq(sLhs, sRhs))
+            case (BagDifference, trees.BagDifference(sLhs, sRhs)) =>
+              ExprSeqX.extract(args, Seq(sLhs, sRhs))
+            case (MapApply, trees.MapApply(sLhs, sRhs)) =>
+              ExprSeqX.extract(args, Seq(sLhs, sRhs))
+            case (MapUpdated, trees.MapUpdated(sLhs, sMid, sRhs)) =>
+              ExprSeqX.extract(args, Seq(sLhs, sMid, sRhs))
+            case (Subset, trees.SubsetOf(sLhs, sRhs)) =>
+              ExprSeqX.extract(args, Seq(sLhs, sRhs))
+            case (SubString, trees.SubString(sLhs, sMid, sRhs)) =>
+              ExprSeqX.extract(args, Seq(sLhs, sMid, sRhs))
+            case (StringConcat, trees.StringConcat(sLhs, sRhs)) =>
+              ExprSeqX.extract(args, Seq(sLhs, sRhs))
+            case (StringLength, trees.StringLength(sExpr)) =>
+              ExprSeqX.extract(args, Seq(sExpr))
+          }
+        }
+      }
+      case IsConstructor(expr, id) => Matching.collect(scrutinee) {
+        case trees.IsConstructor(sExpr, sId) =>
+          ExprX.extract(expr, sExpr) <>
+          ExprUseIdX.extract(id, sId)
+      }
+      case Selection(expr, id) => Matching.collect(scrutinee) {
+        case trees.ADTSelector(sExpr, sId) =>
+          ExprX.extract(expr, sExpr) <> FieldIdX.extract(id, sId)
+      }
+      case Tuple(exprs) => Matching.collect(scrutinee) {
+        case trees.Tuple(sExprs) =>
+          ExprSeqX.extract(exprs, sExprs).withValue(())
+      }
+      case TupleSelection(expr, index) => Matching.collect(scrutinee) {
+        case trees.TupleSelect(sExpr, sIndex) if index == sIndex =>
+          ExprX.extract(expr, sExpr)
+      }
+      case TypeAnnotation(expr, tpe) =>
+        Matching.withSymbols { implicit s: trees.Symbols =>
+          ExprX.extract(expr, scrutinee) <> TypeX.extract(tpe, scrutinee.getType) // Annotation matches the scrutinee's computed type.
+        }
+    }
+  }
+  val ExprX = new ExprX
+
+  class ExprPairX extends Extractor[ExprPair, (trees.Expr, trees.Expr), Unit] { // Key/value pairs for bag and map constructions.
+    override def extract(template: ExprPair, scrutinee: (trees.Expr, trees.Expr)): Matching[Unit] = template match {
+      case PairHole(index) => Matching(index -> scrutinee)
+      case Pair(lhs, rhs) => ExprX.extract(lhs, scrutinee._1) <> ExprX.extract(rhs, scrutinee._2)
+    }
+  }
+  val ExprPairX = new ExprPairX
+
+  class ExprSeqX extends HSeqX[Expr, trees.Expr, Unit](ExprX, ())
+  val ExprSeqX = new ExprSeqX
+
+  class ExprPairSeqX extends HSeqX[ExprPair, (trees.Expr, trees.Expr), Unit](ExprPairX, ())
+  val ExprPairSeqX = new ExprPairSeqX
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/extraction/extractors/FunctionExtractors.scala b/src/main/scala/inox/parser/extraction/extractors/FunctionExtractors.scala
new file mode 100644
index 000000000..ccdda3a96
--- /dev/null
+++ b/src/main/scala/inox/parser/extraction/extractors/FunctionExtractors.scala
@@ -0,0 +1,25 @@
+package inox
+package parser
+package extraction
+package extractors
+
+trait FunctionExtractors { self: Extractors =>
+
+ import Functions.Function
+
+ class FunctionX extends Extractor[Function, trees.FunDef, Unit] {
+ override def extract(template: Function, scrutinee: trees.FunDef): Matching[Unit] = {
+ DefIdX.extract(template.identifier, scrutinee.id).flatMap { optName =>
+ DefIdSeqX.extract(template.typeParams, scrutinee.tparams.map(_.id)).flatMap { optTypeParams =>
+ BindingSeqX.extract(template.params, scrutinee.params).flatMap { optParams =>
+ Matching.optionally(template.returnType.map(st => TypeX.extract(st, scrutinee.returnType))) <>
+ ExprX.extract(template.body, scrutinee.fullBody)
+ .extendLocal(optParams.flatten)
+ .extendLocal(optName.toSeq)
+ }.extendLocal(optTypeParams.flatten, isType=true)
+ }
+ }
+ }
+ }
+ val FunctionX = new FunctionX
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/extraction/extractors/IdentifierExtractors.scala b/src/main/scala/inox/parser/extraction/extractors/IdentifierExtractors.scala
new file mode 100644
index 000000000..bd14ace77
--- /dev/null
+++ b/src/main/scala/inox/parser/extraction/extractors/IdentifierExtractors.scala
@@ -0,0 +1,44 @@
+package inox
+package parser
+package extraction
+package extractors
+
+trait IdentifierExtractors { self: Extractors =>
+
+ import Identifiers._
+
+ class ExprUseIdX extends Extractor[Identifier, inox.Identifier, Unit] {
+ override def extract(template: Identifier, scrutinee: inox.Identifier): Matching[Unit] = template match {
+ case IdentifierHole(index) => Matching(index -> scrutinee)
+ case IdentifierName(name) => Matching.ensureConsistent(name, scrutinee, isType=false)
+ }
+ }
+ val ExprUseIdX = new ExprUseIdX
+
+ class TypeUseIdX extends Extractor[Identifier, inox.Identifier, Unit] {
+ override def extract(template: Identifier, scrutinee: inox.Identifier): Matching[Unit] = template match {
+ case IdentifierHole(index) => Matching(index -> scrutinee)
+ case IdentifierName(name) => Matching.ensureConsistent(name, scrutinee, isType=true)
+ }
+ }
+ val TypeUseIdX = new TypeUseIdX
+
+ class DefIdX extends Extractor[Identifier, inox.Identifier, Option[(String, inox.Identifier)]] {
+ override def extract(template: Identifier, scrutinee: inox.Identifier): Matching[Option[(String, inox.Identifier)]] = template match {
+ case IdentifierHole(index) => Matching(index -> scrutinee).withValue(None)
+ case IdentifierName(name) => Matching.pure(Some(name -> scrutinee))
+ }
+ }
+ val DefIdX = new DefIdX
+
+ class DefIdSeqX extends HSeqX[Identifier, inox.Identifier, Option[(String, inox.Identifier)]](DefIdX, None)
+ val DefIdSeqX = new DefIdSeqX
+
+ class FieldIdX extends Extractor[Identifier, inox.Identifier, Unit] {
+ override def extract(template: Identifier, scrutinee: inox.Identifier): Matching[Unit] = template match {
+ case IdentifierHole(index) => Matching(index -> scrutinee)
+ case IdentifierName(name) => Matching.conditionally(scrutinee.name == name)
+ }
+ }
+ val FieldIdX = new FieldIdX
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/extraction/extractors/TypeExtractors.scala b/src/main/scala/inox/parser/extraction/extractors/TypeExtractors.scala
new file mode 100644
index 000000000..ec2c13076
--- /dev/null
+++ b/src/main/scala/inox/parser/extraction/extractors/TypeExtractors.scala
@@ -0,0 +1,79 @@
+package inox
+package parser
+package extraction
+package extractors
+
+trait TypeExtractors { self: Extractors =>
+
+ import Types._
+
+ class TypeX extends Extractor[Type, trees.Type, Unit] {
+ override def extract(template: Type, scrutinee: trees.Type): Matching[Unit] = template match {
+ case TypeHole(index) => Matching(index -> scrutinee)
+ case Primitive(tpe) => {
+ import Primitives._
+ Matching.collect((tpe, scrutinee)) {
+ case (UnitType, trees.UnitType()) => Matching.success
+ case (CharType, trees.CharType()) => Matching.success
+ case (StringType, trees.StringType()) => Matching.success
+ case (IntegerType, trees.IntegerType()) => Matching.success
+ case (BVType(signed1, size1), trees.BVType(signed2, size2)) if signed1 == signed2 && size1 == size2 => Matching.success
+ case (RealType, trees.RealType()) => Matching.success
+ case (BooleanType, trees.BooleanType()) => Matching.success
+ }
+ }
+ case FunctionType(froms, to) => Matching.collect(scrutinee) {
+ case trees.FunctionType(sFroms, sTo) =>
+ TypeSeqX.extract(froms, sFroms) <> TypeX.extract(to, sTo)
+ }
+ case TupleType(elems) => Matching.collect(scrutinee) {
+ case trees.TupleType(sElems) =>
+ TypeSeqX.extract(elems, sElems).withValue(())
+ }
+ case Operation(operator, args) => {
+ import Operators._
+
+ Matching.collect((operator, scrutinee)) {
+ case (Set, trees.SetType(sElem)) =>
+ TypeSeqX.extract(args, Seq(sElem)).withValue(())
+ case (Bag, trees.BagType(sElem)) =>
+ TypeSeqX.extract(args, Seq(sElem)).withValue(())
+ case (Map, trees.MapType(sFrom, sTo)) =>
+ TypeSeqX.extract(args, Seq(sFrom, sTo)).withValue(())
+ }
+ }
+ case Invocation(id, args) => Matching.collect(scrutinee) {
+ case trees.ADTType(sId, sArgs) =>
+ TypeUseIdX.extract(id, sId) <> TypeSeqX.extract(args, sArgs)
+ }
+ case RefinementType(binding, pred) => Matching.collect(scrutinee) {
+ case trees.RefinementType(sBinding, sPred) =>
+ BindingX.extract(binding, sBinding).flatMap { opt =>
+ ExprX.extract(pred, sPred).extendLocal(opt.toSeq)
+ }
+ }
+ case Variable(id) => Matching.collect(scrutinee) {
+ case trees.ADTType(sId, Seq()) =>
+ TypeUseIdX.extract(id, sId)
+ case trees.TypeParameter(sId, _) =>
+ TypeUseIdX.extract(id, sId)
+ }
+ case PiType(bindings, to) => Matching.collect(scrutinee) {
+ case trees.PiType(sBindings, sTo) =>
+ BindingSeqX.extract(bindings, sBindings).flatMap { opts =>
+ TypeX.extract(to, sTo).extendLocal(opts.flatten)
+ }
+ }
+ case SigmaType(bindings, to) => Matching.collect(scrutinee) {
+ case trees.SigmaType(sBindings, sTo) =>
+ BindingSeqX.extract(bindings, sBindings).flatMap { opts =>
+ TypeX.extract(to, sTo).extendLocal(opts.flatten)
+ }
+ }
+ }
+ }
+ val TypeX = new TypeX
+
+ class TypeSeqX extends HSeqX[Type, trees.Type, Unit](TypeX, ())
+ val TypeSeqX = new TypeSeqX
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/irs/ADTs.scala b/src/main/scala/inox/parser/irs/ADTs.scala
new file mode 100644
index 000000000..2aadc945b
--- /dev/null
+++ b/src/main/scala/inox/parser/irs/ADTs.scala
@@ -0,0 +1,40 @@
+package inox
+package parser
+package irs
+
+trait ADTs { self: IRs =>
+
+ object ADTs {
+ case class Sort(
+ identifier: Identifiers.Identifier,
+ typeParams: Identifiers.IdentifierSeq,
+ constructors: ConstructorSeq) extends IR {
+
+ override def getHoles =
+ identifier.getHoles ++
+ typeParams.getHoles ++
+ constructors.getHoles
+ }
+
+ abstract class Constructor extends IR
+
+ case class ConstructorValue(
+ identifier: Identifiers.Identifier,
+ params: Bindings.BindingSeq) extends Constructor {
+
+ override def getHoles =
+ identifier.getHoles ++
+ params.getHoles
+ }
+
+ case class ConstructorHole(index: Int) extends Constructor {
+ override def getHoles = Seq(Hole(index, HoleTypes.Constructor))
+ }
+
+ type ConstructorSeq = HSeq[Constructor]
+ }
+
+ implicit object holeTypableConstructor extends HoleTypable[ADTs.Constructor] {
+ override val holeType = HoleTypes.Constructor
+ }
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/irs/Bindings.scala b/src/main/scala/inox/parser/irs/Bindings.scala
new file mode 100644
index 000000000..b00f5df65
--- /dev/null
+++ b/src/main/scala/inox/parser/irs/Bindings.scala
@@ -0,0 +1,28 @@
+package inox
+package parser
+package irs
+
+trait Bindings { self: IRs =>
+
+ object Bindings {
+
+ abstract class Binding extends IR {
+ override def getHoles: Seq[Hole] = this match {
+ case BindingHole(index) => Seq(Hole(index, HoleTypes.ValDef))
+ case ExplicitValDef(identifier, tpe) => identifier.getHoles ++ tpe.getHoles
+ case InferredValDef(identifier) => identifier.getHoles
+ }
+ }
+
+ case class InferredValDef(identifier: Identifiers.Identifier) extends Binding
+ case class ExplicitValDef(identifier: Identifiers.Identifier, tpe: Types.Type) extends Binding
+ case class BindingHole(index: Int) extends Binding
+
+ type BindingSeq = HSeq[Binding]
+ }
+
+ implicit object holeTypableBinding extends HoleTypable[Bindings.Binding] {
+ override val holeType = HoleTypes.ValDef
+ }
+
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/irs/Exprs.scala b/src/main/scala/inox/parser/irs/Exprs.scala
new file mode 100644
index 000000000..a95083b82
--- /dev/null
+++ b/src/main/scala/inox/parser/irs/Exprs.scala
@@ -0,0 +1,154 @@
+package inox
+package parser
+package irs
+
+trait Exprs { self: IRs =>
+
+ object Exprs {
+ abstract class Quantifier
+ case object Forall extends Quantifier
+ case object Lambda extends Quantifier
+
+ object Unary {
+ abstract class Operator
+ case object Minus extends Operator
+ case object Not extends Operator
+ case object BVNot extends Operator
+ }
+
+ object Binary {
+ abstract class Operator
+ case object Plus extends Operator
+ case object Minus extends Operator
+ case object Times extends Operator
+ case object Division extends Operator
+ case object Modulo extends Operator
+ case object Remainder extends Operator
+ case object Implies extends Operator
+ case object Equals extends Operator
+ case object LessEquals extends Operator
+ case object LessThan extends Operator
+ case object GreaterEquals extends Operator
+ case object GreaterThan extends Operator
+ case object BVAnd extends Operator
+ case object BVOr extends Operator
+ case object BVXor extends Operator
+ case object BVShiftLeft extends Operator
+ case object BVAShiftRight extends Operator
+ case object BVLShiftRight extends Operator
+ }
+
+ object NAry {
+ abstract class Operator
+ case object And extends Operator
+ case object Or extends Operator
+ }
+
+ object Primitive {
+ abstract class Function(val name: String, val typeArgs: Int, val args: Int)
+ case object SetAdd extends Function("setAdd", 1, 2)
+ case object ElementOfSet extends Function("elementOfSet", 1, 2)
+ case object SetIntersection extends Function("setIntersection", 1, 2)
+ case object SetUnion extends Function("setUnion", 1, 2)
+ case object SetDifference extends Function("setDifference", 1, 2)
+ case object Subset extends Function("subset", 1, 2)
+ case object BagAdd extends Function("bagAdd", 1, 2)
+ case object MultiplicityInBag extends Function("multiplicityInBag", 1, 2)
+ case object BagIntersection extends Function("bagIntersection", 1, 2)
+ case object BagUnion extends Function("bagUnion", 1, 2)
+ case object BagDifference extends Function("bagDifference", 1, 2)
+ case object MapApply extends Function("mapApply", 2, 2)
+ case object MapUpdated extends Function("mapUpdated", 2, 3)
+ case object StringConcat extends Function("concatenate", 0, 2)
+ case object StringLength extends Function("length", 0, 1)
+ case object SubString extends Function("substring", 0, 3)
+ }
+
+ object Casts {
+ abstract class Mode
+ case object Widen extends Mode
+ case object Narrow extends Mode
+ }
+
+ abstract class Expr extends IR {
+ override def getHoles: Seq[Hole] = this match {
+ case ExprHole(index) => Seq(Hole(index, HoleTypes.Expr))
+ case SetConstruction(optType, elems) => optType.toSeq.flatMap(_.getHoles) ++ elems.getHoles
+ case BagConstruction(optType, elems) => optType.toSeq.flatMap(_.getHoles) ++ elems.getHoles
+ case MapConstruction(optTypes, elems, default) => optTypes.toSeq.flatMap(_.getHoles) ++ elems.getHoles ++ default.getHoles
+ case Variable(id) => id.getHoles
+ case UnaryOperation(_, expr) => expr.getHoles
+ case BinaryOperation(_, lhs, rhs) => lhs.getHoles ++ rhs.getHoles
+ case NaryOperation(_, args) => args.getHoles
+ case Invocation(id, typeArgs, args) => id.getHoles ++ typeArgs.toSeq.flatMap(_.getHoles) ++ args.getHoles
+ case PrimitiveInvocation(_, typeArgs, args) => typeArgs.toSeq.flatMap(_.getHoles) ++ args.getHoles
+ case Application(callee, args) => callee.getHoles ++ args.getHoles
+ case Abstraction(_, bindings, body) => bindings.getHoles ++ body.getHoles
+ case Let(binding, value, body) => binding.getHoles ++ value.getHoles ++ body.getHoles
+ case If(condition, thenn, elze) => condition.getHoles ++ thenn.getHoles ++ elze.getHoles
+ case Selection(structure, id) => structure.getHoles ++ id.getHoles
+ case Tuple(exprs) => exprs.getHoles
+ case TupleSelection(tuple, _) => tuple.getHoles
+ case TypeAnnotation(expr, tpe) => expr.getHoles ++ tpe.getHoles
+ case Choose(binding, body) => binding.getHoles ++ body.getHoles
+ case Assume(pred, body) => pred.getHoles ++ body.getHoles
+ case IsConstructor(expr, id) => expr.getHoles ++ id.getHoles
+ case Cast(_, expr, _) => expr.getHoles
+ case _ => Seq()
+ }
+ }
+
+ trait Literal
+
+ case class ExprHole(index: Int) extends Expr with Literal
+ case class UnitLiteral() extends Expr with Literal
+ case class BooleanLiteral(value: Boolean) extends Expr with Literal
+ case class IntegerLiteral(value: BigInt) extends Expr with Literal
+ case class FractionLiteral(numerator: BigInt, denominator: BigInt) extends Expr with Literal
+ case class StringLiteral(value: String) extends Expr with Literal
+ case class CharLiteral(value: Char) extends Expr with Literal
+ case class SetConstruction(optType: Option[Types.TypeSeq], elems: ExprSeq) extends Expr
+ case class BagConstruction(optType: Option[Types.TypeSeq], elems: ExprPairSeq) extends Expr
+ case class MapConstruction(optTypes: Option[Types.TypeSeq], elems: ExprPairSeq, default: Expr) extends Expr
+ case class Variable(id: Identifiers.Identifier) extends Expr
+ case class UnaryOperation(operator: Unary.Operator, expr: Expr) extends Expr
+ case class BinaryOperation(operator: Binary.Operator, lhs: Expr, rhs: Expr) extends Expr
+ case class NaryOperation(operator: NAry.Operator, args: ExprSeq) extends Expr
+ case class Invocation(id: Identifiers.Identifier, typeArgs: Option[Types.TypeSeq], args: ExprSeq) extends Expr
+ case class PrimitiveInvocation(fun: Primitive.Function, typeArgs: Option[Types.TypeSeq], args: ExprSeq) extends Expr
+ case class Application(callee: Expr, args: ExprSeq) extends Expr
+ case class Abstraction(quantifier: Quantifier, bindings: Bindings.BindingSeq, body: Expr) extends Expr
+ case class Let(binding: Bindings.Binding, value: Expr, body: Expr) extends Expr
+ case class If(condition: Expr, thenn: Expr, elze: Expr) extends Expr
+ case class Selection(structure: Expr, id: Identifiers.Identifier) extends Expr
+ case class Tuple(exprs: ExprSeq) extends Expr
+ case class TupleSelection(tuple: Expr, index: Int) extends Expr
+ case class TypeAnnotation(expr: Expr, tpe: Types.Type) extends Expr
+ case class Choose(binding: Bindings.Binding, body: Expr) extends Expr
+ case class Assume(pred: Expr, body: Expr) extends Expr
+ case class IsConstructor(expr: Expr, constructor: Identifiers.Identifier) extends Expr
+ case class Cast(mode: Casts.Mode, expr: Expr, target: Int) extends Expr
+
+ type ExprSeq = HSeq[Expr]
+
+ abstract class ExprPair extends IR {
+ override def getHoles: Seq[Hole] = this match {
+ case Pair(lhs, rhs) => lhs.getHoles ++ rhs.getHoles
+ case PairHole(index) => Seq(Hole(index, HoleTypes.Pair(HoleTypes.Expr, HoleTypes.Expr)))
+ }
+ }
+
+ case class Pair(lhs: Expr, rhs: Expr) extends ExprPair
+ case class PairHole(index: Int) extends ExprPair
+
+ type ExprPairSeq = HSeq[ExprPair]
+ }
+
+ implicit object holeTypableExpr extends HoleTypable[Exprs.Expr] {
+ override val holeType = HoleTypes.Expr
+ }
+
+ implicit object holeTypableExprPair extends HoleTypable[Exprs.ExprPair] {
+ override val holeType = HoleTypes.Pair(HoleTypes.Expr, HoleTypes.Expr)
+ }
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/irs/Functions.scala b/src/main/scala/inox/parser/irs/Functions.scala
new file mode 100644
index 000000000..ebbe62747
--- /dev/null
+++ b/src/main/scala/inox/parser/irs/Functions.scala
@@ -0,0 +1,23 @@
+package inox
+package parser
+package irs
+
+trait Functions { self: IRs =>
+
+ object Functions {
+ case class Function(
+ identifier: Identifiers.Identifier,
+ typeParams: Identifiers.IdentifierSeq,
+ params: Bindings.BindingSeq,
+ returnType: Option[Types.Type],
+ body: Exprs.Expr) extends IR {
+
+ override def getHoles =
+ identifier.getHoles ++
+ typeParams.getHoles ++
+ params.getHoles ++
+ returnType.toSeq.flatMap(_.getHoles) ++
+ body.getHoles
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/irs/Identifiers.scala b/src/main/scala/inox/parser/irs/Identifiers.scala
new file mode 100644
index 000000000..56131f44e
--- /dev/null
+++ b/src/main/scala/inox/parser/irs/Identifiers.scala
@@ -0,0 +1,23 @@
+package inox
+package parser
+package irs
+
+trait Identifiers { self: IRs =>
+
+ object Identifiers {
+ abstract class Identifier extends IR {
+ override def getHoles = this match {
+ case IdentifierHole(index) => Seq(Hole(index, HoleTypes.Identifier))
+ case _ => Seq()
+ }
+ }
+ case class IdentifierName(name: String) extends Identifier
+ case class IdentifierHole(index: Int) extends Identifier
+
+ type IdentifierSeq = HSeq[Identifier]
+ }
+
+ implicit object holeTypableIdentifier extends HoleTypable[Identifiers.Identifier] {
+ override val holeType = HoleTypes.Identifier
+ }
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/irs/Programs.scala b/src/main/scala/inox/parser/irs/Programs.scala
new file mode 100644
index 000000000..9b8124ac9
--- /dev/null
+++ b/src/main/scala/inox/parser/irs/Programs.scala
@@ -0,0 +1,19 @@
+package inox
+package parser
+package irs
+
+trait Programs { self: IRs =>
+
+ import ADTs._
+ import Functions._
+
+ object Programs {
+ case class Program(defs: Seq[Either[Sort, Function]]) extends IR {
+ override def getHoles =
+ defs.flatMap {
+ case Left(s) => s.getHoles
+ case Right(f) => f.getHoles
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parser/irs/Types.scala b/src/main/scala/inox/parser/irs/Types.scala
new file mode 100644
index 000000000..dc4b48673
--- /dev/null
+++ b/src/main/scala/inox/parser/irs/Types.scala
@@ -0,0 +1,58 @@
+package inox
+package parser
+package irs
+
+trait Types { self: IRs =>
+
+ object Types {
+ abstract class Type extends IR {
+ override def getHoles: Seq[Hole] = this match {
+ case TypeHole(index) => Seq(Hole(index, HoleTypes.Type))
+ case Primitive(_) => Seq()
+ case FunctionType(froms, to) => froms.getHoles ++ to.getHoles
+ case TupleType(elems) => elems.getHoles
+ case Invocation(id, args) => id.getHoles ++ args.getHoles
+ case Variable(id) => id.getHoles
+ case Operation(_, args) => args.getHoles
+ case RefinementType(binding, pred) => binding.getHoles ++ pred.getHoles
+ case PiType(bindings, to) => bindings.getHoles ++ to.getHoles
+ case SigmaType(bindings, to) => bindings.getHoles ++ to.getHoles
+ }
+ }
+
+ object Operators {
+ abstract class Operator
+ case object Set extends Operator
+ case object Map extends Operator
+ case object Bag extends Operator
+ }
+
+ object Primitives {
+ abstract class Type
+ case class BVType(signed: Boolean, size: Int) extends Type
+ case object IntegerType extends Type
+ case object StringType extends Type
+ case object CharType extends Type
+ case object BooleanType extends Type
+ case object UnitType extends Type
+ case object RealType extends Type
+ }
+
+ case class TypeHole(index: Int) extends Type
+ case class Primitive(primitive: Primitives.Type) extends Type
+ case class Operation(operator: Operators.Operator, elems: TypeSeq) extends Type
+ case class FunctionType(froms: TypeSeq, to: Type) extends Type
+ case class TupleType(elems: TypeSeq) extends Type
+ case class Invocation(identifier: Identifiers.Identifier, args: TypeSeq) extends Type
+ case class Variable(identifier: Identifiers.Identifier) extends Type
+ case class RefinementType(binding: Bindings.Binding, pred: Exprs.Expr) extends Type
+ case class PiType(bindings: Bindings.BindingSeq, to: Type) extends Type
+ case class SigmaType(bindings: Bindings.BindingSeq, to: Type) extends Type
+
+ type TypeSeq = HSeq[Type]
+ }
+
+ implicit object holeTypableType extends HoleTypable[Types.Type] {
+ override val holeType = HoleTypes.Type
+ }
+}
\ No newline at end of file
diff --git a/src/main/scala/inox/parsing/StringContextLexer.scala b/src/main/scala/inox/parser/sc/StringContextLexer.scala
similarity index 66%
rename from src/main/scala/inox/parsing/StringContextLexer.scala
rename to src/main/scala/inox/parser/sc/StringContextLexer.scala
index 07991b06f..714bd9716 100644
--- a/src/main/scala/inox/parsing/StringContextLexer.scala
+++ b/src/main/scala/inox/parser/sc/StringContextLexer.scala
@@ -1,7 +1,8 @@
/* Copyright 2017 EPFL, Lausanne */
package inox
-package parsing
+package parser
+package sc
import scala.util.parsing.combinator._
import scala.util.parsing.combinator.lexical._
@@ -11,35 +12,26 @@ import scala.util.parsing.input._
/** Contains methods for lexical parsing of StringContext objects and their arguments. */
trait StringContextLexer extends { self: Lexical =>
- /** Converts an argument of the StringContext to a Token. */
- def argToToken(x: Any): Token
+ /** Returns the token for a hole. */
+ def toHole(index: Int): Token
/** Returns a reader from a StringContext and its arguments. */
- def getReader(sc: StringContext, args: Seq[Any]): Reader[Token] = {
- require(sc.parts.size == args.size + 1, "Wrong number of arguments.")
+ def getReader(sc: StringContext): Reader[Token] = {
// For string parts, we can create a Scanner.
val stringReaders = sc.parts.zipWithIndex.map {
case (string, index) => toPartReader(string, sc, index)
}
- // All readers (both for parts and args).
- val readers = if (args.isEmpty) {
-
- // Only string readers in this case.
- stringReaders
- } else {
-
- // Turns all args into readers.
- val argsReaders = args.zipWithIndex.map {
- case (arg, index) => toMetaReader(arg, sc, index)
- }
+ // Handle holes.
+ val holeReaders = Seq.tabulate(sc.parts.size - 1) { (index: Int) =>
+ toHoleReader(sc, index)
+ }
- // Intercalates argsReaders between stringReaders.
- stringReaders.head +: {
- argsReaders.zip(stringReaders.tail).flatMap {
- case (argReader, stringReader) => Seq(argReader, stringReader)
- }
+ // Intercalates holeReaders between stringReaders.
+ val readers = stringReaders.head +: {
+ holeReaders.zip(stringReaders.tail).flatMap {
+ case (argReader, stringReader) => Seq(argReader, stringReader)
}
}
@@ -47,10 +39,9 @@ trait StringContextLexer extends { self: Lexical =>
readers.reduce(sequenceReader(_, _))
}
- /** Turns any value to a reader that produces the associated token. */
- private def toMetaReader(value: Any, context: StringContext, index: Int) = new Reader[Token] {
+ private def toHoleReader(context: StringContext, index: Int) = new Reader[Token] {
override def atEnd: Boolean = false
- override def first: Token = argToToken(value)
+ override def first: Token = toHole(index)
override def pos: Position = InArgumentPosition(index + 1, context)
override def rest: Reader[Token] = new Reader[Token] {
override def atEnd: Boolean = true
@@ -92,4 +83,4 @@ trait StringContextLexer extends { self: Lexical =>
}
}
}
-}
+}
diff --git a/src/main/scala/inox/parser/sc/StringContextParsers.scala b/src/main/scala/inox/parser/sc/StringContextParsers.scala
new file mode 100644
index 000000000..2bef8f4cb
--- /dev/null
+++ b/src/main/scala/inox/parser/sc/StringContextParsers.scala
@@ -0,0 +1,15 @@
+package inox
+package parser
+package sc
+
+import scala.util.parsing.combinator.syntactical.TokenParsers
+
+trait StringContextParsers { self: TokenParsers { type Tokens <: StringContextLexer } =>
+
+ def parseSC[A](sc: StringContext)(parser: Parser[A]): Either[String, A] = {
+ parser(lexical.getReader(sc)) match {
+ case NoSuccess(msg, _) => Left(msg)
+ case Success(value, _) => Right(value)
+ }
+ }
+}
diff --git a/src/main/scala/inox/parsing/StringContextPosition.scala b/src/main/scala/inox/parser/sc/StringContextPosition.scala
similarity index 93%
rename from src/main/scala/inox/parsing/StringContextPosition.scala
rename to src/main/scala/inox/parser/sc/StringContextPosition.scala
index 648913fb2..ce71cd08e 100644
--- a/src/main/scala/inox/parsing/StringContextPosition.scala
+++ b/src/main/scala/inox/parser/sc/StringContextPosition.scala
@@ -1,7 +1,6 @@
-/* Copyright 2017 EPFL, Lausanne */
-
package inox
-package parsing
+package parser
+package sc
import scala.util.parsing.input._
@@ -22,7 +21,7 @@ sealed trait StringContextPosition extends Position {
case (InArgumentPosition(arg, _), InPartPosition(part, _, _, _)) =>
arg < part
case (InPartPosition(part1, _, line1, column1), InPartPosition(part2, _, line2, column2)) =>
- part1 < part2 || (part1 == part2 && (line1 < line2 || (line1 == line2 && column1 < column2)))
+ part1 < part2 || (part1 == part2 && (line1 < line2 || (line1 == line2 && column1 < column2)))
}
}
@@ -44,12 +43,12 @@ sealed trait StringContextPosition extends Position {
private def columnOfArg(arg: Int): Int = {
val str = context.parts.take(arg).mkString
val i = str.lastIndexOf('\n')
-
+
if (i < 0) {
str.length + (1 to (arg - 1)).map(sizeOfArg(_)).sum + 1
}
else {
- str.length - (i + 1) +
+ str.length - (i + 1) +
((arg - 1) to 1 by (-1))
.takeWhile((j: Int) => !context.parts(j + 1).contains('\n'))
.map(sizeOfArg(_)).sum + 1
@@ -62,7 +61,7 @@ sealed trait StringContextPosition extends Position {
}
else {
val arg = part - 1
- columnOfArg(arg) + sizeOfArg(arg)
+ columnOfArg(arg) + sizeOfArg(arg)
}
}
@@ -83,7 +82,7 @@ sealed trait StringContextPosition extends Position {
}
override def longString = {
- lineContents + "\n" +
+ lineContents + "\n" +
" " * (column - 1) + "^"
}
diff --git a/src/main/scala/inox/parsing/BuiltIns.scala b/src/main/scala/inox/parsing/BuiltIns.scala
deleted file mode 100644
index 043357495..000000000
--- a/src/main/scala/inox/parsing/BuiltIns.scala
+++ /dev/null
@@ -1,87 +0,0 @@
-/* Copyright 2017 EPFL, Lausanne */
-
-package inox
-package parsing
-
-trait BuiltIns {
-
- lazy val bi = new DefaultBuiltIns {}
-
- trait BuiltInNames {
-
- sealed abstract class BuiltIn(val params: Option[Int], val tparams: Int) {
- val isConstructor = false
- }
-
- trait Constructor { self: BuiltIn =>
- override abstract val isConstructor = true
- }
-
- case object StringConcatenate extends BuiltIn(Some(2), 0)
- case object StringLength extends BuiltIn(Some(1), 0)
- case object StringSubstring extends BuiltIn(Some(3), 0)
-
- case object BooleanAnd extends BuiltIn(None, 0)
- case object BooleanOr extends BuiltIn(None, 0)
-
- case object SetConstructor extends BuiltIn(None, 1) with Constructor
- case object SetContains extends BuiltIn(Some(2), 1)
- case object SetAdd extends BuiltIn(Some(2), 1)
- case object SetUnion extends BuiltIn(Some(2), 1)
- case object SetIntersection extends BuiltIn(Some(2), 1)
- case object SetDifference extends BuiltIn(Some(2), 1)
- case object SetSubset extends BuiltIn(Some(2), 1)
-
- case object BagConstructor extends BuiltIn(None, 1) with Constructor
- case object BagMultiplicity extends BuiltIn(Some(2), 1)
- case object BagAdd extends BuiltIn(Some(2), 1)
- case object BagUnion extends BuiltIn(Some(2), 1)
- case object BagIntersection extends BuiltIn(Some(2), 1)
- case object BagDifference extends BuiltIn(Some(2), 1)
-
- case object MapConstructor extends BuiltIn(None, 2) with Constructor
- case object MapApply extends BuiltIn(Some(2), 2)
- case object MapUpdated extends BuiltIn(Some(3), 2)
-
- val names: Map[String, BuiltIn]
-
- object BuiltIn {
- def unapply(name: String): Option[BuiltIn] = {
- names.get(name)
- }
- }
- }
-
- trait DefaultBuiltIns extends BuiltInNames {
- override val names: Map[String, BuiltIn] = Map(
- BooleanAnd -> "and",
- BooleanOr -> "or",
-
- StringConcatenate -> "concatenate",
- StringLength -> "length",
- StringSubstring -> "substring",
-
- SetConstructor -> "Set",
- SetContains -> "contains",
- SetAdd -> "add",
- SetUnion -> "union",
- SetIntersection -> "intersection",
- SetDifference -> "difference",
- SetSubset -> "subset",
-
- BagConstructor -> "Bag",
- BagMultiplicity -> "multiplicity",
- BagAdd -> "bagAdd",
- BagUnion -> "bagUnion",
- BagIntersection -> "bagIntersection",
- BagDifference -> "bagDifference",
-
- MapConstructor -> "Map",
- MapApply -> "apply",
- MapUpdated -> "updated").map(_.swap)
- }
-
- trait EmptyBuiltIns extends BuiltInNames {
- override val names: Map[String, BuiltIn] = Map()
- }
-}
diff --git a/src/main/scala/inox/parsing/ConstraintSolver.scala b/src/main/scala/inox/parsing/ConstraintSolver.scala
deleted file mode 100644
index 3dc8c853b..000000000
--- a/src/main/scala/inox/parsing/ConstraintSolver.scala
+++ /dev/null
@@ -1,272 +0,0 @@
-/* Copyright 2017 EPFL, Lausanne */
-
-package inox
-package parsing
-
-import scala.util.parsing.input._
-
-trait ConstraintSolvers { self: Elaborators =>
-
- case class ConstraintException(error: String, position: Position)
- extends ElaborationException(Seq(ErrorLocation(error, position)))
-
- class Solver {
-
- import trees._
-
- object UnknownCollector {
- var unknowns = Set[Unknown]()
-
- private val traverser = new SelfTreeTraverser {
- override def traverse(t: Type) {
- t match {
- case u: Unknown => unknowns += u
- case _ => super.traverse(t)
- }
- }
- }
-
- def apply(tpe: Type): Set[Unknown] = {
- unknowns = Set()
- traverser.traverse(tpe)
- unknowns
- }
- }
-
- class OccurChecker(u: Unknown) {
- var exists = false
-
- val traverser = new SelfTreeTraverser {
- override def traverse(t: Type) {
- t match {
- case u2: Unknown => {
- if (u == u2) exists = true
- }
- case _ => {
- super.traverse(t)
- }
- }
- }
- }
-
- def apply(t: Type): Boolean = {
- exists = false
- traverser.traverse(t)
- exists
- }
- }
-
- def solveConstraints(constraints: Seq[Constraint]): Unifier = {
-
- var unknowns: Set[Unknown] = constraints.flatMap({
- case cs => cs.types.flatMap(UnknownCollector(_))
- }).toSet
- var remaining: Seq[Constraint] = constraints
- var substitutions: Map[Unknown, Type] = Map()
- var typeClasses: Map[Unknown, TypeClass] = Map()
- var tupleConstraints: Map[Unknown, Set[Constraint]] = Map()
- var sortConstraints: Map[Unknown, Map[ADTSort, Type => Seq[Constraint]]] = Map()
-
- def substitute(u: Unknown, t: Type) {
- val subst = new Unifier(Map(u -> t))
- unknowns -= u
- remaining = remaining.map(subst(_))
- substitutions = substitutions.mapValues(subst(_)).view.force
- substitutions += (u -> t)
- tupleConstraints = tupleConstraints.mapValues(_.map(subst(_))).view.force
- sortConstraints = sortConstraints.mapValues(_.mapValues(
- _.andThen(_.map(subst(_)))
- ).view.force).view.force
-
- // If the variable we are substituting has "tuple" constraints...
- tupleConstraints.get(u).foreach { (cs: Set[Constraint]) =>
-
- // We reintroduce those constraints.
- remaining ++= cs
-
- // Remove the entry for the variable.
- tupleConstraints -= u
- }
-
- // If the variable we are substituting has a class constraint...
- typeClasses.get(u).foreach { (c: TypeClass) =>
-
- // We reintroduce this constraints.
- remaining +:= HasClass(t, c).setPos(u.pos)
-
- // Remove the entry for the variable.
- typeClasses -= u
- }
-
- // If the variable we are substituting has a sort constraint...
- sortConstraints.get(u).foreach { (sorts: Map[ADTSort, Type => Seq[Constraint]]) =>
- remaining +:= HasSortIn(t, sorts).setPos(u.pos)
-
- sortConstraints -= u
- }
- }
-
- def className(c: TypeClass) = c match {
- case Comparable => "comparable"
- case Numeric => "numeric"
- case Integral => "integral"
- case Bits => "a bit vector"
- }
-
- def handle(constraint: Constraint) {
-
- constraint match {
- case Equal(a, b) => (a, b) match {
- case _ if (a == b) => ()
- case (u1: Unknown, u2: Unknown) => {
- substitute(u1, u2)
- }
- case (u: Unknown, t) => {
- val checker = new OccurChecker(u)
- if (checker(t)) {
- throw new ConstraintException("Occur check.", constraint.pos)
- }
-
- substitute(u, t)
- }
- case (t, u: Unknown) => {
- val checker = new OccurChecker(u)
- if (checker(t)) {
- throw new ConstraintException("Occur check.", constraint.pos)
- }
-
- substitute(u, t)
- }
- case (FunctionType(fas, ta), FunctionType(fbs, tb)) if (fbs.length == fas.length) => {
- remaining ++= fas.zip(fbs).map({ case (fa, fb) => Equal(fa, fb).setPos(constraint.pos) })
- remaining +:= Equal(ta, tb).setPos(constraint.pos)
- }
- case (TupleType(tas), TupleType(tbs)) if (tas.length == tbs.length) => {
- remaining ++= tas.zip(tbs).map({ case (ta, tb) => Equal(ta, tb).setPos(constraint.pos) })
- }
- case (ADTType(ida, tas), ADTType(idb, tbs)) if (ida == idb && tas.length == tbs.length) => {
- remaining ++= tas.zip(tbs).map({ case (ta, tb) => Equal(ta, tb).setPos(constraint.pos) })
- }
- case (SetType(ta), SetType(tb)) => {
- remaining +:= Equal(ta, tb).setPos(constraint.pos)
- }
- case (BagType(ta), BagType(tb)) => {
- remaining +:= Equal(ta, tb).setPos(constraint.pos)
- }
- case (MapType(fa, ta), MapType(fb, tb)) => {
- remaining +:= Equal(fa, fb).setPos(constraint.pos)
- remaining +:= Equal(ta, tb).setPos(constraint.pos)
- }
- case _ => throw new ConstraintException("Types incompatible: " + a + ", " + b, constraint.pos)
- }
- case AtIndexEqual(a, b, i) => a match {
- case u: Unknown => {
- typeClasses.get(u).foreach {
- case c => throw new ConstraintException("Type " + a + " can not be both a tuple and " + className(c), constraint.pos)
- }
- sortConstraints.get(u).foreach {
- case _ => throw new ConstraintException("Type " + a + " can not be both a tuple and an ADT", constraint.pos)
- }
- tupleConstraints += (u -> (tupleConstraints.get(u).getOrElse(Set()) + constraint))
- }
- case TupleType(tps) => {
- if (tps.length >= i) {
- remaining +:= Equal(tps(i - 1), b).setPos(constraint.pos)
- }
- else {
- throw new ConstraintException("Type " + a + " does not have a field at index " + i, constraint.pos)
- }
- }
- case _ => {
- throw new ConstraintException("Type " + a + " is not a tuple.", constraint.pos)
- }
- }
- case HasClass(a, c) => {
- a match {
- case u: Unknown => {
- tupleConstraints.get(u).foreach {
- case _ => throw new ConstraintException("Type " + a + " can not be both a tuple and " + className(c), constraint.pos)
- }
- sortConstraints.get(u).foreach {
- case _ => throw new ConstraintException("Type " + a + " can not be both an ADT and " + className(c), constraint.pos)
- }
- typeClasses += (u -> { typeClasses.get(u) match {
- case None => c
- case Some(c2) => c & c2
- }})
- }
- case _ if c.hasInstance(a) => ()
- case _ => throw new ConstraintException("Type " + a + " is not " + className(c), constraint.pos)
- }
- }
- case HasSortIn(a, sorts) => {
- val n = sorts.size
- if (n == 0) {
- throw new ConstraintException("Type " + a + " has no valid ADT sort", constraint.pos)
- }
- if (n == 1) {
- val (sort, rest) = sorts.toSeq.head
- val typeArgs = sort.tparams.map(x => Unknown.fresh(constraint.pos))
- val expectedType = ADTType(sort.id, typeArgs)
-
- remaining +:= Equal(a, expectedType)
- remaining ++= rest(expectedType)
- }
- else {
- a match {
- case u: Unknown => {
- sortConstraints.get(u) match {
- case None => sortConstraints += u -> sorts
- case Some(otherSorts) => {
- val intersection = sorts.keySet.intersect(otherSorts.keySet).map { (k: ADTSort) =>
- (k, (tpe: Type) => sorts(k)(tpe) ++ otherSorts(k)(tpe))
- }.toMap
-
- remaining +:= HasSortIn(u, intersection)
- sortConstraints -= u
- }
- }
- }
- case ADTType(id, _) => {
- val rest = sorts.collectFirst({
- case (sort, rest) if (sort.id == id) => rest
- }).getOrElse({
- throw new ConstraintException("Type " + a + " has not a valid sort", constraint.pos)
- })
-
- remaining ++= rest(a)
- }
- case _ => {
- throw new ConstraintException("Type " + a + " is not an ADT", constraint.pos)
- }
- }
- }
- }
- }
- }
-
- while (!remaining.isEmpty) {
- while (!remaining.isEmpty) {
- val constraint = remaining.head
- remaining = remaining.tail
- handle(constraint)
- }
-
- if (remaining.isEmpty) {
- // Set the default instance for classes.
- typeClasses.foreach({
- case (t, Integral | Numeric) => remaining +:= Equal(t, IntegerType()).setPos(t.pos)
- case (t, Bits) => remaining +:= Equal(t, Int32Type()).setPos(t.pos)
- case _ => ()
- })
- }
- }
-
- if (!unknowns.isEmpty) {
- throw new ConstraintException("Ambiguity. Try using type annotations.", unknowns.head.pos)
- }
-
- new Unifier(substitutions)
- }
- }
-}
diff --git a/src/main/scala/inox/parsing/DefinitionElaborator.scala b/src/main/scala/inox/parsing/DefinitionElaborator.scala
deleted file mode 100644
index ad6cd41e1..000000000
--- a/src/main/scala/inox/parsing/DefinitionElaborator.scala
+++ /dev/null
@@ -1,159 +0,0 @@
-/* Copyright 2009-2018 EPFL, Lausanne */
-
-package inox
-package parsing
-
-import scala.util.parsing.input._
-
-trait DefinitionElaborators { self: Elaborators =>
-
- trait DefinitionElaborator { self0: Elaborator =>
-
- import DefinitionIR._
-
- def getFunction(fd: FunDef)(implicit store: Store): Constrained[trees.FunDef] = {
- for (ds <- getDefinitions(Seq(fd))) yield { implicit u => ds.head.asInstanceOf[trees.FunDef] }
- }
-
- def getSort(td: TypeDef)(implicit store: Store): Constrained[trees.ADTSort] = {
- for (ds <- getDefinitions(Seq(td))) yield { implicit u => ds.head.asInstanceOf[trees.ADTSort] }
- }
-
- def getDefinitions(definitions: Seq[Definition]): Constrained[Seq[trees.Definition]] = {
-
- // Check for duplicate definitions
- def duplicates(names: Seq[(String, Position)], symbols: Set[String]): Option[(String, Position)] = {
- names.groupBy(_._1).filter(p => symbols(p._1) || p._2.size > 1)
- .map { case (n, ps) => (n, ps.map(_._2).sortWith(_ < _).head) }.toSeq
- .sortWith((a,b) => a._2 < b._2).headOption
- }
-
- val duplicateTypeNames = duplicates(
- definitions.collect { case td: TypeDef => td.id.getName -> td.pos },
- symbols.sorts.keySet.map(_.name)
- )
-
- val duplicateFunctionNames = duplicates(
- definitions.flatMap {
- case fd: FunDef => Seq(fd.id.getName -> fd.pos)
- case td: TypeDef => td.constructors.map(p => p._1.getName -> td.pos)
- },
- symbols.sorts.values.flatMap(_.constructors.map(_.id.name)).toSet ++
- symbols.functions.keySet.map(_.name)
- )
-
- val duplicateFields: Option[ExprIR.IdentifierIdentifier] = (
- definitions
- .flatMap {
- case td: TypeDef => td.constructors.flatMap(_._2.map(_._1))
- case _ => Seq()
- }.collect { case ident @ ExprIR.IdentifierIdentifier(_) => ident } ++
- symbols.sorts.values.toSeq
- .flatMap(_.constructors.flatMap(_.fields.map(_.id)))
- .map(id => ExprIR.IdentifierIdentifier(id)))
- .groupBy(id => id)
- .filter(_._2.size > 1)
- .map { case (id, ids) => ids.find(_.pos != NoPosition).getOrElse(id) }
- .headOption
-
- val sortsStore = definitions.foldLeft(Store.empty) {
- case (store, TypeDef(ident, tparams, _)) =>
- store + (ident.getName, new trees.ADTSort(
- getIdentifier(ident),
- tparams.map(id => trees.TypeParameterDef(trees.TypeParameter(getIdentifier(id), Seq()))),
- Seq(), Seq()
- ))
- case (store, _) => store
- }
-
- val newStore = definitions.foldLeft(sortsStore) {
- case (store, td: TypeDef) =>
- val sort = sortsStore getSort td.id.getName
- val tpStore = (td.tparams zip sort.typeArgs).foldLeft(store) {
- case (store, (id, tp)) => store + (id.getName, tp)
- }
-
- td.constructors.foldLeft(store) { case (store, (ident, params)) =>
- val id = getIdentifier(ident)
- val fields = params.map { case (ident, tpe) =>
- trees.ValDef(getIdentifier(ident), getSimpleType(tpe)(tpStore))
- }
- store + (ident.getName, sort, new trees.ADTConstructor(id, sort.id, fields))
- }
-
- case (store, fd: FunDef) =>
- val id = getIdentifier(fd.id)
- val tparams = fd.tparams.map(id => trees.TypeParameter(getIdentifier(id), Seq()))
- val tpds = tparams.map(trees.TypeParameterDef(_))
- val tpStore = (fd.tparams zip tparams).foldLeft(store) {
- case (store, (id, tp)) => store + (id.getName, tp)
- }
-
- val params = fd.params.map(p => trees.ValDef(getIdentifier(p._1), getSimpleType(p._2)(tpStore)))
- val resultType = getSimpleType(fd.returnType)(tpStore)
- val body = trees.Choose(trees.ValDef.fresh("res", resultType), trees.BooleanLiteral(true))
- store + (fd.id.getName, new trees.FunDef(id, tpds, params, resultType, body, Seq()))
- }
-
- Constrained.sequence({
- definitions.map {
- case td: TypeDef =>
- implicit val position: Position = td.pos
- val sort = newStore getSort td.id.getName
- val tpStore = (td.tparams zip sort.typeArgs).foldLeft(newStore) {
- case (store, (id, tp)) => store + (id.getName, tp)
- }
-
- Constrained.sequence({
- td.constructors.map { case (id, params) =>
- val (_, cons) = newStore getConstructor id.getName
- val (_, _, vds) = getExprBindings((params zip cons.fields).map {
- case ((_, tpe), vd) => (ExprIR.IdentifierIdentifier(vd.id), Some(tpe))
- })(tpStore, position)
- vds.transform(cons.id -> _)
- }
- }).transform({ constructors =>
- new trees.ADTSort(sort.id, sort.tparams, constructors.map {
- case (cid, params) => new trees.ADTConstructor(cid, sort.id, params)
- }, Seq())
- })
-
- case fd: FunDef =>
- implicit val position: Position = fd.pos
- val signature = newStore getFunction fd.id.getName
- val initStore = (fd.tparams zip signature.typeArgs).foldLeft(newStore) {
- case (store, (id, tp)) => store + (id.getName, tp)
- }
-
- val (bodyStore, _, vds) = getExprBindings((fd.params zip signature.params).map {
- case ((_, tpe), vd) => (ExprIR.IdentifierIdentifier(vd.id), Some(tpe))
- })(initStore, position)
-
- val returnType = Unknown.fresh
-
- (for {
- params <- vds
- tpe <- getType(fd.returnType)(bodyStore)
- body <- getExpr(fd.body, returnType)(bodyStore)
- } yield { implicit u =>
- new trees.FunDef(signature.id, signature.tparams, params, tpe, body, Seq())
- }).addConstraint({
- Constraint.equal(returnType, signature.returnType)
- })
- }
- }).checkImmediate(
- duplicateTypeNames.isEmpty,
- "Multiple type definitions with name " + duplicateTypeNames.get._1 + ".",
- duplicateTypeNames.get._2
- ).checkImmediate(
- duplicateFunctionNames.isEmpty,
- "Multiple function definitions with name " + duplicateFunctionNames.get._1 + ".",
- duplicateFunctionNames.get._2
- ).checkImmediate(
- duplicateFields.isEmpty,
- "Duplicate field identifiers with name " + duplicateFields.get.getName + ".",
- duplicateFields.get.pos
- )
- }
- }
-}
diff --git a/src/main/scala/inox/parsing/DefinitionExtractor.scala b/src/main/scala/inox/parsing/DefinitionExtractor.scala
deleted file mode 100644
index f7613b548..000000000
--- a/src/main/scala/inox/parsing/DefinitionExtractor.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/* Copyright 2009-2018 EPFL, Lausanne */
-
-package inox
-package parsing
-
-trait DefinitionExtractors { self: Extractors =>
-
- trait DefinitionExtractor { self0: Extractor =>
-
- import DefinitionIR._
-
- def extract(fd: trees.FunDef, template: FunDef): Option[Match] = extract(
- toIdObl(fd.id -> template.id),
- toIdObls(fd.tparams.map(_.id) -> template.tparams),
- toIdObls(fd.params.map(_.id) -> template.params.map(_._1)),
- toTypeObls(fd.params.map(_.getType) -> template.params.map(_._2)),
- toTypeObl(fd.getType -> template.returnType),
- toExprObl(fd.fullBody -> template.body))
-
- def extract(sort: trees.ADTSort, template: TypeDef): Option[Match] = extract(
- toIdObl(sort.id -> template.id),
- toIdObls(sort.tparams.map(_.id) -> template.tparams),
- toIdObls(sort.constructors.map(_.id) -> template.constructors.map(_._1)),
- extract((sort.constructors zip template.constructors).map { case (cons, (_, fields)) =>
- extract(
- toIdObls(cons.fields.map(_.id) -> fields.map(_._1)),
- toTypeObls(cons.fields.map(_.getType) -> fields.map(_._2))
- )
- } : _*))
- }
-}
diff --git a/src/main/scala/inox/parsing/DefinitionIR.scala b/src/main/scala/inox/parsing/DefinitionIR.scala
deleted file mode 100644
index 2daf85a90..000000000
--- a/src/main/scala/inox/parsing/DefinitionIR.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/* Copyright 2009-2018 EPFL, Lausanne */
-
-package inox
-package parsing
-
-import scala.util.parsing.input.Positional
-
-trait DefinitionIRs { self: IRs =>
-
- /** IR for definitions. */
- object DefinitionIR {
-
- import ExprIR.{Identifier, Expression}
- import TypeIR.{Expression => Type}
-
- sealed abstract class Definition(pre: String) extends Positional with Product {
- override def productPrefix = pos + "@" + pre
- }
-
- case class FunDef(
- id: Identifier,
- tparams: Seq[Identifier],
- params: Seq[(Identifier, Type)],
- returnType: Type,
- body: Expression
- ) extends Definition("Function")
-
- case class TypeDef(
- id: Identifier,
- tparams: Seq[Identifier],
- constructors: Seq[(Identifier, Seq[(Identifier, Type)])]
- ) extends Definition("Type")
- }
-}
diff --git a/src/main/scala/inox/parsing/DefinitionParser.scala b/src/main/scala/inox/parsing/DefinitionParser.scala
deleted file mode 100644
index 52c3c9ead..000000000
--- a/src/main/scala/inox/parsing/DefinitionParser.scala
+++ /dev/null
@@ -1,79 +0,0 @@
-/* Copyright 2009-2018 EPFL, Lausanne */
-
-package inox
-package parsing
-
-import scala.util.parsing.combinator._
-import scala.util.parsing.combinator.syntactical._
-import scala.util.parsing.combinator.token._
-import scala.util.parsing.input.Position
-
-trait DefinitionParsers { self: Parsers =>
-
- class DefinitionParser extends ExpressionParser {
-
- import TypeIR.{Expression => Type}
- import ExprIR.{Identifier, Expression}
- import DefinitionIR._
- import lexical._
-
- lazy val pipe = elem(Operator("|")) withFailureMessage { (p: Position) =>
- withPos("Unexpected character. Separator `|` or end of constructor list expected.", p)
- }
-
- lazy val param: Parser[(Identifier, Type)] = (for {
- id <- identifier
- _ <- commit(p(':'))
- tp <- commit(typeExpression)
- } yield (id, tp)) withFailureMessage {
- (p: Position) => withPos("Parameter declaration expected.", p)
- }
-
- lazy val params: Parser[Seq[(Identifier, Type)]] =
- (p('(') ~> repsep(param, p(',')) <~ commit(p(')') withFailureMessage {
- (p: Position) => withPos("Expected character `)`, or additional parameters (separated by `,`).", p) }
- )) withFailureMessage {
- (p: Position) => withPos("Parameter list expected.", p)
- }
-
- lazy val tparams: Parser[Seq[Identifier]] = (opt(for {
- _ <- p('[')
- ids <- commit(rep1sep(identifier, p(','))) withFailureMessage {
- (p: Position) => withPos("Type parameters expected.", p)
- }
- _ <- commit(p(']')) withFailureMessage {
- (p: Position) => withPos("Expected character `]`, or additional type parameters (separated by `,`).", p)
- }
- } yield ids) ^^ (_.toSeq.flatten)) withFailureMessage {
- (p: Position) => withPos("Type parameter list expected (or no type parameters).", p)
- }
-
- lazy val constructor: Parser[(Identifier, Seq[(Identifier, Type)])] = (for {
- id <- commit(identifier) withFailureMessage { (p: Position) => withPos("Constructor name expected.", p) }
- ps <- opt(params) ^^ (_.getOrElse(Seq()))
- } yield (id, ps)) withFailureMessage {
- (p: Position) => withPos("Constructor declaration expected.", p)
- }
-
- lazy val datatype: Parser[TypeDef] = for {
- _ <- kw("type")
- id <- commit(identifier)
- tps <- commit(tparams)
- _ <- commit(kw("="))
- conss <- commit(rep1sep(constructor, pipe))
- } yield TypeDef(id, tps, conss)
-
- lazy val function: Parser[FunDef] = for {
- _ <- kw("def")
- id <- commit(identifier)
- tps <- commit(tparams)
- ps <- opt(params) ^^ (_.getOrElse(Seq()))
- _ <- commit(p(':') withFailureMessage { (p: Position) =>
- withPos("Parameter list, or character `:`, expected.", p)
- })
- tpe <- commit(typeExpression)
- _ <- commit(kw("="))
- body <- commit(expression)
- } yield FunDef(id, tps, ps, tpe, body)
- }
-}
diff --git a/src/main/scala/inox/parsing/Elaborators.scala b/src/main/scala/inox/parsing/Elaborators.scala
deleted file mode 100644
index be1eca2b7..000000000
--- a/src/main/scala/inox/parsing/Elaborators.scala
+++ /dev/null
@@ -1,268 +0,0 @@
-/* Copyright 2017 EPFL, Lausanne */
-
-package inox
-package parsing
-
-import scala.util.parsing.input._
-
-import scala.language.implicitConversions
-
-/** Contains description of (type-checking) constraints and
- * and constrained values.
- */
-trait Elaborators
- extends IRs
- with ExpressionDeconstructors
- with TypeDeconstructors
- with ExpressionElaborators
- with TypeElaborators
- with DefinitionElaborators
- with ConstraintSolvers {
-
- import trees.Type
- import trees.ADTSort
-
- class ElaborationException(errors: Seq[ErrorLocation])
- extends Exception(errors.map(_.toString).mkString("\n\n"))
-
- trait Elaborator
- extends ExpressionDeconstructor
- with TypeDeconstructor
- with ExpressionElaborator
- with DefinitionElaborator
- with TypeElaborator {
-
- lazy val solver = new Solver
-
- def elaborate[A](c: Constrained[A]): A = c match {
- case Unsatisfiable(es) => throw new ElaborationException(es)
- case WithConstraints(ev, constraints) =>
- implicit val u = solver.solveConstraints(constraints)
- ev
- }
- }
-
- /** Represents a meta type-parameter. */
- class Unknown(val param: BigInt) extends trees.Type with Positional {
- override def toString: String = pos + "@MetaParam(" + param + ")"
- }
-
- object Unknown {
- def fresh(implicit position: Position): Unknown = new Unknown(next).setPos(position)
-
- private var i: BigInt = 0
-
- def next: BigInt = synchronized {
- val ret = i
- i += 1
- ret
- }
- }
-
- sealed abstract class TypeClass {
- def &(that: TypeClass) = (this, that) match {
- case (Bits, _) => Bits
- case (_, Bits) => Bits
- case (Integral, _) => Integral
- case (_, Integral) => Integral
- case (Numeric, _) => Numeric
- case (_, Numeric) => Numeric
- case _ => Comparable
- }
-
- def hasInstance(tpe: Type): Boolean
- }
- case object Comparable extends TypeClass {
- override def hasInstance(tpe: Type) = {
- tpe == trees.CharType() || Numeric.hasInstance(tpe)
- }
- }
- case object Numeric extends TypeClass {
- override def hasInstance(tpe: Type) = {
- tpe == trees.RealType() || Integral.hasInstance(tpe)
- }
- }
- case object Integral extends TypeClass {
- override def hasInstance(tpe: Type) = {
- tpe == trees.IntegerType() || Bits.hasInstance(tpe)
- }
- }
- case object Bits extends TypeClass {
- override def hasInstance(tpe: Type) =
- tpe.isInstanceOf[trees.BVType]
- }
-
- /** Maps meta type-parameters to actual types. */
- class Unifier(subst: Map[Unknown, Type]) {
-
- val instantiator = new trees.SelfTreeTransformer {
- override def transform(tpe: Type) = tpe match {
- case u: Unknown => subst.getOrElse(u, u)
- case _ => super.transform(tpe)
- }
- }
-
- def apply(tpe: trees.Type): trees.Type = instantiator.transform(tpe)
- def apply(expr: trees.Expr): trees.Expr = instantiator.transform(expr)
- def apply(vd: trees.ValDef): trees.ValDef = instantiator.transform(vd)
- def apply(c: Constraint): Constraint = c match {
- case Equal(a, b) => Equal(instantiator.transform(a), instantiator.transform(b)).setPos(c.pos)
- case HasClass(a, cl) => HasClass(instantiator.transform(a), cl).setPos(c.pos)
- case AtIndexEqual(a, b, idx) => AtIndexEqual(instantiator.transform(a), instantiator.transform(b), idx).setPos(c.pos)
- }
- }
-
- /** Constraint on type(s). */
- abstract class Constraint(val types: Seq[Type]) extends Positional
- case class Equal(a: Type, b: Type) extends Constraint(Seq(a, b))
- case class HasClass(a: Type, c: TypeClass) extends Constraint(Seq(a))
- case class AtIndexEqual(tup: Type, mem: Type, idx: Int) extends Constraint(Seq(tup, mem))
- case class HasSortIn(a: Type, sorts: Map[ADTSort, Type => Seq[Constraint]]) extends Constraint(Seq(a))
-
- object Constraint {
- def exist(a: Unknown)(implicit position: Position): Constraint = Equal(a, a).setPos(position)
- def equal(a: Type, b: Type)(implicit position: Position): Constraint = Equal(a, b).setPos(position)
- def isNumeric(a: Type)(implicit position: Position): Constraint = HasClass(a, Numeric).setPos(position)
- def isIntegral(a: Type)(implicit position: Position): Constraint = HasClass(a, Integral).setPos(position)
- def isComparable(a: Type)(implicit position: Position): Constraint = HasClass(a, Comparable).setPos(position)
- def isBitVector(a: Type)(implicit position: Position): Constraint = HasClass(a, Bits).setPos(position)
- def atIndex(tup: Type, mem: Type, idx: Int)(implicit position: Position) = AtIndexEqual(tup, mem, idx).setPos(position)
- def hasSortIn(a: Type, sorts: (ADTSort, Type => Seq[Constraint])*)(implicit position: Position) = HasSortIn(a, sorts.toMap).setPos(position)
- }
-
- case class Eventual[+A](fun: Unifier => A)
-
- implicit def eventualToValue[A](e: Eventual[A])(implicit unifier: Unifier): A = e.fun(unifier)
-
- class Store private(
- variables: Map[String, (Identifier, Type, Eventual[Type])],
- types: Map[String, trees.TypeParameter],
- functions: Map[String, trees.FunDef],
- constructors: Map[String, (trees.ADTSort, trees.ADTConstructor)],
- fields: Map[String, Seq[(trees.ADTSort, trees.ADTConstructor, trees.ValDef)]],
- sorts: Map[String, trees.ADTSort]) {
-
- def getVariable(name: String): (Identifier, Type, Eventual[Type]) = variables(name)
- def isVariable(name: String): Boolean = variables contains name
-
- def getTypeParameter(name: String): trees.TypeParameter = types(name)
- def isTypeParameter(name: String): Boolean = types contains name
-
- def getFunction(name: String): trees.FunDef = functions(name)
- def isFunction(name: String): Boolean = functions contains name
-
- def getConstructor(name: String): (trees.ADTSort, trees.ADTConstructor) = constructors(name)
- def isConstructor(name: String): Boolean = constructors contains name
-
- def getFields(name: String): Seq[(trees.ADTSort, trees.ADTConstructor, trees.ValDef)] = fields(name)
- def isField(name: String): Boolean = fields contains name
-
- def getSort(name: String): trees.ADTSort = sorts(name)
- def isSort(name: String): Boolean = sorts contains name
-
- def +(p: (String, Identifier, Type, Eventual[Type])): Store = this + (p._1, p._2, p._3, p._4)
- def +(name: String, id: Identifier, simple: Type, tpe: Eventual[Type]): Store =
- new Store(variables + (name -> ((id, simple, tpe))), types, functions, constructors, fields, sorts)
-
- def +(name: String, tp: trees.TypeParameter): Store =
- new Store(variables, types + (name -> tp), functions, constructors, fields, sorts)
-
- def +(name: String, fd: trees.FunDef): Store =
- new Store(variables, types, functions + (name -> fd), constructors, fields, sorts)
- def +(name: String, sort: trees.ADTSort, cons: trees.ADTConstructor): Store =
- new Store(variables, types, functions, constructors + (name -> ((sort, cons))), fields, sorts)
- def +(name: String, sort: trees.ADTSort, cons: trees.ADTConstructor, vd: trees.ValDef): Store =
- new Store(variables, types, functions, constructors,
- fields + (name -> (fields.getOrElse(name, Seq()) :+ ((sort, cons, vd)))), sorts)
- def +(name: String, sort: trees.ADTSort): Store =
- new Store(variables, types, functions, constructors, fields, sorts + (name -> sort))
- }
-
- def getIdentifier(id: ExprIR.Identifier): Identifier = id match {
- case ExprIR.IdentifierIdentifier(i) => i
- case ExprIR.IdentifierName(name) => inox.FreshIdentifier(name)
- case ExprIR.IdentifierHole(_) => throw new Error("Expression contains holes.")
- }
-
- object Store {
- def empty: Store = new Store(Map(), Map(), Map(), Map(), Map(), Map())
- }
-
- /** Represents a set of constraints with a value.
- *
- * The value contained is not directly available,
- * but can be obtained from a `Unifier`.
- *
- * Such a `Unifier` should be obtained by solving the constraints.
- *
- * This class offers an applicative functor interface.
- */
- sealed abstract class Constrained[+A] {
-
- def map[B](f: A => (Unifier => B)): Constrained[B] = this match {
- case Unsatisfiable(es) => Unsatisfiable(es)
- case WithConstraints(v, cs) => WithConstraints(Eventual(implicit u => f(v)(u)), cs)
- }
-
- def flatMap[B](f: (Eventual[A]) => Constrained[B]): Constrained[B] = this match {
- case Unsatisfiable(es) => Unsatisfiable(es)
- case WithConstraints(fA, csA) => f(fA) match {
- case Unsatisfiable(fs) => Unsatisfiable(fs)
- case WithConstraints(fB, csB) => WithConstraints(fB, csA ++ csB)
- }
- }
-
- def transform[B](f: A => B): Constrained[B] = this match {
- case Unsatisfiable(es) => Unsatisfiable(es)
- case WithConstraints(v, cs) => WithConstraints(Eventual(implicit u => f(v)), cs)
- }
-
- def combine[B, C](that: Constrained[B])(f: (A, B) => C): Constrained[C] = (this, that) match {
- case (WithConstraints(vA, csA), WithConstraints(vB, csB)) => WithConstraints(Eventual(implicit u => f(vA, vB)), csA ++ csB)
- case (Unsatisfiable(es), Unsatisfiable(fs)) => Unsatisfiable(es ++ fs)
- case (Unsatisfiable(es), _) => Unsatisfiable(es)
- case (_, Unsatisfiable(fs)) => Unsatisfiable(fs)
- }
-
- def app[B, C](that: Constrained[B])(implicit ev: A <:< (B => C)): Constrained[C] =
- this.combine(that)((f: A, x: B) => ev(f)(x))
-
- def get(implicit unifier: Unifier): A = this match {
- case WithConstraints(vA, cs) => vA
- case Unsatisfiable(_) => throw new Exception("Unsatisfiable.get")
- }
-
- def addConstraint(constraint: => Constraint): Constrained[A] = addConstraints(Seq(constraint))
-
- def addConstraints(constraints: => Seq[Constraint]): Constrained[A] = this match {
- case WithConstraints(vA, cs) => WithConstraints(vA, constraints ++ cs)
- case Unsatisfiable(es) => Unsatisfiable(es)
- }
- def checkImmediate(condition: Boolean, error: => String, location: => Position): Constrained[A] = this match {
- case Unsatisfiable(es) if (!condition) => Unsatisfiable(es :+ ErrorLocation(error, location))
- case WithConstraints(_, _) if (!condition) => Unsatisfiable(Seq(ErrorLocation(error, location)))
- case _ => this
- }
- }
- case class Unsatisfiable(errors: Seq[ErrorLocation]) extends Constrained[Nothing]
- case class WithConstraints[A](value: Eventual[A], constraints: Seq[Constraint]) extends Constrained[A]
-
- object Constrained {
- def fail(error: String, location: Position) = Unsatisfiable(Seq(ErrorLocation(error, location)))
- def fail(errors: Seq[(String, Position)]) = {
- assert(!errors.isEmpty)
- Unsatisfiable(errors.map({ case (error, location) => ErrorLocation(error, location)}))
- }
- def pure[A](x: A): Constrained[A] = WithConstraints(Eventual(implicit u => x), Seq())
- def unify[A](f: Unifier => A): Constrained[A] = WithConstraints(Eventual(f), Seq())
-
- def sequence[A](cs: Seq[Constrained[A]]): Constrained[Seq[A]] = {
- val zero: Constrained[Seq[A]] = pure(Seq[A]())
- val cons: (A, Seq[A]) => Seq[A] = (x: A, xs: Seq[A]) => x +: xs
-
- cs.foldRight(zero) {
- case (c, acc) => c.combine(acc)(cons)
- }
- }
- }
-}
diff --git a/src/main/scala/inox/parsing/ErrorLocation.scala b/src/main/scala/inox/parsing/ErrorLocation.scala
deleted file mode 100644
index 2d7f6eb7d..000000000
--- a/src/main/scala/inox/parsing/ErrorLocation.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-/* Copyright 2017 EPFL, Lausanne */
-
-package inox
-package parsing
-
-import scala.util.parsing.input._
-
-case class ErrorLocation(error: String, pos: Position) {
- override def toString: String = error + "\n" + pos.longString
-}
\ No newline at end of file
diff --git a/src/main/scala/inox/parsing/ExprIR.scala b/src/main/scala/inox/parsing/ExprIR.scala
deleted file mode 100644
index 831d69134..000000000
--- a/src/main/scala/inox/parsing/ExprIR.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/* Copyright 2017 EPFL, Lausanne */
-
-package inox
-package parsing
-
-import scala.util.parsing.input._
-
-trait ExprIRs { self: IRs =>
-
- /** IR for expressions. */
- object ExprIR extends IR {
-
- sealed abstract class Identifier extends Positional {
- def getName: String
- def getFullName: String
-
- override def toString = pos + "@" + getFullName
- }
- case class IdentifierName(name: String) extends Identifier {
- override def getName = name
- override def getFullName = name
- }
- case class IdentifierIdentifier(identifier: inox.Identifier) extends Identifier {
- override def getName = identifier.name
- override def getFullName = identifier.uniqueName
- }
- case class IdentifierHole(index: Int) extends Identifier {
- override def getName = "$" + index
- override def getFullName = "$" + index
- }
-
- type Operator = String
-
- sealed abstract class Field extends Positional
- case class FieldName(name: String) extends Field
- case class FieldIdentifier(identifier: inox.Identifier) extends Field
- case class FieldHole(index: Int) extends Field
-
- type Type = TypeIR.Expression
-
- sealed abstract class Value
- case class EmbeddedExpr(expr: trees.Expr) extends Value
- case class EmbeddedValue(value: Any) extends Value
- case class NumericLiteral(value: String) extends Value
- case class DecimalLiteral(whole: String, trailing: String, repeating: String) extends Value
- case class StringLiteral(string: String) extends Value
- case class BooleanLiteral(value: Boolean) extends Value
- case class CharLiteral(value: Char) extends Value
- case object UnitLiteral extends Value
-
- sealed abstract class Quantifier
- case object Lambda extends Quantifier
- case object Forall extends Quantifier
- case object Exists extends Quantifier
- case object Choose extends Quantifier
-
- case class ExpressionHole(index: Int) extends Expression("ExpressionHole")
- case class ExpressionSeqHole(index: Int) extends Expression("ExpressionSeqHole")
- }
-}
diff --git a/src/main/scala/inox/parsing/ExpressionDeconstructor.scala b/src/main/scala/inox/parsing/ExpressionDeconstructor.scala
deleted file mode 100644
index 91c3f6c2f..000000000
--- a/src/main/scala/inox/parsing/ExpressionDeconstructor.scala
+++ /dev/null
@@ -1,383 +0,0 @@
-/* Copyright 2017 EPFL, Lausanne */
-
-package inox
-package parsing
-
-trait ExpressionDeconstructors extends IRs {
-
- trait ExpressionDeconstructor {
- implicit val symbols: trees.Symbols
-
- import ExprIR._
-
- object TupleField {
- def unapply(field: Field): Option[Int] = field match {
- case FieldName(name) if name.startsWith("_") => scala.util.Try(name.tail.toInt).toOption.filter(_ >= 1)
- case _ => None
- }
- }
-
- object Field {
-
- lazy val allFields = symbols.sorts.values.toSeq
- .flatMap(_.constructors.flatMap(c => c.fields.map(vd => (c, vd))))
-
- lazy val fieldsById = allFields.groupBy(_._2.id)
- lazy val fieldsByName = allFields.groupBy(_._2.id.name)
-
- def unapplySeq(field: Field): Option[Seq[(trees.ADTConstructor, trees.ValDef)]] = field match {
- case FieldName(name) => fieldsByName.get(name)
- case FieldIdentifier(id) => fieldsById.get(id)
- case _ => None
- }
- }
-
- object FunDef {
-
- lazy val functionsByName = symbols.functions.toSeq.map(_._2).groupBy(_.id.name)
-
- def unapplySeq(expression: Expression): Option[Seq[trees.FunDef]] = expression match {
- case Variable(IdentifierIdentifier(identifier)) => symbols.functions.get(identifier).map(Seq(_))
- case Variable(IdentifierName(string)) => functionsByName.get(string)
- case _ => None
- }
- }
-
- object TypedFunDef {
- def unapply(expression: Expression): Option[(trees.FunDef, Option[Seq[Type]])] = expression match {
- case TypeApplication(FunDef(fd), targs) => Some((fd, Some(targs)))
- case FunDef(fd) => Some((fd, None))
- case _ => None
- }
- }
-
- object ConsDef {
-
- lazy val allConstructors = symbols.sorts.values.toSeq.flatMap(_.constructors)
-
- lazy val consById = allConstructors.groupBy(_.id)
- lazy val consByName = allConstructors.groupBy(_.id.name)
-
- def unapplySeq(expression: Expression): Option[Seq[trees.ADTConstructor]] = expression match {
- case Variable(IdentifierIdentifier(identifier)) => consById.get(identifier)
- case Variable(IdentifierName(string)) => consByName.get(string)
- case _ => None
- }
- }
-
- object TypedConsDef {
- def unapply(expression: Expression): Option[(trees.ADTConstructor, Option[Seq[Type]])] = expression match {
- case TypeApplication(ConsDef(cons), targs) => Some((cons, Some(targs)))
- case ConsDef(cons) => Some((cons, None))
- case _ => None
- }
- }
-
- object NumericBinOp {
- def unapply(string: String): Option[(trees.Expr, trees.Expr) => trees.Expr] = string match {
- case "+" => Some({ (lhs: trees.Expr, rhs: trees.Expr) => trees.Plus(lhs, rhs) })
- case "-" => Some({ (lhs: trees.Expr, rhs: trees.Expr) => trees.Minus(lhs, rhs) })
- case "*" => Some({ (lhs: trees.Expr, rhs: trees.Expr) => trees.Times(lhs, rhs) })
- case "/" => Some({ (lhs: trees.Expr, rhs: trees.Expr) => trees.Division(lhs, rhs) })
- case _ => None
- }
- }
-
- object IntegralBinOp {
- def unapply(string: String): Option[(trees.Expr, trees.Expr) => trees.Expr] = string match {
- case "%" => Some({ (lhs: trees.Expr, rhs: trees.Expr) => trees.Remainder(lhs, rhs) })
- case "mod" => Some({ (lhs: trees.Expr, rhs: trees.Expr) => trees.Modulo(lhs, rhs) })
- case _ => None
- }
- }
-
- object ComparableBinOp {
- def unapply(string: String): Option[(trees.Expr, trees.Expr) => trees.Expr] = string match {
- case "<=" => Some({ (lhs: trees.Expr, rhs: trees.Expr) => trees.LessEquals(lhs, rhs) })
- case "<" => Some({ (lhs: trees.Expr, rhs: trees.Expr) => trees.LessThan(lhs, rhs) })
- case ">=" => Some({ (lhs: trees.Expr, rhs: trees.Expr) => trees.GreaterEquals(lhs, rhs) })
- case ">" => Some({ (lhs: trees.Expr, rhs: trees.Expr) => trees.GreaterThan(lhs, rhs) })
- case _ => None
- }
- }
-
- object BooleanBinOp {
- def unapply(string: String): Option[(trees.Expr, trees.Expr) => trees.Expr] = string match {
- case "==>" => Some({ (lhs: trees.Expr, rhs: trees.Expr) => trees.Implies(lhs, rhs) })
- case _ => None
- }
- }
-
- object BooleanAndOperation {
- def unapply(expr: Expression): Option[Seq[Expression]] = expr match {
- case Operation("&&", expressions) => Some(expressions)
- case PrimitiveFunction(bi.BooleanAnd, _, expressions, None) => Some(expressions)
- case _ => None
- }
- }
-
- object BooleanOrOperation {
- def unapply(expr: Expression): Option[Seq[Expression]] = expr match {
- case Operation("||", expressions) => Some(expressions)
- case PrimitiveFunction(bi.BooleanOr, _, expressions, None) => Some(expressions)
- case _ => None
- }
- }
-
- object BooleanNAryOperation {
- def unapply(expr: Expression): Option[(Seq[trees.Expr] => trees.Expr, Seq[Expression])] = expr match {
- case BooleanAndOperation(expressions) => Some(({ (exprs: Seq[trees.Expr]) => trees.And(exprs) }, expressions))
- case BooleanOrOperation(expressions) => Some(({ (exprs: Seq[trees.Expr]) => trees.Or(exprs) }, expressions))
- case _ => None
- }
- }
-
- object BitVectorBinOp {
- def unapply(string: String): Option[(trees.Expr, trees.Expr) => trees.Expr] = string match {
- case "|" => Some({ (lhs: trees.Expr, rhs: trees.Expr) => trees.BVOr(lhs, rhs) })
- case "&" => Some({ (lhs: trees.Expr, rhs: trees.Expr) => trees.BVAnd(lhs, rhs) })
- case "^" => Some({ (lhs: trees.Expr, rhs: trees.Expr) => trees.BVXor(lhs, rhs) })
- case "<<" => Some({ (lhs: trees.Expr, rhs: trees.Expr) => trees.BVShiftLeft(lhs, rhs) })
- case ">>" => Some({ (lhs: trees.Expr, rhs: trees.Expr) => trees.BVAShiftRight(lhs, rhs) })
- case ">>>" => Some({ (lhs: trees.Expr, rhs: trees.Expr) => trees.BVLShiftRight(lhs, rhs) })
- case _ => None
- }
- }
-
- object PrimitiveFunction {
- def unapply(expr: Expression): Option[(bi.BuiltIn, String, Seq[Expression], Option[Seq[Type]])] = expr match {
- case Application(TypeApplication(Variable(IdentifierName(name@bi.BuiltIn(builtIn))), tpes), args) =>
- Some((builtIn, name, args, Some(tpes)))
- case Application(Variable(IdentifierName(name@bi.BuiltIn(builtIn))), args) =>
- Some((builtIn, name, args, None))
- case _ => None
- }
- }
-
- object SetConstruction {
- def unapply(expr: Expression): Option[(Seq[Expression], Option[Type])] = expr match {
- case PrimitiveFunction(bi.SetConstructor, f, es, otpes) if (otpes.isEmpty || otpes.get.length == 1) =>
- Some((es, otpes.map(_.head)))
- case Operation("Set", es@Bindings(_, Seq())) =>
- Some((es, None))
- case _ => None
- }
- }
-
- object SetUnionOperation {
- def unapply(expr: Expression): Option[(Expression, Expression, Option[Type])] = expr match {
- case PrimitiveFunction(bi.SetUnion, _, Seq(set1, set2), otpes) if (otpes.isEmpty || otpes.get.length == 1) =>
- Some((set1, set2, otpes.map(_.head)))
- case Operation("∪", Seq(set1, set2)) =>
- Some((set1, set2, None))
- case _ => None
- }
- }
-
- object SetIntersectionOperation {
- def unapply(expr: Expression): Option[(Expression, Expression, Option[Type])] = expr match {
- case PrimitiveFunction(bi.SetIntersection, _, Seq(set1, set2), otpes) if (otpes.isEmpty || otpes.get.length == 1) =>
- Some((set1, set2, otpes.map(_.head)))
- case Operation("∩", Seq(set1, set2)) =>
- Some((set1, set2, None))
- case _ => None
- }
- }
-
- object SetDifferenceOperation {
- def unapply(expr: Expression): Option[(Expression, Expression, Option[Type])] = expr match {
- case PrimitiveFunction(bi.SetDifference, _, Seq(set1, set2), otpes) if (otpes.isEmpty || otpes.get.length == 1) =>
- Some((set1, set2, otpes.map(_.head)))
- case Operation("∖", Seq(set1, set2)) =>
- Some((set1, set2, None))
- case _ => None
- }
- }
-
- object SetBinaryOperation {
- def unapply(expr: Expression): Option[(Expression, Expression, (trees.Expr, trees.Expr) => trees.Expr, Option[Type])] = expr match {
- case SetUnionOperation(set1, set2, otpe) => Some((set1, set2, { (lhs: trees.Expr, rhs: trees.Expr) => trees.SetUnion(lhs, rhs) }, otpe))
- case SetIntersectionOperation(set1, set2, otpe) => Some((set1, set2, { (lhs: trees.Expr, rhs: trees.Expr) => trees.SetIntersection(lhs, rhs) }, otpe))
- case SetUnionOperation(set1, set2, otpe) => Some((set1, set2, { (lhs: trees.Expr, rhs: trees.Expr) => trees.SetDifference(lhs, rhs) }, otpe))
- case _ => None
- }
- }
-
- object SubsetOperation {
- def unapply(expr: Expression): Option[(Expression, Expression, Option[Type])] = expr match {
- case PrimitiveFunction(bi.SetSubset, _, Seq(set1, set2), otpes) if (otpes.isEmpty || otpes.get.length == 1) =>
- Some((set1, set2, otpes.map(_.head)))
- case Operation("⊆", Seq(set1, set2)) =>
- Some((set1, set2, None))
- case _ => None
- }
- }
-
- object ContainsOperation {
- def unapply(expr: Expression): Option[(Expression, Expression, Option[Type])] = expr match {
- case PrimitiveFunction(bi.SetContains, _, Seq(set, elem), otpes) if (otpes.isEmpty || otpes.get.length == 1) =>
- Some((set, elem, otpes.map(_.head)))
- case Operation("∈", Seq(elem, set)) =>
- Some((set, elem, None))
- case _ => None
- }
- }
-
- object SetAddOperation {
- def unapply(expr: Expression): Option[(Expression, Expression, Option[Type])] = expr match {
- case PrimitiveFunction(bi.SetAdd, _, Seq(set, elem), otpes) if (otpes.isEmpty || otpes.get.length == 1) => Some((set, elem, otpes.map(_.head)))
- case _ => None
- }
- }
-
- object StringLengthOperation {
- def unapply(expr: Expression): Option[Expression] = expr match {
- case PrimitiveFunction(bi.StringLength, _, Seq(str), None) => {
- Some((str))
- }
- case _ => None
- }
- }
-
- object ConcatenateOperation {
- def unapply(expr: Expression): Option[(Expression, Expression)] = expr match {
- case PrimitiveFunction(bi.StringConcatenate, _, Seq(str1, str2), None) => {
- Some((str1, str2))
- }
- case Operation("++", Seq(str1, str2)) =>
- Some((str1, str2))
- case _ => None
- }
- }
-
- object SubstringOperation {
- def unapply(expr: Expression): Option[(Expression, Expression, Expression)] = expr match {
- case PrimitiveFunction(bi.StringSubstring, _, Seq(str, start, end), None) => {
- Some((str, start, end))
- }
- case _ => None
- }
- }
-
- object Binding {
- def unapply(expr: Expression): Option[(Expression, Expression)] = expr match {
- case Operation("->", Seq(a, b)) => Some((a, b))
- case _ => None
- }
- }
-
- object Bindings {
- def unapply(exprs: Seq[Expression]): Option[(Seq[Expression], Seq[(Expression, Expression)])] = {
- Some(Utils.classify(exprs) {
- case Binding(x, y) => Right((x, y))
- case x => Left(x)
- })
- }
- }
-
- object BagConstruction {
- def unapply(expr: Expression): Option[(Seq[Expression], Option[Type])] = expr match {
- case PrimitiveFunction(bi.BagConstructor, _, args, otpes) if (otpes.isEmpty || otpes.get.length == 1) =>
- Some((args, otpes.map(_.head)))
- case Operation("Set", es@Bindings(Seq(), _)) =>
- Some((es, None))
- case _ => None
- }
- }
-
- object BagMultiplicityOperation {
- def unapply(expr: Expression): Option[(Expression, Expression, Option[Type])] = expr match {
- case PrimitiveFunction(bi.BagMultiplicity, _, Seq(bag, elem), otpes) if (otpes.isEmpty || otpes.get.length == 1) =>
- Some((bag, elem, otpes.map(_.head)))
- case _ => None
- }
- }
-
- object BagAddOperation {
- def unapply(expr: Expression): Option[(Expression, Expression, Option[Type])] = expr match {
- case PrimitiveFunction(bi.BagAdd, _, Seq(bag, elem), otpes) if (otpes.isEmpty || otpes.get.length == 1) =>
- Some((bag, elem, otpes.map(_.head)))
- case _ => None
- }
- }
-
- object BagUnionOperation {
- def unapply(expr: Expression): Option[(Expression, Expression, Option[Type])] = expr match {
- case PrimitiveFunction(bi.BagUnion, _, Seq(bag1, bag2), otpes) if (otpes.isEmpty || otpes.get.length == 1) =>
- Some((bag1, bag2, otpes.map(_.head)))
- case _ => None
- }
- }
-
- object BagIntersectionOperation {
- def unapply(expr: Expression): Option[(Expression, Expression, Option[Type])] = expr match {
- case PrimitiveFunction(bi.BagIntersection, _, Seq(bag1, bag2), otpes) if (otpes.isEmpty || otpes.get.length == 1) =>
- Some((bag1, bag2, otpes.map(_.head)))
- case _ => None
- }
- }
-
- object BagDifferenceOperation {
- def unapply(expr: Expression): Option[(Expression, Expression, Option[Type])] = expr match {
- case PrimitiveFunction(bi.BagDifference, _, Seq(bag1, bag2), otpes) if (otpes.isEmpty || otpes.get.length == 1) =>
- Some((bag1, bag2, otpes.map(_.head)))
- case _ => None
- }
- }
-
- object BagBinaryOperation {
-
- def unapply(expr: Expression): Option[(Expression, Expression, (trees.Expr, trees.Expr) => trees.Expr, Option[Type])] = expr match {
- case BagUnionOperation(bag1, bag2, otpe) =>
- Some((bag1, bag2, { (b1: trees.Expr, b2: trees.Expr) => trees.BagUnion(b1, b2) }, otpe))
- case BagIntersectionOperation(bag1, bag2, otpe) =>
- Some((bag1, bag2, { (b1: trees.Expr, b2: trees.Expr) => trees.BagIntersection(b1, b2) }, otpe))
- case BagDifferenceOperation(bag1, bag2, otpe) =>
- Some((bag1, bag2, { (b1: trees.Expr, b2: trees.Expr) => trees.BagDifference(b1, b2) }, otpe))
- case _ => None
- }
- }
-
- object MapConstruction {
- def unapply(expr: Expression): Option[(Expression, Seq[Expression], Option[Either[Type, (Type, Type)]])] = expr match {
- case PrimitiveFunction(bi.MapConstructor, _, Seq(e, es @ _*), otpes) if (otpes.isEmpty || otpes.get.length == 2) =>
- Some((e, es, otpes.map({ case Seq(t1, t2) => Right((t1, t2))})))
- case TypeApplication(Operation("Map", Seq(e, es @ _*)), Seq(t)) =>
- Some((e, es, Some(Left(t))))
- case Operation("Map", Seq(e, es @ _*)) =>
- Some((e, es, None))
- case _ => None
- }
- }
-
- object MapApplyOperation {
- def unapply(expr: Expression): Option[(Expression, Expression, Option[(Type, Type)])] = expr match {
- case PrimitiveFunction(bi.MapApply, _, Seq(map, key), otpes) if (otpes.isEmpty || otpes.get.length == 2) =>
- Some((map, key, otpes.map({ case Seq(t1, t2) => (t1, t2)})))
- case _ => None
- }
- }
-
- object MapUpdatedOperation {
- def unapply(expr: Expression): Option[(Expression, Expression, Expression, Option[(Type, Type)])] = expr match {
- case PrimitiveFunction(bi.MapUpdated, _, Seq(map, key, value), otpes) if (otpes.isEmpty || otpes.get.length == 2) =>
- Some((map, key, value, otpes.map({ case Seq(t1, t2) => (t1, t2)})))
- case _ => None
- }
- }
-
- object IsConstructorOperation {
- def unapply(expr: Expression): Option[(Expression, trees.ADTConstructor)] = expr match {
- case Operation("is", Seq(lhs, ConsDef(cons))) => Some((lhs, cons))
- case _ => None
- }
- }
-
- object TypeAnnotationOperation {
- def unapply(expr: Expression): Option[(Expression, Type)] = expr match {
- case TypeApplication(Operation("TypeAnnotation", Seq(expr)), Seq(tpe)) => Some((expr, tpe))
- case _ => None
- }
- }
- }
-}
diff --git a/src/main/scala/inox/parsing/ExpressionElaborator.scala b/src/main/scala/inox/parsing/ExpressionElaborator.scala
deleted file mode 100644
index 9d770bf3c..000000000
--- a/src/main/scala/inox/parsing/ExpressionElaborator.scala
+++ /dev/null
@@ -1,1100 +0,0 @@
-/* Copyright 2009-2018 EPFL, Lausanne */
-
-package inox
-package parsing
-
-import scala.util.parsing.input._
-
-import Utils.plural
-
-trait ExpressionElaborators { self: Elaborators =>
- import trees._
-
- trait ExpressionElaborator { self0: Elaborator =>
-
- import ExprIR._
-
- //---- Errors ----//
-
- def functionArity(name: String, expected: Int, actual: Int, kind: String = "Function"): String =
- kind + " `" + name + "` takes " + expected + " argument" + plural(expected, "", "s") + ", " +
- actual + " " + plural(actual, "was", "were") + " given."
-
- def functionTypeArity(name: String, expected: Int, actual: Int, kind: String = "Function"): String =
- if (expected == 0) {
- kind + " `" + name + "` doesn't have any type parameters."
- } else {
- kind + " `" + name + "` takes " + expected + " type argument" + plural(expected, "", "s") + ", " +
- actual + " " + plural(actual, "was", "were") + " given."
- }
-
- lazy val expectedArrowBinding: String = "Expected binding of the form `key -> value`."
-
- lazy val unexpectedBinding: String = "Unexpected binding. Bindings can only appear in bags and maps constructors."
-
- lazy val unknownConstruct: String = "Unexpected construct."
-
- lazy val tupleInsufficientLength: String = "Tuples should be of length greater or equal to 2."
-
- lazy val warningSetOrBag: String = "Not all elements are of the same shape. " +
- "Use bindings of the form `key -> value` for bag literals and naked values for set literals."
-
- lazy val wrongNumberOfArguments: String = "Wrong number of arguments."
-
- object LocalFunction {
- def unapply(expression: Expression)(implicit store: Store): Option[(trees.FunDef, Option[Seq[Type]])] = expression match {
- case TypeApplication(Variable(id), targs) if store isFunction id.getName => Some((store getFunction id.getName, Some(targs)))
- case Variable(id) if store isFunction id.getName => Some((store getFunction id.getName, None))
- case _ => None
- }
- }
-
- object LocalConstructor {
- def unapply(expression: Expression)(implicit store: Store): Option[(trees.ADTSort, trees.ADTConstructor, Option[Seq[Type]])] = expression match {
- case TypeApplication(Variable(id), targs) if store isConstructor id.getName =>
- val (sort, cons) = store getConstructor id.getName
- Some((sort, cons, Some(targs)))
- case Variable(id) if store isConstructor id.getName =>
- val (sort, cons) = store getConstructor id.getName
- Some((sort, cons, None))
- case _ => None
- }
- }
-
- object TypedDefinition {
- def unapply(expression: Expression)(implicit store: Store): Option[(
- inox.Identifier,
- Seq[trees.TypeParameter],
- Seq[trees.ValDef],
- trees.Type,
- Boolean,
- Option[Seq[Type]]
- )] = expression match {
- case LocalFunction(fd, otpe) =>
- Some((fd.id, fd.typeArgs, fd.params, fd.getType, true, otpe))
- case LocalConstructor(sort, cons, otpe) =>
- Some((cons.id, sort.typeArgs, cons.fields, trees.ADTType(sort.id, sort.typeArgs), false, otpe))
- case TypedFunDef(fd, otps) =>
- Some((fd.id, fd.typeArgs, fd.params, fd.getType, true, otps))
- case TypedConsDef(cons, otps) =>
- val sort = cons.getSort
- Some((cons.id, sort.typeArgs, cons.fields, trees.ADTType(sort.id, sort.typeArgs), false, otps))
- case _ => None
- }
- }
-
- object IsConstructor {
- def unapply(expr: Expression)(implicit store: Store): Option[(Expression, trees.ADTSort, trees.ADTConstructor)] = expr match {
- case IsConstructorOperation(expr, cons) => Some((expr, cons.getSort, cons))
- case Operation("is", Seq(lhs, LocalConstructor(sort, cons, None))) => Some((lhs, sort, cons))
- case _ => None
- }
- }
-
- object Fields {
- def unapply(field: Field)(implicit store: Store): Option[Seq[(trees.ADTSort, trees.ValDef)]] =
- Field.unapplySeq(field)
- .filter(_.nonEmpty)
- .map(_.map { case (cons, vd) => (cons.getSort, vd) })
- .orElse(field match {
- case FieldName(name) if store isField name =>
- Some((store getFields name).map { case (sort, cons, vd) => (sort, vd) })
-
- case FieldIdentifier(id) =>
- val matchingCons = (store getFields id.name).filter {
- case (sort, cons, vd) => cons.fields.exists(_.id == id)
- }
-
- if (matchingCons.nonEmpty) {
- Some(matchingCons.map { case (sort, cons, vd) => (sort, vd) })
- } else {
- None
- }
-
- case _ => None
- })
- }
-
- def getExprBindings(es: Seq[(ExprIR.Identifier, Option[TypeIR.Expression])])
- (implicit store: Store, pos: Position): (Store, Seq[trees.Type], Constrained[Seq[trees.ValDef]]) = {
- val (newStore, tps, vds) = es.foldLeft((store, Seq[trees.Type](), Seq[Constrained[trees.ValDef]]())) {
- case ((store, tps, vds), (ident, otpe)) =>
- val id = getIdentifier(ident)
-
- val (tpe, ctpe) = otpe match {
- case None =>
- val tpe = Unknown.fresh
- (tpe, Constrained.unify(u => u(tpe)))
- case Some(tpe) =>
- (getSimpleType(tpe)(store), getType(tpe, bound = Some(ident.getName))(store))
- }
-
- ctpe match {
- case unsat: Unsatisfiable => (store, tps :+ tpe, vds :+ unsat)
- case c @ WithConstraints(ev, cs) =>
- val newStore = store + (ident.getName, id, tpe, ev)
- val newVds = vds :+ c.transform(tp => trees.ValDef(id, tp))
- (newStore, tps :+ tpe, newVds)
- }
- }
-
- (newStore, tps, Constrained.sequence(vds))
- }
-
- /** Type inference and expression elaboration.
- *
- * @param expr The expression to typecheck.
- * @param expected The type the expression is expected to have.
- * @param store Mappings of variables.
- * @return A sequence of constraints and a way to build an Inox expression given a solution to the constraints.
- */
- def getExpr(expr: Expression, expected: Unknown)(implicit store: Store): Constrained[trees.Expr] = {
- implicit val position: Position = expr.pos
-
- expr match {
-
- //---- Literals ----//
-
- // Boolean literals.
- case Literal(BooleanLiteral(value)) => Constrained.pure({
- trees.BooleanLiteral(value)
- }).addConstraint({
- Constraint.equal(expected, trees.BooleanType())
- })
-
- // Unit literal.
- case Literal(UnitLiteral) => Constrained.pure({
- trees.UnitLiteral()
- }).addConstraint({
- Constraint.equal(expected, trees.UnitType())
- })
-
- // String literal.
- case Literal(StringLiteral(string)) => Constrained.pure({
- trees.StringLiteral(string)
- }).addConstraint({
- Constraint.equal(expected, trees.StringType())
- })
-
- // Char literal.
- case Literal(CharLiteral(character)) => Constrained.pure({
- trees.CharLiteral(character)
- }).addConstraint({
- Constraint.equal(expected, trees.CharType())
- })
-
- // Numeric literal.
- case Literal(NumericLiteral(string)) => Constrained.unify({ (unifier: Unifier) =>
- unifier(expected) match {
- case trees.IntegerType() => trees.IntegerLiteral(BigInt(string))
- case trees.BVType(signed, n) => trees.BVLiteral(signed, BigInt(string), n)
- case trees.RealType() => trees.FractionLiteral(BigInt(string), 1)
- case tpe => throw new Exception("getExpr: Unexpected type during elaboration: " + tpe)
- }
- }).addConstraint({
- Constraint.isNumeric(expected)
- })
-
- // Decimal literal.
- case Literal(DecimalLiteral(whole, trailing, repeating)) => Constrained.pure({
- val (n, d) = Utils.toFraction(whole, trailing, repeating)
- trees.FractionLiteral(n, d)
- }).addConstraint({
- Constraint.equal(expected, trees.RealType())
- })
-
- // Empty set literal.
- // TODO: Also accept it as a Bag literal.
- case Operation("Set", Seq()) => {
- val elementType = Unknown.fresh
- Constrained.unify({ implicit u =>
- trees.FiniteSet(Seq(), u(elementType))
- }).addConstraint({
- Constraint.equal(expected, trees.SetType(elementType))
- })
- }
-
- //---- Variables ----//
-
- // Variable.
- case Variable(variable) => Constrained.unify({ implicit u =>
- val (i, _, tpe) = store getVariable variable.getName
- trees.Variable(i, tpe, Seq.empty)
- }).checkImmediate(
- store isVariable variable.getName, "Unknown variable " + variable.getName + ".", expr.pos
- ).addConstraint({
- Constraint.equal((store getVariable variable.getName)._2, expected)
- })
-
- //---- Embedded values ----//
-
- // Embedded expressions.
- case Literal(EmbeddedExpr(e)) => Constrained.pure({
- e
- }).addConstraint({
- Constraint.equal(e.getType(symbols), expected)
- }).checkImmediate(
- e.getType(symbols) != trees.Untyped, "Untyped embedded expression.", expr.pos
- )
-
- // Embedded Scala values.
- case Literal(EmbeddedValue(value)) => value match {
- case b : Boolean =>
- Constrained.pure({
- trees.BooleanLiteral(b)
- }).addConstraint({
- Constraint.equal(expected, trees.BooleanType())
- })
- case n : Int =>
- Constrained.pure({
- trees.Int32Literal(n)
- }).addConstraint({
- Constraint.equal(expected, trees.Int32Type())
- })
- case n : BigInt =>
- Constrained.pure({
- trees.IntegerLiteral(n)
- }).addConstraint({
- Constraint.equal(expected, trees.IntegerType())
- })
- case c : Char =>
- Constrained.pure({
- trees.CharLiteral(c)
- }).addConstraint({
- Constraint.equal(expected, trees.CharType())
- })
- case s : String =>
- Constrained.pure({
- trees.StringLiteral(s)
- }).addConstraint({
- Constraint.equal(expected, trees.StringType())
- })
- case _ : Unit =>
- Constrained.pure({
- trees.UnitLiteral()
- }).addConstraint({
- Constraint.equal(expected, trees.UnitType())
- })
- case _ => Constrained.fail("Unsupported embedded value: " + value + ".", expr.pos)
- }
-
- //---- Operators ----//
-
- // Unary minus.
- case Operation("-", Seq(arg)) => {
- getExpr(arg, expected).transform(trees.UMinus(_)).addConstraint({
- Constraint.isNumeric(expected)
- })
- }
-
- // Unary plus.
- case Operation("+", Seq(arg)) => {
- getExpr(arg, expected).addConstraint({
- Constraint.isNumeric(expected)
- })
- }
-
- // Binary operation defined on numeric types.
- case Operation(NumericBinOp(op), args) => {
-
- Constrained.sequence({
- args.map(getExpr(_, expected))
- }).transform({
- case Seq(a, b) => op(a, b)
- }).checkImmediate(
- args.length == 2, wrongNumberOfArguments, expr.pos
- ).addConstraint({
- Constraint.isNumeric(expected)
- })
- }
-
- // Binary operation defined on integral types.
- case Operation(IntegralBinOp(op), args) => {
-
- Constrained.sequence({
- args.map(getExpr(_, expected))
- }).transform({
- case Seq(a, b) => op(a, b)
- }).checkImmediate(
- args.length == 2, wrongNumberOfArguments, expr.pos
- ).addConstraint({
- Constraint.isIntegral(expected)
- })
- }
-
- // Unary negation.
- case Operation("!", Seq(arg)) => {
- getExpr(arg, expected).transform(trees.Not(_)).addConstraint({
- Constraint.equal(expected, trees.BooleanType())
- })
- }
-
- // Bitwise negation.
- case Operation("~", Seq(arg)) => {
- getExpr(arg, expected).transform(trees.BVNot(_)).addConstraint({
- Constraint.isBitVector(expected)
- })
- }
-
- // Binary operation defined on comparable types.
- case Operation(ComparableBinOp(op), args) => {
-
- val expectedArg = Unknown.fresh
-
- Constrained.sequence({
- args.map(getExpr(_, expectedArg))
- }).transform({
- case Seq(a, b) => op(a, b)
- }).checkImmediate(
- args.length == 2, wrongNumberOfArguments, expr.pos
- ).addConstraint({
- Constraint.isComparable(expectedArg)
- }).addConstraint({
- Constraint.equal(expected, trees.BooleanType())
- })
- }
-
- // Binary operation defined on bit vectors.
- case Operation(BitVectorBinOp(op), args) => {
- Constrained.sequence({
- args.map(getExpr(_, expected))
- }).transform({
- case Seq(a, b) => op(a, b)
- }).checkImmediate(
- args.length == 2, wrongNumberOfArguments, expr.pos
- ).addConstraint({
- Constraint.isBitVector(expected)
- })
- }
-
- // Equality.
- case Operation("==", args) => {
-
- val expectedArg = Unknown.fresh
-
- Constrained.sequence({
- args.map(getExpr(_, expectedArg))
- }).transform({
- case Seq(a, b) => trees.Equals(a, b)
- }).checkImmediate(
- args.length == 2, wrongNumberOfArguments, expr.pos
- ).addConstraint({
- Constraint.equal(expected, trees.BooleanType())
- })
- }
-
- // Inequality.
- case Operation("!=", args) => {
-
- val expectedArg = Unknown.fresh
-
- Constrained.sequence({
- args.map(getExpr(_, expectedArg))
- }).transform({
- case Seq(a, b) => trees.Not(trees.Equals(a, b))
- }).checkImmediate(
- args.length == 2, wrongNumberOfArguments, expr.pos
- ).addConstraint({
- Constraint.equal(expected, trees.BooleanType())
- })
- }
-
- // Binary operations defined on booleans.
- case Operation(BooleanBinOp(op), args) => {
-
- Constrained.sequence({
- args.map(getExpr(_, expected))
- }).transform({
- case Seq(a, b) => op(a, b)
- }).checkImmediate(
- args.length == 2, wrongNumberOfArguments, expr.pos
- ).addConstraint({
- Constraint.equal(expected, trees.BooleanType())
- })
- }
-
- // NAry operations defined on booleans.
- case BooleanNAryOperation(op, args) => {
-
- Constrained.sequence({
- args.map(getExpr(_, expected))
- }).transform(
- op(_)
- ).checkImmediate(
- args.length >= 2, wrongNumberOfArguments, expr.pos
- ).addConstraint({
- Constraint.equal(expected, trees.BooleanType())
- })
- }
-
- //---- Arity Errors on Primitive Functions and Constructors ----//
-
- case PrimitiveFunction(builtIn, name, args, otpes) if
- ((builtIn.params.isDefined && args.length != builtIn.params.get) || (otpes.isDefined && otpes.get.length != builtIn.tparams)) => {
-
- val kind = if (builtIn.isConstructor) "Primitive constructor" else "Primitive function"
-
- val argsError = if (builtIn.params.isDefined && args.length != builtIn.params.get) Seq {
- functionArity(name, builtIn.params.get, args.length, kind)
- } else Seq()
-
- val typesError = if (otpes.isDefined && otpes.get.length != builtIn.tparams) Seq {
- functionTypeArity(name, builtIn.tparams, otpes.get.length, kind)
- } else Seq()
-
- Constrained.fail((argsError ++ typesError).map({ case error => (error, expr.pos) }))
- }
-
- //---- Syntax Error on Set or Bags Literals ----//
-
- case Operation("Set", Bindings(es, bs)) if (!es.isEmpty && !bs.isEmpty) => {
- Constrained.fail(warningSetOrBag, expr.pos)
- }
-
- //---- Operations on Strings ----//
-
- // String concatenation.
- case ConcatenateOperation(str1, str2) => {
- (for {
- s1 <- getExpr(str1, expected)
- s2 <- getExpr(str2, expected)
- } yield { implicit u =>
- trees.StringConcat(s1, s2)
- }).addConstraint({
- Constraint.equal(expected, trees.StringType())
- })
- }
-
- // Substring.
- case SubstringOperation(str, start, end) => {
- val indexExpected = Unknown.fresh
-
- (for {
- s <- getExpr(str, expected)
- a <- getExpr(start, indexExpected)
- b <- getExpr(end, indexExpected)
- } yield { implicit u =>
- trees.SubString(s, a, b)
- }).addConstraint({
- Constraint.equal(expected, trees.StringType())
- }).addConstraint({
- Constraint.equal(indexExpected, trees.IntegerType())
- })
- }
-
- // String length.
- case StringLengthOperation(s) => {
- val stringExpected = Unknown.fresh
- getExpr(s, stringExpected).transform({
- trees.StringLength(_)
- }).addConstraint({
- Constraint.equal(stringExpected, trees.StringType())
- }).addConstraint({
- Constraint.equal(expected, trees.IntegerType())
- })
- }
-
- //---- Operations on Bags ----//
-
- case BagConstruction(Bindings(fs, _), _) if (!fs.isEmpty) => {
- Constrained.fail(fs.map {
- (e: Expression) => (expectedArrowBinding, e.pos)
- })
- }
-
- case BagConstruction(Bindings(_, bs), otpe) => {
- val (et, elementType) = otpe match {
- case None =>
- val et = Unknown.fresh
- (et, Constrained.unify(u => u(et)))
- case Some(tpe) =>
- (getSimpleType(tpe), getType(tpe))
- }
-
- val freshs = Seq.fill(bs.length)(Unknown.fresh)
- val countType = Unknown.fresh
-
- val bindingsExpr = Constrained.sequence({
- bs.zip(freshs).map({ case ((k, v), t) =>
- (for {
- key <- getExpr(k, t)
- value <- getExpr(v, countType)
- } yield { implicit u =>
- (key, value): (Expr, Expr)
- }).addConstraint({
- Constraint.equal(t, et)
- })
- })
- })
-
- (for {
- bindings <- bindingsExpr
- base <- elementType
- } yield { implicit u =>
- trees.FiniteBag(bindings, base)
- }).addConstraint({
- Constraint.equal(countType, trees.IntegerType())
- }).addConstraint({
- Constraint.equal(expected, trees.BagType(et))
- })
- }
-
- // Bag multiplicity.
- case BagMultiplicityOperation(map, key, otpe) => {
- val elementType = otpe.map(getSimpleType).getOrElse(Unknown.fresh)
- val keyExpected = Unknown.fresh
- val mapExpected = Unknown.fresh
-
- (for {
- m <- getExpr(map, mapExpected)
- k <- getExpr(key, keyExpected)
- } yield { implicit u =>
- trees.MultiplicityInBag(k, m)
- }).addConstraint({
- Constraint.equal(expected, trees.IntegerType())
- }).addConstraint({
- Constraint.equal(keyExpected, elementType)
- }).addConstraint({
- Constraint.equal(mapExpected, trees.BagType(elementType))
- })
- }
-
- // Bag binary operation.
- case BagBinaryOperation(map1, map2, op, otpe) => {
- val elementType = otpe.map(getSimpleType).getOrElse(Unknown.fresh)
- val mapExpected = Unknown.fresh
-
- (for {
- m1 <- getExpr(map1, mapExpected)
- m2 <- getExpr(map2, mapExpected)
- } yield { implicit u =>
- op(m1, m2)
- }).addConstraint({
- Constraint.equal(mapExpected, trees.BagType(elementType))
- })
- }
-
- // Bag add operation.
- case BagAddOperation(bag, elem, otpe) => {
- val elementExpected = Unknown.fresh
- val elementType = otpe.map(getSimpleType).getOrElse(Unknown.fresh)
-
- (for {
- b <- getExpr(bag, expected)
- e <- getExpr(elem, elementExpected)
- } yield { implicit u =>
- trees.BagAdd(b, e)
- }).addConstraint({
- Constraint.equal(expected, trees.BagType(elementType))
- }).addConstraint({
- Constraint.equal(elementExpected, elementType)
- })
- }
-
- //---- Operations on Maps ----//
-
- case MapConstruction(_, Bindings(fs, _), _) if (!fs.isEmpty) => {
- Constrained.fail(fs.map {
- (e: Expression) => (expectedArrowBinding, e.pos)
- })
- }
-
- case MapConstruction(dflt, Bindings(_, bs), optEitherKeyAll) => {
- val (kt, keyType, vt, valueType) = optEitherKeyAll match {
- case None =>
- val (kt, vt) = (Unknown.fresh, Unknown.fresh)
- (kt, Constrained.unify(u => u(kt)), vt, Constrained.unify(u => u(vt)))
- case Some(Left(t)) =>
- val vt = Unknown.fresh
- (getSimpleType(t), getType(t), vt, Constrained.unify(u => u(vt)))
- case Some(Right((t1, t2))) =>
- (getSimpleType(t1), getType(t1), getSimpleType(t2), getType(t2))
- }
-
- val mappingsFresh = Seq.fill(bs.length)((Unknown.fresh, Unknown.fresh))
- val mappingsExpr = Constrained.sequence(bs.zip(mappingsFresh).map({
- case ((k, v), (tk, tv)) =>
- (for {
- key <- getExpr(k, tk)
- value <- getExpr(v, tv)
- } yield { implicit u =>
- (key, value): (Expr, Expr)
- }).addConstraint({
- Constraint.equal(tk, kt)
- }).addConstraint({
- Constraint.equal(tv, vt)
- })
- }))
-
- val defaultFresh = Unknown.fresh
- val defaultExpr = getExpr(dflt, defaultFresh).addConstraint({
- Constraint.equal(defaultFresh, vt)
- })
-
- (for {
- mappings <- mappingsExpr
- default <- defaultExpr
- key <- keyType
- value <- valueType
- } yield { implicit u =>
- trees.FiniteMap(mappings, default, key, value)
- }).addConstraint({
- Constraint.equal(expected, trees.MapType(kt, vt))
- })
- }
-
- // Map apply.
- case MapApplyOperation(map, key, otpes) => {
- val mapExpected = Unknown.fresh
- val keyExpected = Unknown.fresh
-
- val (keyType, valueType) = otpes.map({
- case (t1, t2) => (getSimpleType(t1), getSimpleType(t2))
- }).getOrElse((Unknown.fresh, Unknown.fresh))
-
- (for {
- m <- getExpr(map, mapExpected)
- k <- getExpr(key, keyExpected)
- } yield { implicit u =>
- trees.MapApply(m, k)
- }).addConstraint({
- Constraint.equal(keyExpected, keyType)
- }).addConstraint({
- Constraint.equal(expected, valueType)
- }).addConstraint({
- Constraint.equal(mapExpected, trees.MapType(keyType, valueType))
- })
- }
-
- // Map updated.
- case MapUpdatedOperation(map, key, value, otpes) => {
- val keyExpected = Unknown.fresh
- val valueExpected = Unknown.fresh
-
- val (keyType, valueType) = otpes.map({
- case (t1, t2) => (getSimpleType(t1), getSimpleType(t2))
- }).getOrElse((Unknown.fresh, Unknown.fresh))
-
- (for {
- m <- getExpr(map, expected)
- k <- getExpr(key, keyExpected)
- v <- getExpr(value, valueExpected)
- } yield { implicit u =>
- trees.MapUpdated(m, k, v)
- }).addConstraint({
- Constraint.equal(expected, trees.MapType(keyType, valueType))
- }).addConstraint({
- Constraint.equal(keyExpected, keyType)
- }).addConstraint({
- Constraint.equal(valueExpected, valueType)
- })
- }
-
- //---- Operations on Set ----//
-
- // Call to the Set constructor.
- case SetConstruction(es, otpe) => {
- val lowers = Seq.fill(es.length) { Unknown.fresh }
- val (upper, elementType) = otpe match {
- case None =>
- val et = Unknown.fresh
- (et, Constrained.unify(u => u(et)))
- case Some(tpe) =>
- (getSimpleType(tpe), getType(tpe))
- }
-
- val constrainedEs = Constrained.sequence(es.zip(lowers).map {
- case (e, lower) => getExpr(e, lower).addConstraint({
- Constraint.equal(lower, upper)
- })
- })
-
- (for {
- es <- constrainedEs
- base <- elementType
- } yield { implicit u =>
- trees.FiniteSet(es, base)
- }).addConstraint({
- Constraint.equal(expected, trees.SetType(upper))
- })
- }
-
- // Call to contains.
- case ContainsOperation(set, elem, otpe) => {
- val setType = Unknown.fresh
- val elementExpected = Unknown.fresh
- val elementType = otpe.map(getSimpleType).getOrElse(Unknown.fresh)
-
- (for {
- s <- getExpr(set, setType)
- e <- getExpr(elem, elementExpected)
- } yield { implicit u =>
- trees.ElementOfSet(e, s)
- }).addConstraint({
- Constraint.equal(expected, trees.BooleanType())
- }).addConstraint({
- Constraint.equal(setType, trees.SetType(elementType))
- }).addConstraint({
- Constraint.equal(elementExpected, elementType)
- })
- }
-
- // Call to subset.
- case SubsetOperation(set1, set2, otpe) => {
- val setType = Unknown.fresh
- val elementType = otpe.map(getSimpleType).getOrElse(Unknown.fresh)
-
- (for {
- s1 <- getExpr(set1, setType)
- s2 <- getExpr(set2, setType)
- } yield { implicit u =>
- trees.SubsetOf(s1, s2)
- }).addConstraint({
- Constraint.equal(expected, trees.BooleanType())
- }).addConstraint({
- Constraint.equal(setType, trees.SetType(elementType))
- })
- }
-
- // Binary operations on set that return sets.
- case SetBinaryOperation(set1, set2, f, otpe) => {
- val elementType = otpe.map(getSimpleType).getOrElse(Unknown.fresh)
-
- (for {
- s1 <- getExpr(set1, expected)
- s2 <- getExpr(set2, expected)
- } yield { implicit u =>
- f(s1, s2)
- }).addConstraint({
- Constraint.equal(expected, trees.SetType(elementType))
- })
- }
-
- // Set add operation.
- case SetAddOperation(set, elem, otpe) => {
- val elementExpected = Unknown.fresh
- val elementType = otpe.map(getSimpleType).getOrElse(Unknown.fresh)
-
- (for {
- s <- getExpr(set, expected)
- e <- getExpr(elem, elementExpected)
- } yield { implicit u =>
- trees.SetAdd(s, e)
- }).addConstraint({
- Constraint.equal(expected, trees.SetType(elementType))
- }).addConstraint({
- Constraint.equal(elementExpected, elementType)
- })
- }
-
- //---- Conditionals ----//
-
- // Conditional expression.
- case Operation("IfThenElse", Seq(cond, thenn, elze)) => {
- val expectedCond = Unknown.fresh
-
- (for {
- c <- getExpr(cond, expectedCond)
- t <- getExpr(thenn, expected)
- e <- getExpr(elze, expected)
- } yield { implicit u =>
- trees.IfExpr(c, t, e)
- }).addConstraint({
- Constraint.equal(expectedCond, trees.BooleanType())
- })
- }
-
- // Assumptions
- case Operation("Assume", Seq(pred, rest)) => {
- val booleanExpected = Unknown.fresh
- (for {
- p <- getExpr(pred, booleanExpected)
- r <- getExpr(rest, expected)
- } yield { implicit u =>
- trees.Assume(p, r)
- }).addConstraint({
- Constraint.equal(booleanExpected, trees.BooleanType())
- })
- }
-
- //---- Functions ----//
-
- // Function and constructor invocation.
- case Application(TypedDefinition(id, tparams, params, resultType, isFun, optTpeArgs), args) => {
-
- val freshs = args.map({ a => Unknown.fresh(a.pos) })
- val tfreshs = tparams.map({ tp => Unknown.fresh })
-
- val constrainedArgs = Constrained.sequence({
- args.zip(freshs).map({ case (e, t) => getExpr(e, t) })
- })
-
- val constrainedTpArgs = optTpeArgs match {
- case None =>
- Constrained.sequence(tfreshs.map(tp => Constrained.unify(u => u(tp))))
- case Some(tpeArgs) => {
- Constrained.sequence({
- tpeArgs.map(getType(_))
- }).addConstraints({
- // The annotated types should correspond to the type of the parameters.
- tpeArgs.zip(tfreshs).map({ case (a, b) => Constraint.equal(getSimpleType(a), b) })
- }).checkImmediate(
- // Their should be the same number of type applied than type parameters of the function.
- tpeArgs.length == tparams.length,
- functionTypeArity(id.name, tparams.length, tpeArgs.length),
- expr.pos
- )
- }
- }
-
- val instantiator = new typeOps.TypeInstantiator((tparams zip tfreshs).toMap)
- val paramTypes = params.map(vd => instantiator.transform(vd.getType))
-
- (for {
- tpArgs <- constrainedTpArgs
- args <- constrainedArgs
- } yield { implicit u =>
- if (isFun) trees.FunctionInvocation(id, tpArgs, args)
- else trees.ADT(id, tpArgs, args)
- }).checkImmediate(
- // There should be the same number of argument applied than parameters of the function.
- args.length == params.length,
- functionArity(id.name, params.length, args.length),
- expr.pos
- ).addConstraints({
- // The types of arguments should be equal to the type of the parameters.
- freshs.zip(paramTypes).map({ case (a, b) => Constraint.equal(a, b)(a.pos) }) ++
- // The type parameter unknown must exist or we won't assign anything to them
- tfreshs.map(Constraint.exist)
- }).addConstraint({
- // The return type of the function should be what is expected.
- Constraint.equal(instantiator.transform(resultType), expected)
- })
- }
-
- // Tuple Construction.
- case Operation("Tuple", args) => {
- val argsTypes = Seq.fill(args.size)(Unknown.fresh)
-
- Constrained.sequence(args.zip(argsTypes).map({
- case (e, t) => getExpr(e, t)
- })).transform({
- trees.Tuple(_)
- }).checkImmediate(
- args.size >= 2,
- tupleInsufficientLength,
- expr.pos
- ).addConstraint({
- // This assumes that Tuples are invariant. Is this really the case in Inox ?
- Constraint.equal(expected, trees.TupleType(argsTypes))
- })
- }
-
- //---- Bindings ----//
-
- // Let binding.
- case Let(bs, body) if (!bs.isEmpty) => {
-
- val (ident, otype, value) = bs.head
- val rest = if (bs.tail.isEmpty) body else Let(bs.tail, body)
-
- val id = getIdentifier(ident)
-
- val (lt, letType) = otype match {
- case None =>
- val lt = Unknown.fresh
- (lt, Constrained.unify(u => u(lt)))
- case Some(tpe) =>
- (getSimpleType(tpe), getType(tpe, bound = Some(ident.getName)))
- }
-
- val valueType = Unknown.fresh
-
- (for {
- v <- getExpr(value, valueType)
- tpe <- letType
- r <- getExpr(rest, expected)(store + (ident.getName, id, lt, tpe))
- } yield { implicit u =>
- trees.Let(trees.ValDef(id, tpe), v, r)
- }).addConstraint({
- Constraint.equal(valueType, lt)
- })
- }
-
- // Lambda abstraction.
- case Abstraction(Lambda, bindings, body) => {
- val expectedBody = Unknown.fresh
-
- val (newStore, tps, cvds) = getExprBindings(bindings)
-
- (for {
- params <- cvds
- b <- getExpr(body, expectedBody)(newStore)
- } yield { implicit u =>
- trees.Lambda(params, b)
- }).addConstraint({
- // The expected type should be a function.
- Constraint.equal(expected, trees.FunctionType(tps, expectedBody))
- })
- }
-
- // Forall-Quantification.
- case Abstraction(Forall, bindings, body) => {
- val (newStore, tps, cvds) = getExprBindings(bindings)
-
- (for {
- params <- cvds
- b <- getExpr(body, expected)(newStore)
- } yield { implicit u =>
- trees.Forall(params, b)
- }).addConstraint({
- // The expected type should be boolean.
- Constraint.equal(expected, trees.BooleanType())
- }).addConstraints({
- // The fresh parameter types must exist in the final solution.
- tps.collect { case u: Unknown => Constraint.exist(u) }
- })
- }
-
- // Exists-Quantification.
- case Abstraction(Exists, bindings, body) => {
- val (newStore, tps, cvds) = getExprBindings(bindings)
-
- (for {
- params <- cvds
- b <- getExpr(body, expected)(newStore)
- } yield { implicit u =>
- trees.Not(trees.Forall(params, trees.Not(b)))
- }).addConstraint({
- // The expected type should be boolean.
- Constraint.equal(expected, trees.BooleanType())
- }).addConstraints({
- // The fresh parameter types must exist in the final solution.
- tps.collect { case u: Unknown => Constraint.exist(u) }
- })
- }
-
- case Abstraction(Choose, bindings @ Seq((id, otype)), body) => {
- val predType = Unknown.fresh
-
- val (newStore, Seq(tp), cvds) = getExprBindings(bindings)
-
- (for {
- res <- cvds
- b <- getExpr(body, predType)(newStore)
- } yield { implicit u =>
- trees.Choose(res.head, b)
- }).addConstraint({
- Constraint.equal(predType, trees.BooleanType())
- }).addConstraint({
- Constraint.equal(tp, expected)
- })
- }
-
- //---- Type Casting ----//
-
- // Annotation.
- case TypeAnnotationOperation(expr, tpe) => {
- val inoxTpe = getSimpleType(tpe)
-
- getExpr(expr, expected).addConstraint({
- Constraint.equal(expected, inoxTpe)
- })
- }
-
- // Instance checking.
- case IsConstructor(expr, sort, cons) => {
- val tpe = Unknown.fresh
- val tps = sort.tparams.map(_ => Unknown.fresh)
-
- getExpr(expr, tpe).transform({
- trees.IsConstructor(_, cons.id)
- }).addConstraint({
- // The expected type should be Boolean.
- Constraint.equal(expected, trees.BooleanType())
- }).addConstraint({
- // The expression's type should be an ADT type (with free type parameters)
- Constraint.equal(tpe, trees.ADTType(sort.id, tps))
- })
- }
-
- //---- Accessors ----//
-
- // Tuple Selection.
- case Selection(expr, TupleField(i)) => {
- val tupleType = Unknown.fresh
- val memberType = Unknown.fresh
-
- getExpr(expr, tupleType).transform({
- trees.TupleSelect(_, i)
- }).addConstraint({
- Constraint.equal(memberType, expected)
- }).addConstraint({
- Constraint.atIndex(tupleType, memberType, i)
- })
- }
-
- // Field Selection.
- case Selection(expr, f @ Fields(fields)) => {
- val expectedExpr = Unknown.fresh
-
- (for {
- e <- getExpr(expr, expectedExpr)
- } yield { implicit u =>
- val trees.ADTType(id, tps) = u(expectedExpr)
- val sort = symbols.lookupSort(id).getOrElse(store getSort id.name)
- val vd = sort.constructors.flatMap(_.fields).find(vd => f match {
- case FieldName(name) => vd.id.name == name
- case FieldIdentifier(id) => vd.id == id
- case _ => false
- }).getOrElse {
- throw new Exception("getExpr: Unexpected unification result for field: " + f)
- }
-
- trees.ADTSelector(e, vd.id)
- }).addConstraint({
- Constraint.hasSortIn(expectedExpr, fields.map { case (sort, vd) =>
- sort -> { (tpe: trees.Type) =>
- val ADTType(_, tps) = tpe
- val instantiator = new typeOps.TypeInstantiator((sort.typeArgs zip tps).toMap)
- Seq(Constraint.equal(instantiator.transform(vd.getType), expected))
- }
- } : _*)
- })
- }
-
- //---- Function application ----//
- // This is matched last since other constructs have the same shape.
-
- // Function application.
- case Application(callee, args) => {
- val expectedCallee = Unknown.fresh
- val expectedArgs = Seq.fill(args.length)(Unknown.fresh)
-
- val constrainedArgs = Constrained.sequence({
- (args zip expectedArgs).map { case (arg, tpe) => getExpr(arg, tpe) }
- })
-
- (for {
- c <- getExpr(callee, expectedCallee)
- as <- constrainedArgs
- } yield { implicit u =>
- trees.Application(c, as)
- }).addConstraint({
- Constraint.equal(expectedCallee, trees.FunctionType(expectedArgs, expected))
- })
- }
-
- //---- Others ----//
-
- case Binding(_, _) => {
- Constrained.fail(unexpectedBinding, expr.pos)
- }
-
- case _ => {
- Constrained.fail(unknownConstruct, expr.pos)
- }
- }
- }
- }
-}
diff --git a/src/main/scala/inox/parsing/ExpressionExtractor.scala b/src/main/scala/inox/parsing/ExpressionExtractor.scala
deleted file mode 100644
index b9776f6f9..000000000
--- a/src/main/scala/inox/parsing/ExpressionExtractor.scala
+++ /dev/null
@@ -1,587 +0,0 @@
-/* Copyright 2017 EPFL, Lausanne */
-
-package inox
-package parsing
-
-trait ExpressionExtractors { self: Extractors =>
-
- trait ExpressionExtractor { self0: Extractor =>
-
- import ExprIR._
-
- private type MatchObligation = Option[Match]
-
- protected def toIdObl(pair: (inox.Identifier, Identifier)): MatchObligation = {
- val (id, templateId) = pair
-
- templateId match {
- case IdentifierName(name) if name == id.name => Some(empty)
- case IdentifierHole(index) => Some(matching(index, id))
- case _ => None
- }
- }
- protected def toExprObl(pair: (trees.Expr, Expression)): MatchObligation = {
- extract(pair._1, pair._2)
- }
- protected def toTypeObl(pair: (trees.Type, Type)): MatchObligation = {
- val (tpe, template) = pair
- extract(tpe, template)
- }
- protected def toOptTypeObl(pair: (trees.Type, Option[Type])): MatchObligation = {
- val (tpe, optTemplateType) = pair
-
- if (optTemplateType.isEmpty) {
- Some(empty)
- }
- else {
- toTypeObl(tpe -> optTemplateType.get)
- }
- }
- protected def toExprObls(pair: (Seq[trees.Expr], Seq[Expression])): MatchObligation = {
- pair match {
- case (Seq(), Seq()) => Some(empty)
- case (Seq(), _) => None
- case (_, Seq()) => None
- case (_, Seq(ExpressionSeqHole(i), templateRest @ _*)) => {
- val n = pair._1.length - templateRest.length
-
- if (n < 0) {
- None
- }
- else {
- val (matches, rest) = pair._1.splitAt(n)
-
- toExprObls(rest -> templateRest) map {
- case matchings => matching(i, matches) ++ matchings
- }
- }
- }
- case (Seq(expr, exprRest @ _*), Seq(template, templateRest @ _*)) => for {
- matchingsHead <- extract(toExprObl(expr -> template))
- matchingsRest <- extract(toExprObls(exprRest -> templateRest))
- } yield matchingsHead ++ matchingsRest
- }
- }
- protected def toTypeObls(pair: (Seq[trees.Type], Seq[Type])): MatchObligation = {
- extractSeq(pair._1, pair._2)
- }
- protected def toOptTypeObls(pair: (Seq[trees.Type], Seq[Option[Type]])): MatchObligation = {
- val pairs = pair._1.zip(pair._2).collect {
- case (tpe, Some(template)) => toTypeObl(tpe -> template)
- }
- extract(pairs : _*)
- }
- protected def toIdObls(pair: (Seq[inox.Identifier], Seq[Identifier])): MatchObligation = {
-
- // TODO: Change this.
- val (ids, templatesIds) = pair
-
- if (ids.length == templatesIds.length) {
- extract(ids.zip(templatesIds).map(toIdObl) : _*)
- }
- else {
- None
- }
- }
-
- protected def extract(pairs: MatchObligation*): MatchObligation = {
-
- val zero: MatchObligation = Some(empty)
-
- pairs.foldLeft(zero) {
- case (None, _) => None
- case (Some(matchingsAcc), obligation) => {
- obligation map {
- case extraMatchings => matchingsAcc ++ extraMatchings
- }
- }
- }
- }
-
- def extract(expr: trees.Expr, template: Expression): MatchObligation = {
-
- val success = Some(empty)
-
- template match {
- case ExpressionHole(index) =>
- return Some(Map(index -> expr))
- case TypeAnnotationOperation(templateInner, templateType) =>
- return extract(toTypeObl(expr.getType -> templateType), toExprObl(expr -> templateInner))
- case _ => ()
- }
-
- expr match {
-
- // Variables
-
- case trees.Variable(inoxId, _, _) => template match {
- case Variable(templateId) => extract(toIdObl(inoxId -> templateId))
- case _ => fail
- }
-
- // Control structures.
-
- case trees.IfExpr(cond, thenn, elze) => template match {
- case Operation("IfThenElse", Seq(templateCond, templateThenn, templateElze)) =>
- extract(toExprObl(cond -> templateCond), toExprObl(thenn -> templateThenn), toExprObl(elze -> templateElze))
- case _ => fail
- }
-
- case trees.Assume(pred, body) => template match {
- case Operation("Assume", Seq(templatePred, templateBody)) =>
- extract(toExprObl(pred -> templatePred), toExprObl(body -> templateBody))
- case _ => fail
- }
-
- case trees.Let(vd, value, body) => template match {
- case Let(Seq((templateId, optTemplateType, templateValue), rest @ _*), templateBody) => {
-
- val templateRest = rest match {
- case Seq() => templateBody
- case _ => Let(rest, templateBody)
- }
-
- extract(
- toExprObl(value -> templateValue),
- toOptTypeObl(vd.getType -> optTemplateType),
- toIdObl(vd.id -> templateId),
- toExprObl(body -> templateRest))
- }
- case _ => fail
- }
-
- case trees.Lambda(args, body) => template match {
- case Abstraction(Lambda, templateArgs, templateBody) =>
- extract(
- toOptTypeObls(args.map(_.getType) -> templateArgs.map(_._2)),
- toIdObls(args.map(_.id) -> templateArgs.map(_._1)),
- toExprObl(body -> templateBody))
- case _ => fail
- }
-
- case trees.Forall(args, body) => template match {
- case Abstraction(Forall, templateArgs, templateBody) =>
- extract(
- toOptTypeObls(args.map(_.getType) -> templateArgs.map(_._2)),
- toIdObls(args.map(_.id) -> templateArgs.map(_._1)),
- toExprObl(body -> templateBody))
- case _ => fail
- }
-
- case trees.Choose(arg, pred) => template match {
- case Abstraction(Choose, Seq((templateId, optTemplateType), rest @ _*), templatePred) => {
- val templateRest = rest match {
- case Seq() => templatePred
- case _ => Abstraction(Choose, rest, templatePred)
- }
-
- extract(
- toOptTypeObl(arg.getType -> optTemplateType),
- toIdObl(arg.id -> templateId),
- toExprObl(pred -> templateRest))
- }
- case _ => fail
- }
-
- // Functions.
-
- case trees.Application(callee, args) => template match {
- case Application(templateCallee, templateArgs) =>
- extract(toExprObl(callee -> templateCallee), toExprObls(args -> templateArgs))
- case _ => fail
- }
-
- case trees.FunctionInvocation(id, tpes, args) => template match {
- case Application(TypedFunDef(fd, optTemplatesTypes), templateArgs) if (id == fd.id) => {
- optTemplatesTypes match {
- case None => extract(toExprObls(args -> templateArgs))
- case Some(templateTypes) => extract(toExprObls(args -> templateArgs), toTypeObls(tpes -> templateTypes))
- case _ => fail
- }
- }
- case Application(TypeApplication(ExpressionHole(index), templateTypes), templateArgs) => for {
- matchings <- extract(toTypeObls(tpes -> templateTypes), toExprObls(args -> templateArgs))
- } yield matching(index, id) ++ matchings
- case Application(ExpressionHole(index), templateArgs) => for {
- matchings <- extract(toExprObls(args -> templateArgs))
- } yield matching(index, id) ++ matchings
- case _ => fail
- }
-
- // ADTs.
-
- case trees.ADT(id, tpes, args) => template match {
- case Application(TypedConsDef(cons, optTemplatesTypes), templateArgs) if (id == cons.id) => {
- optTemplatesTypes match {
- case None => extract(toExprObls(args -> templateArgs))
- case Some(templateTypes) => extract(toExprObls(args -> templateArgs), toTypeObls(tpes -> templateTypes))
- case _ => fail
- }
- }
- case Application(TypeApplication(ExpressionHole(index), templateTypes), templateArgs) => for {
- matchings <- extract(toTypeObls(tpes -> templateTypes), toExprObls(args -> templateArgs))
- } yield matching(index, id) ++ matchings
- case Application(ExpressionHole(index), templateArgs) => for {
- matchings <- extract(toExprObls(args -> templateArgs))
- } yield matching(index, id) ++ matchings
- case _ => fail
- }
-
- case trees.ADTSelector(adt, selector) => template match {
- case Selection(adtTemplate, FieldHole(index)) => for {
- matchings <- extract(toExprObl(adt -> adtTemplate))
- } yield matching(index, selector) ++ matchings
- case Selection(adtTemplate, Field((cons, vd))) if (vd.id == selector) => // TODO: Handle selectors with the same name.
- extract(toExprObl(adt -> adtTemplate))
- case _ => fail
- }
-
- // Instance checking and casting.
-
- case trees.IsConstructor(inner, id) => template match {
- case IsConstructorOperation(templateInner, name) if id.name == name =>
- extract(toExprObl(inner -> templateInner))
- case _ => fail
- }
-
- // Various.
-
- case trees.CharLiteral(char) => template match {
- case Literal(CharLiteral(`char`)) => success
- case _ => fail
- }
-
- case trees.UnitLiteral() => template match {
- case Literal(UnitLiteral) => success
- case _ => fail
- }
-
- case trees.Equals(left, right) => template match {
- case Operation("==", Seq(templateLeft, templateRight)) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
- case _ => fail
- }
-
- // Booleans.
-
- case trees.BooleanLiteral(bool) => template match {
- case Literal(BooleanLiteral(`bool`)) => success
- case _ => fail
- }
-
- case trees.And(exprs) => template match {
- case BooleanAndOperation(templates) =>
- extract(toExprObls(exprs -> templates))
- case _ => fail
- }
-
- case trees.Or(exprs) => template match {
- case BooleanOrOperation(templates) =>
- extract(toExprObls(exprs -> templates))
- case _ => fail
- }
-
- case trees.Implies(left, right) => template match {
- case Operation("==>", Seq(templateLeft, templateRight)) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
- case _ => fail
- }
-
- case trees.Not(inner) => template match {
- case Operation("!", Seq(templateInner)) => extract(toExprObl(inner -> templateInner))
- case _ => fail
- }
-
- // Strings.
-
- case trees.StringLiteral(string) => template match {
- case Literal(StringLiteral(`string`)) => success
- case _ => fail
- }
-
- case trees.StringConcat(left, right) => template match {
- case ConcatenateOperation(templateLeft, templateRight) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
- case _ => fail
- }
-
- case trees.SubString(string, from, to) => template match {
- case SubstringOperation(templateString, templateFrom, templateTo) =>
- extract(toExprObl(string -> templateString), toExprObl(from -> templateFrom), toExprObl(to -> templateTo))
- case _ => fail
- }
-
- case trees.StringLength(string) => template match {
- case StringLengthOperation(templateString) => extract(toExprObl(string -> templateString))
- case _ => fail
- }
-
- // Numbers.
-
- case trees.IntegerLiteral(value) => template match {
- case Literal(NumericLiteral(string)) if (scala.util.Try(BigInt(string)).toOption == Some(value)) => success
- case _ => fail
- }
-
- case trees.FractionLiteral(numerator, denominator) => template match {
- case Literal(NumericLiteral(string)) if { val n = BigInt(string); n * denominator == numerator } => success
- case Literal(DecimalLiteral(w, t, r)) if { val (n, d) = Utils.toFraction(w, t, r); n * denominator == d * numerator } => success
- case _ => fail
- }
-
- case trees.Plus(left, right) => template match {
- case Operation("+", Seq(templateLeft, templateRight)) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
- case _ => fail
- }
-
- case trees.Minus(left, right) => template match {
- case Operation("-", Seq(templateLeft, templateRight)) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
- case _ => fail
- }
-
- case trees.Times(left, right) => template match {
- case Operation("*", Seq(templateLeft, templateRight)) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
- case _ => fail
- }
-
- case trees.Division(left, right) => template match {
- case Operation("/", Seq(templateLeft, templateRight)) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
- case _ => fail
- }
-
- case trees.UMinus(inner) => template match {
- case Operation("-", Seq(templateInner)) => extract(toExprObl(inner -> templateInner))
- case _ => fail
- }
-
- case trees.Remainder(left, right) => template match {
- case Operation("%", Seq(templateLeft, templateRight)) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
- case _ => fail
- }
-
- case trees.Modulo(left, right) => template match {
- case Operation("mod", Seq(templateLeft, templateRight)) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
- case _ => fail
- }
-
- case trees.LessThan(left, right) => template match {
- case Operation("<", Seq(templateLeft, templateRight)) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
- case _ => fail
- }
-
- case trees.GreaterThan(left, right) => template match {
- case Operation(">", Seq(templateLeft, templateRight)) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
- case _ => fail
- }
-
- case trees.LessEquals(left, right) => template match {
- case Operation("<=", Seq(templateLeft, templateRight)) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
- case _ => fail
- }
-
- case trees.GreaterEquals(left, right) => template match {
- case Operation(">=", Seq(templateLeft, templateRight)) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
- case _ => fail
- }
-
- // Bit vectors.
-
- case v@trees.BVLiteral(signed, value, base) => template match {
- case Literal(NumericLiteral(string)) if (scala.util.Try(trees.BVLiteral(signed, BigInt(string), base)).toOption == Some(v)) => success
- case _ => fail
- }
-
- case trees.BVNot(inner) => template match {
- case Operation("~", Seq(templateInner)) => extract(toExprObl(inner -> templateInner))
- case _ => fail
- }
-
- case trees.BVOr(left, right) => template match {
- case Operation("|", Seq(templateLeft, templateRight)) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
- }
-
- case trees.BVAnd(left, right) => template match {
- case Operation("&", Seq(templateLeft, templateRight)) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
- case _ => fail
- }
-
- case trees.BVXor(left, right) => template match {
- case Operation("^", Seq(templateLeft, templateRight)) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
- case _ => fail
- }
-
- case trees.BVShiftLeft(left, right) => template match {
- case Operation("<<", Seq(templateLeft, templateRight)) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
- case _ => fail
- }
-
- case trees.BVAShiftRight(left, right) => template match {
- case Operation(">>", Seq(templateLeft, templateRight)) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
- case _ => fail
- }
-
- case trees.BVLShiftRight(left, right) => template match {
- case Operation(">>>", Seq(templateLeft, templateRight)) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
- case _ => fail
- }
-
- // Tuples.
-
- case trees.Tuple(exprs) => template match {
- case Operation("Tuple", templates) =>
- extract(toExprObls(exprs -> templates))
- case _ => fail
- }
-
- case trees.TupleSelect(inner, index) => template match {
- case Selection(templateInner, TupleField(`index`)) => extract(toExprObl(inner -> templateInner))
- case _ => fail
- }
-
- // Sets.
-
- case trees.FiniteSet(elements, tpe) => template match {
- case SetConstruction(templatesElements, optTemplateType) =>
- extract(toExprObls(elements -> templatesElements), toOptTypeObl(tpe -> optTemplateType))
- case _ => fail
- }
-
- case trees.SetAdd(set, element) => (set.getType(symbols), template) match {
- case (trees.SetType(tpe), SetAddOperation(templateSet, templateElement, optTemplateType)) =>
- extract(toExprObl(set -> templateSet), toExprObl(element -> templateElement), toOptTypeObl(tpe -> optTemplateType))
- case _ => fail
- }
-
- case trees.ElementOfSet(element, set) => (set.getType(symbols), template) match {
- case (trees.SetType(tpe), ContainsOperation(templateSet, templateElement, optTemplateType)) =>
- extract(toExprObl(set -> templateSet), toExprObl(element -> templateElement), toOptTypeObl(tpe -> optTemplateType))
- case _ => fail
- }
-
- case trees.SubsetOf(left, right) => (left.getType(symbols), template) match {
- case (trees.SetType(tpe), SubsetOperation(templateLeft, templateRight, optTemplateType)) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight), toOptTypeObl(tpe -> optTemplateType))
- case _ => fail
- }
-
- case trees.SetIntersection(left, right) => (left.getType(symbols), template) match {
- case (trees.SetType(tpe), SetIntersectionOperation(templateLeft, templateRight, optTemplateType)) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight), toOptTypeObl(tpe -> optTemplateType))
- case _ => fail
- }
-
- case trees.SetDifference(left, right) => (left.getType(symbols), template) match {
- case (trees.SetType(tpe), SetDifferenceOperation(templateLeft, templateRight, optTemplateType)) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight), toOptTypeObl(tpe -> optTemplateType))
- case _ => fail
- }
-
- // Bags.
-
- case trees.FiniteBag(mappings, tpe) => template match {
- case BagConstruction(Bindings(Seq(), templateMappings), optTemplateType) => {
- val (keys, values) = mappings.unzip
- val (templatesKeys, templatesValues) = templateMappings.unzip
-
- extract(toExprObls(keys -> templatesKeys), toExprObls(values -> templatesValues), toOptTypeObl(tpe -> optTemplateType))
- }
- case _ => fail
- }
-
- case trees.BagAdd(bag, element) => (bag.getType(symbols), template) match {
- case (trees.BagType(tpe), BagAddOperation(templateBag, templateElement, optTemplateType)) =>
- extract(toExprObl(bag -> templateBag), toExprObl(element -> templateElement), toOptTypeObl(tpe -> optTemplateType))
- case _ => fail
- }
-
- case trees.MultiplicityInBag(element, bag) => (bag.getType, template) match {
- case (trees.BagType(tpe), BagMultiplicityOperation(templateBag, templateElement, optTemplateType)) =>
- extract(toExprObl(element -> templateElement), toExprObl(bag -> templateBag), toOptTypeObl(tpe -> optTemplateType))
- case _ => fail
- }
-
- case trees.BagIntersection(left, right) => (left.getType, template) match {
- case (trees.BagType(tpe), BagIntersectionOperation(templateLeft, templateRight, optTemplateType)) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight), toOptTypeObl(tpe -> optTemplateType))
- case _ => fail
- }
-
- case trees.BagUnion(left, right) => (left.getType, template) match {
- case (trees.BagType(tpe), BagUnionOperation(templateLeft, templateRight, optTemplateType)) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight), toOptTypeObl(tpe -> optTemplateType))
- case _ => fail
- }
-
- case trees.BagDifference(left, right) => (left.getType, template) match {
- case (trees.BagType(tpe), BagDifferenceOperation(templateLeft, templateRight, optTemplateType)) =>
- extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight), toOptTypeObl(tpe -> optTemplateType))
- case _ => fail
- }
-
- // Maps.
-
- case trees.FiniteMap(pairs, default, keyType, valueType) => template match {
- case MapConstruction(templateDefault, Bindings(Seq(), templatesPairs), optTemplatesTypes) => {
-
- val (optTemplateKeyType, optTemplateValueType) = optTemplatesTypes match {
- case Some(Right((k, v))) => (Some(k), Some(v))
- case Some(Left(k)) => (Some(k), None)
- case None => (None, None)
- }
-
- val (keys, values) = pairs.unzip
- val (templatesKeys, templatesValues) = templatesPairs.unzip
-
- extract(toExprObls(keys -> templatesKeys), toExprObls(values -> templatesValues),
- toOptTypeObl(keyType -> optTemplateKeyType), toOptTypeObl(valueType -> optTemplateValueType), toExprObl(default -> templateDefault))
- }
- case _ => fail
- }
-
- case trees.MapApply(map, key) => (map.getType, template) match {
- case (trees.MapType(keyType, valueType), MapApplyOperation(templateMap, templateKey, optTemplatesTypes)) => {
- val (optTemplateKeyType, optTemplateValueType) = optTemplatesTypes match {
- case Some((k, v)) => (Some(k), Some(v))
- case None => (None, None)
- }
-
- extract(toExprObl(map -> templateMap), toExprObl(key -> templateKey),
- toOptTypeObl(keyType -> optTemplateKeyType), toOptTypeObl(valueType -> optTemplateValueType))
- }
- case _ => fail
- }
-
- case trees.MapUpdated(map, key, value) => (map.getType, template) match {
- case (trees.MapType(keyType, valueType), MapUpdatedOperation(templateMap, templateKey, templateValue, optTemplatesTypes)) => {
- val (optTemplateKeyType, optTemplateValueType) = optTemplatesTypes match {
- case Some((k, v)) => (Some(k), Some(v))
- case None => (None, None)
- }
-
- extract(toExprObl(map -> templateMap), toExprObl(key -> templateKey), toOptTypeObl(keyType -> optTemplateKeyType),
- toExprObl(value -> templateValue), toOptTypeObl(valueType -> optTemplateValueType))
- }
- case _ => fail
- }
-
- case _ => fail
- }
- }
- }
-}
diff --git a/src/main/scala/inox/parsing/ExpressionParser.scala b/src/main/scala/inox/parsing/ExpressionParser.scala
deleted file mode 100644
index 81bf65e8b..000000000
--- a/src/main/scala/inox/parsing/ExpressionParser.scala
+++ /dev/null
@@ -1,345 +0,0 @@
-/* Copyright 2017 EPFL, Lausanne */
-
-package inox
-package parsing
-
-import scala.util.parsing.combinator._
-import scala.util.parsing.combinator.syntactical._
-import scala.util.parsing.combinator.token._
-import scala.util.parsing.input.Position
-
-trait ExpressionParsers { self: Parsers =>
-
- class ExpressionParser extends TypeParser {
-
- import lexical.{Identifier => _, Quantifier => _, Hole => _, _}
-
- import ExprIR._
-
- lazy val expression: Parser[Expression] = positioned(greedyRight | operatorExpr) withFailureMessage {
- (p: Position) => withPos("Expression expected.", p)
- }
-
- lazy val nonOperatorExpr: Parser[Expression] = positioned(withPrefix(greedyRight | withTypeAnnotation(selectionExpr))) withFailureMessage {
- (p: Position) => withPos("Expression expected.", p)
- }
-
- lazy val selectableExpr: Parser[Expression] = withApplication {
- holeExpr | literalExpr | withTypeApplication(variableExpr) | literalSetLikeExpr | tupleOrParensExpr
- }
-
- def withTypeAnnotation(exprParser: Parser[Expression]): Parser[Expression] = {
- for {
- e <- exprParser
- ot <- opt(p(':') ~> commit(typeExpression))
- } yield ot match {
- case None => e
- case Some(t) => TypeApplication(Operation("TypeAnnotation", Seq(e)), Seq(t))
- }
- }
-
- def withApplication(exprParser: Parser[Expression]): Parser[Expression] =
- for {
- expr <- exprParser
- argss <- rep(arguments)
- } yield {
- argss.foldLeft(expr) {
- case (acc, args) => Application(acc, args)
- }
- }
-
- def withTypeApplication(exprParser: Parser[Expression]): Parser[Expression] =
- for {
- expr <- exprParser
- oargs <- opt(typeArguments)
- } yield {
- oargs.foldLeft(expr) {
- case (acc, args) => TypeApplication(acc, args)
- }
- }
-
- lazy val prefix: Parser[Expression => Expression] = unaryOps.map({
- (op: String) => elem(Operator(op)) ^^^ { (x: Expression) => Operation(op, Seq(x)) }
- }).reduce(_ | _)
-
- def withPrefix(exprParser: Parser[Expression]): Parser[Expression] =
- for {
- pres <- rep(prefix)
- expr <- exprParser
- } yield {
- pres.foldRight(expr) {
- case (pre, acc) => pre(acc)
- }
- }
-
- lazy val selectionExpr: Parser[Expression] = {
-
- val selector = (for {
- i <- positioned(selectorIdentifier)
- targs <- opt(typeArguments)
- argss <- rep(arguments)
- } yield { (expr: Expression) =>
- val zero: Expression = if (targs.isDefined) {
- TypeApplication(Selection(expr, i).setPos(i.pos), targs.get).setPos(i.pos)
- } else {
- Selection(expr, i).setPos(i.pos)
- }
-
- argss.foldLeft(zero) {
- case (acc, args) => Application(acc, args)
- }
- }) withFailureMessage {
- (p: Position) => withPos("Selector expected.", p)
- }
-
- positioned(selectableExpr) ~ rep(kw(".") ~> commit(selector)) ^^ {
- case expr ~ funs => funs.foldLeft(expr) {
- case (acc, f) => f(acc)
- }
- }
- }
-
- lazy val selectorIdentifier: Parser[Field] = acceptMatch("Selector", {
- case lexical.Identifier(name) => FieldName(name)
- case Embedded(i : inox.Identifier) => FieldIdentifier(i)
- case lexical.Hole(i) => FieldHole(i)
- })
-
- lazy val greedyRight: Parser[Expression] = lambdaExpr | quantifierExpr | ifExpr | letExpr | assumeExpr
-
- lazy val assumeExpr: Parser[Expression] = for {
- _ <- kw("assume")
- p <- commit(expression)
- _ <- commit(kw("in"))
- e <- commit(expression)
- } yield Operation("Assume", Seq(p, e))
-
- lazy val ifExpr: Parser[Expression] = for {
- _ <- kw("if")
- c <- commit(parensExpr withFailureMessage {
- (p: Position) => withPos("Missing condition, between parentheses '(' and ')'.", p)
- })
- t <- commit(expression)
- _ <- commit(kw("else") withFailureMessage {
- (p: Position) => withPos("Missing `else`. `if` expressions must have an accompanying `else`.", p)
- })
- e <- commit(expression)
- } yield Operation("IfThenElse", Seq(c, t, e))
-
- lazy val letExpr: Parser[Expression] = for {
- _ <- kw("let")
- bs <- commit(rep1sep(for {
- v <- valDef
- _ <- commit(kw("=") withFailureMessage {
- (p: Position) => withPos("Missing assignment to variable `" + v._1.getName +"`. Use `=` to assign a value to the variable.", p)
- })
- e <- commit(expression)
- } yield (v._1, v._2, e), p(',')) withFailureMessage {
- (p: Position) => withPos("Binding expected. Bindings take the form `variable = expression`, and are separated by `,`.", p)
- })
- _ <- commit(kw("in") withFailureMessage {
- (p: Position) => withPos("Missing `in`. `let` expressions must be followed by an expression introduced by the keyword `in`.", p)
- })
- bd <- commit(expression)
- } yield Let(bs, bd)
-
- lazy val holeExpr: Parser[Expression] = acceptMatch("Hole", {
- case lexical.Hole(i) => ExpressionHole(i)
- })
-
- lazy val holeExprSeq: Parser[Expression] = acceptMatch("Hole with ellipsis", {
- case lexical.Hole(i) => ExpressionSeqHole(i)
- }) <~ kw("...")
-
-
- lazy val literalExpr: Parser[Expression] = positioned(acceptMatch("Literal", {
- case Keyword("true") => BooleanLiteral(true)
- case Keyword("false") => BooleanLiteral(false)
- case StringLit(s) => StringLiteral(s)
- case NumericLit(n) => NumericLiteral(n)
- case DecimalLit(w, t, r) => DecimalLiteral(w, t, r)
- case CharLit(c) => CharLiteral(c)
- case Embedded(e : trees.Expr) => EmbeddedExpr(e)
- }) ^^ (Literal(_)))
-
- lazy val variableExpr: Parser[Expression] = identifier ^^ (Variable(_))
-
- lazy val identifier: Parser[Identifier] = positioned(acceptMatch("Identifier", {
- case lexical.Identifier(name) => IdentifierName(name)
- case Embedded(i : inox.Identifier) => IdentifierIdentifier(i)
- case lexical.Hole(i) => IdentifierHole(i)
- })) withFailureMessage {
- (p: Position) => withPos("Identifier expected.", p)
- }
-
- lazy val parensExpr: Parser[Expression] =
- (p('(') ~> commit(expression) <~ commit(p(')') withFailureMessage {
- (p: Position) => withPos("Missing `)`.", p)
- }))
-
- lazy val tupleOrParensExpr: Parser[Expression] =
- p('(') ~> repsep(expression, p(',')) <~ commit(p(')') withFailureMessage {
- (p: Position) => withPos("Missing `)`.", p)
- }) ^^ {
- case Seq() => Literal(UnitLiteral)
- case Seq(e) => e
- case es => Operation("Tuple", es)
- }
-
- def repsepOnce[A, B](parser: Parser[A], sep: Parser[Any], once: Parser[B]): Parser[(Option[B], Seq[A])] = {
- opt(rep1sepOnce(parser, sep, once)) ^^ {
- case None => (None, Seq())
- case Some(t) => t
- }
- }
-
- def rep1sepOnce[A, B](parser: Parser[A], sep: Parser[Any], once: Parser[B]): Parser[(Option[B], Seq[A])] =
- {
- for {
- a <- parser
- o <- opt(sep ~> rep1sepOnce(parser, sep, once))
- } yield o match {
- case None => (None, Seq(a))
- case Some((ob, as)) => (ob, a +: as)
- }
- } | {
- for {
- b <- once
- o <- opt(sep ~> rep1sep(parser, sep))
- } yield o match {
- case None => (Some(b), Seq())
- case Some(as) => (Some(b), as)
- }
- }
-
-
- lazy val literalSetLikeExpr: Parser[Expression] =
- p('{') ~> repsepOnce(expression, p(','), defaultMap) <~ commit(p('}') withFailureMessage {
- (p: Position) => withPos("Missing `}`.", p)
- }) ^^ {
- case (None, as) => Operation("Set", as)
- case (Some((d, None)), as) => Operation("Map", d +: as)
- case (Some((d, Some(t))), as) => TypeApplication(Operation("Map", d +: as), Seq(t))
- }
-
- lazy val defaultMap: Parser[(Expression, Option[Type])] =
- for {
- _ <- elem(Operator("*"))
- ot <- opt(p(':') ~> typeExpression)
- _ <- commit(elem(Operator("->")) withFailureMessage {
- (p: Position) => withPos("Missing binding for the default case. Expected `->`.", p)
- })
- e <- expression
- } yield (e, ot)
-
- lazy val arguments: Parser[List[Expression]] =
- p('(') ~> repsep(exprEllipsis | (holeExprSeq | expression) ^^ {List(_)}, p(',')) <~ commit(p(')') withFailureMessage {
- (p: Position) => withPos("Missing ')' at the end of the arguments.", p)
- }) ^^ {
- _.flatten
- }
-
- lazy val exprEllipsis: Parser[List[Expression]] = acceptMatch("Multiple embedded expressions", {
- case Embedded(es: Traversable[_]) if es.forall(_.isInstanceOf[trees.Expr]) =>
- es.map((e: Any) => Literal(EmbeddedExpr(e.asInstanceOf[trees.Expr]))).toList
- }) <~ commit(kw("...") withFailureMessage {
- (p: Position) => withPos("Missing `...` after embedded sequence of expressions.", p)
- })
-
- lazy val quantifier: Parser[Quantifier] = acceptMatch("Quantifier expected.", {
- case lexical.Quantifier("forall") => Forall
- case lexical.Quantifier("exists") => Exists
- case lexical.Quantifier("lambda") => Lambda
- case lexical.Quantifier("choose") => Choose
- })
-
- lazy val valDef: Parser[(Identifier, Option[Type])] = for {
- i <- identifier
- otype <- opt(p(':') ~> commit(typeExpression))
- } yield (i, otype)
-
- lazy val quantifierExpr: Parser[Expression] = for {
- q <- quantifier
- vds <- rep1sep(commit(valDef), p(','))
- _ <- commit(p('.') withFailureMessage {
- (p: Position) => "Missing `.` between bindings and expression body."
- })
- e <- commit(expression)
- } yield Abstraction(q, vds, e)
-
- lazy val lambdaExpr: Parser[Expression] = for {
- vds <- p('(') ~> repsep(valDef, p(',')) <~ p(')') | identifier ^^ (id => Seq((id, None)))
- _ <- kw("=>") withFailureMessage {
- (p: Position) => "Missing `=>` between bindings and lambda body."
- }
- e <- commit(expression)
- } yield Abstraction(Lambda, vds, e)
-
- lazy val operatorExpr: Parser[Expression] = {
-
- def operator(op: String) = (elem(Operator(op)) ^^^ { op }) withFailureMessage {
- (p: Position) => withPos("Unknown operator.", p)
- }
-
- def oneOf(ops: Seq[String]) = ops.map(operator(_)).reduce(_ | _) withFailureMessage {
- (p: Position) => withPos("Unknown operator.", p)
- }
-
- def toBinary(op: String): (Expression, Expression) => Expression =
- (a: Expression, b: Expression) => Operation(op, Seq(a, b))
-
- val zero = nonOperatorExpr
-
- Operators.binaries.foldLeft(zero) {
- case (morePrio, level) => {
-
- level match {
- case RightAssoc(ops) => {
- val bin = oneOf(ops).map(toBinary(_))
- morePrio ~ rep(bin ~ commit(morePrio)) ^^ {
- case first ~ opsAndExprs => {
- if (opsAndExprs.isEmpty) {
- first
- }
- else {
- val (ops, exprs) = opsAndExprs.map({ case a ~ b => (a, b) }).unzip
- val exprsAndOps = (first +: exprs).zip(ops)
- val last = exprs.last
-
- exprsAndOps.foldRight(last) {
- case ((expr, f), acc) => f(expr, acc)
- }
- }
- }
- }
- }
- case LeftAssoc(ops) => {
- val bin = oneOf(ops).map(toBinary(_))
- chainl1(morePrio, bin)
- }
- case AnyAssoc(op) => {
- rep1sep(morePrio, operator(op)) ^^ {
- case Seq(x) => x
- case xs => Operation(op, xs)
- }
- }
- }
- }
- }
- }
-
- lazy val typeArguments: Parser[List[Type]] = p('[') ~> rep1sep(commit(typeExpression), p(',')) <~ commit(p(']') withFailureMessage {
- (p: Position) => withPos("Missing ']'.", p)
- })
-
- lazy val inoxValDef: Parser[(inox.Identifier, ExprIR.Type)] = for {
- i <- identifier
- _ <- p(':')
- t <- typeExpression
- } yield i match {
- case IdentifierIdentifier(v) => (v, t)
- case IdentifierName(n) => (FreshIdentifier(n), t)
- case IdentifierHole(_) => throw new scala.Error("Unexpected hole in value definition.")
- }
- }
-}
diff --git a/src/main/scala/inox/parsing/Extractors.scala b/src/main/scala/inox/parsing/Extractors.scala
deleted file mode 100644
index 7755e800a..000000000
--- a/src/main/scala/inox/parsing/Extractors.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* Copyright 2017 EPFL, Lausanne */
-
-package inox
-package parsing
-
-trait Extractors
- extends IRs
- with ExpressionDeconstructors
- with TypeDeconstructors
- with ExpressionExtractors
- with DefinitionExtractors
- with TypeExtractors {
-
- trait Extractor
- extends ExpressionDeconstructor
- with TypeDeconstructor
- with ExpressionExtractor
- with DefinitionExtractor
- with TypeExtractor {
-
- type Match = Map[Int, Any]
-
- def matching(index: Int, value: Any): Match = Map(index -> value)
- val empty: Match = Map()
- val success = Some(Map[Int, Any]())
- val fail = None
- }
-}
diff --git a/src/main/scala/inox/parsing/IR.scala b/src/main/scala/inox/parsing/IR.scala
deleted file mode 100644
index f54ac7ccc..000000000
--- a/src/main/scala/inox/parsing/IR.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/* Copyright 2009-2018 EPFL, Lausanne */
-
-package inox
-package parsing
-
-import scala.util.parsing.input.Positional
-
-trait IRs extends BuiltIns with ExprIRs with TypeIRs with DefinitionIRs {
- protected val trees: ast.Trees
-}
-
-/** Contains abstract Intermediate Representation (IR) language. */
-trait IR {
-
- type Identifier // Identifier of the language.
- type Type // Types.
- type Operator // Primitive operators.
- type Value // Literal values.
- type Field // Fields.
- type Quantifier // Quantifiers.
-
- abstract class Expression(pre: String) extends Positional with Product {
- override def productPrefix = pos + "@" + pre
- }
- case class Variable(identifier: Identifier) extends Expression("Variable")
- case class Application(callee: Expression, args: Seq[Expression]) extends Expression("Application")
- case class Abstraction(quantifier: Quantifier, bindings: Seq[(Identifier, Option[Type])], body: Expression) extends Expression("Abstraction")
- case class Operation(operator: Operator, args: Seq[Expression]) extends Expression("Operation")
- case class Selection(structure: Expression, field: Field) extends Expression("Selection")
- case class Literal(value: Value) extends Expression("Literal")
- case class TypeApplication(callee: Expression, args: Seq[Type]) extends Expression("TypeApplication")
- case class Let(bindings: Seq[(Identifier, Option[Type], Expression)], body: Expression) extends Expression("Let")
-}
diff --git a/src/main/scala/inox/parsing/Interpolator.scala b/src/main/scala/inox/parsing/Interpolator.scala
deleted file mode 100644
index 6e1e9ee5f..000000000
--- a/src/main/scala/inox/parsing/Interpolator.scala
+++ /dev/null
@@ -1,116 +0,0 @@
-/* Copyright 2017 EPFL, Lausanne */
-
-package inox
-package parsing
-
-import ast._
-
-trait Interpolator
- extends Parsers
- with Elaborators
- with Extractors
- with ConstraintSolvers {
-
- import trees._
-
- class Converter(implicit val symbols: trees.Symbols)
- extends Elaborator
- with Extractor
-
- implicit class ExpressionInterpolator(sc: StringContext)(implicit symbols: trees.Symbols = trees.NoSymbols) {
-
- private lazy val converter = new Converter()
- private lazy val parser = new DefinitionParser()
-
- object e {
- def apply(args: Any*): Expr = {
- val ire = ir(args : _*)
- val expr = converter.getExpr(ire, Unknown.fresh(ire.pos))(Store.empty)
- converter.elaborate(expr)
- }
-
- def unapplySeq(expr: Expr): Option[Seq[Any]] = {
- val args = Seq.tabulate(sc.parts.length - 1)(MatchPosition(_))
- val ir = parser.getFromSC(sc, args)(parser.phrase(parser.expression))
- converter.extract(expr, ir) match {
- case Some(mappings) if mappings.size == sc.parts.length - 1 => Some(mappings.toSeq.sortBy(_._1).map(_._2))
- case _ => None
- }
- }
- }
-
- def ir(args: Any*): ExprIR.Expression = {
- parser.getFromSC(sc, args)(parser.phrase(parser.expression))
- }
-
- def v(args: Any*): ValDef = {
- val (id, ir) = parser.getFromSC(sc, args)(parser.phrase(parser.inoxValDef))
- val tpe = converter.getType(ir)(Store.empty)
- trees.ValDef(id, converter.elaborate(tpe))
- }
-
- def r(args: Any*): Seq[Lexer.Token] = {
- val reader = Lexer.getReader(sc, args)
-
- import scala.util.parsing.input.Reader
-
- def go[A](r: Reader[A]): Seq[A] = {
- if (r.atEnd) Seq()
- else r.first +: go(r.rest)
- }
-
- go(reader)
- }
-
- object t {
- def apply(args: Any*): Type = {
- val ir = parser.getFromSC(sc, args)(parser.phrase(parser.typeExpression))
- val tpe = converter.getType(ir)(Store.empty)
- converter.elaborate(tpe)
- }
-
- def unapplySeq(tpe: Type): Option[Seq[Any]] = {
- val args = Seq.tabulate(sc.parts.length - 1)(MatchPosition(_))
- val ir = parser.getFromSC(sc, args)(parser.phrase(parser.typeExpression))
- converter.extract(tpe, ir) match {
- case Some(mappings) if mappings.size == sc.parts.length - 1 => Some(mappings.toSeq.sortBy(_._1).map(_._2))
- case _ => None
- }
- }
- }
-
- object td {
- def apply(args: Any*): ADTSort = {
- val ir = parser.getFromSC(sc, args)(parser.phrase(parser.datatype))
- val srt = converter.getSort(ir)(Store.empty)
- converter.elaborate(srt)
- }
-
- def unapplySeq(sort: ADTSort): Option[Seq[Any]] = {
- val args = Seq.tabulate(sc.parts.length - 1)(MatchPosition(_))
- val ir = parser.getFromSC(sc, args)(parser.phrase(parser.datatype))
- converter.extract(sort, ir) match {
- case Some(mappings) if mappings.size == sc.parts.length - 1 => Some(mappings.toSeq.sortBy(_._1).map(_._2))
- case _ => None
- }
- }
- }
-
- object fd {
- def apply(args: Any*): FunDef = {
- val ir = parser.getFromSC(sc, args)(parser.phrase(parser.function))
- val fundef = converter.getFunction(ir)(Store.empty)
- converter.elaborate(fundef)
- }
-
- def unapplySeq(fun: FunDef): Option[Seq[Any]] = {
- val args = Seq.tabulate(sc.parts.length - 1)(MatchPosition(_))
- val ir = parser.getFromSC(sc, args)(parser.phrase(parser.function))
- converter.extract(fun, ir) match {
- case Some(mappings) if mappings.size == sc.parts.length - 1 => Some(mappings.toSeq.sortBy(_._1).map(_._2))
- case _ => None
- }
- }
- }
- }
-}
diff --git a/src/main/scala/inox/parsing/MatchPosition.scala b/src/main/scala/inox/parsing/MatchPosition.scala
deleted file mode 100644
index 5bf2c6058..000000000
--- a/src/main/scala/inox/parsing/MatchPosition.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-/* Copyright 2017 EPFL, Lausanne */
-
-package inox
-package parsing
-
-case class MatchPosition(id: Int)
\ No newline at end of file
diff --git a/src/main/scala/inox/parsing/Parsers.scala b/src/main/scala/inox/parsing/Parsers.scala
deleted file mode 100644
index 0670f1a77..000000000
--- a/src/main/scala/inox/parsing/Parsers.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-/* Copyright 2009-2018 EPFL, Lausanne */
-
-package inox
-package parsing
-
-trait Parsers
- extends Lexers
- with IRs
- with ExpressionParsers
- with TypeParsers
- with DefinitionParsers
diff --git a/src/main/scala/inox/parsing/PositionalErrors.scala b/src/main/scala/inox/parsing/PositionalErrors.scala
deleted file mode 100644
index 80f5c298d..000000000
--- a/src/main/scala/inox/parsing/PositionalErrors.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-/* Copyright 2017 EPFL, Lausanne */
-
-package inox
-package parsing
-
-import scala.util.parsing.combinator._
-import scala.util.parsing.input._
-
-trait PositionalErrors { self: scala.util.parsing.combinator.Parsers =>
-
- implicit class PositionalErrorsDecorator[A](parser: Parser[A]) {
-
- def withErrorMessage(onError: Position => String): Parser[A] = new Parser[A] {
- override def apply(input: Input) = parser(input) match {
- case s @ Success(_, _) => s
- case e @ Error(_, rest) => Error(onError(input.pos), rest)
- case f @ Failure(_, _) => f
- }
- }
-
- def withFailureMessage(onFailure: Position => String): Parser[A] = new Parser[A] {
- override def apply(input: Input) = parser(input) match {
- case s @ Success(_, _) => s
- case e @ Error(_, _) => e
- case f @ Failure(_, rest) => Failure(onFailure(input.pos), rest)
- }
- }
- }
-}
diff --git a/src/main/scala/inox/parsing/StringContextParsers.scala b/src/main/scala/inox/parsing/StringContextParsers.scala
deleted file mode 100644
index 292628397..000000000
--- a/src/main/scala/inox/parsing/StringContextParsers.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-/* Copyright 2017 EPFL, Lausanne */
-
-package inox
-package parsing
-
-import scala.util.parsing.combinator.syntactical.TokenParsers
-
-trait StringContextParsers { self: TokenParsers { type Tokens <: StringContextLexer } =>
-
- case class ParsingException(error: String) extends Exception(error)
-
- def getFromSC[A](sc: StringContext, args: Seq[Any])(parser: Parser[A]): A =
- parser(lexical.getReader(sc, args)) match {
- case NoSuccess(msg, _) => throw ParsingException(msg)
- case Success(value, _) => value
- }
-}
diff --git a/src/main/scala/inox/parsing/TypeDeconstructor.scala b/src/main/scala/inox/parsing/TypeDeconstructor.scala
deleted file mode 100644
index 481f411ff..000000000
--- a/src/main/scala/inox/parsing/TypeDeconstructor.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/* Copyright 2009-2018 EPFL, Lausanne */
-
-package inox
-package parsing
-
-trait TypeDeconstructors extends IRs {
-
- trait TypeDeconstructor {
-
- import TypeIR._
-
- object BVType {
- def apply(signed: Boolean, size: Int): String = {
- require(size > 0)
-
- if (signed) "Int" + size
- else "UInt" + size
- }
-
- def unapply(name: String): Option[(Boolean,Int)] = {
- if (name.startsWith("Int")) {
- scala.util.Try(name.drop(3).toInt).toOption.filter(_ > 0).map(i => (true,i))
- } else if (name.startsWith("UInt")) {
- scala.util.Try(name.drop(4).toInt).toOption.filter(_ > 0).map(i => (false,i))
- } else {
- None
- }
- }
- }
-
- lazy val basic: Map[Value, trees.Type] = Seq(
- "Boolean" -> trees.BooleanType(),
- "BigInt" -> trees.IntegerType(),
- "Char" -> trees.CharType(),
- "Int" -> trees.Int32Type(),
- "Real" -> trees.RealType(),
- "String" -> trees.StringType(),
- "Unit" -> trees.UnitType()).map({ case (n, v) => Name(n) -> v }).toMap
- }
-}
diff --git a/src/main/scala/inox/parsing/TypeElaborator.scala b/src/main/scala/inox/parsing/TypeElaborator.scala
deleted file mode 100644
index e8953bf9f..000000000
--- a/src/main/scala/inox/parsing/TypeElaborator.scala
+++ /dev/null
@@ -1,242 +0,0 @@
-/* Copyright 2017 EPFL, Lausanne */
-
-package inox
-package parsing
-
-import scala.util.parsing.input._
-
-import Utils.plural
-
-trait TypeElaborators { self: Elaborators =>
-
- import Utils.{either, traverse, plural}
-
- trait TypeElaborator { self: Elaborator =>
-
- import TypeIR._
-
- private lazy val basicInv = basic.map(_.swap)
-
- private lazy val parametric: Map[Value, (Int, Seq[trees.Type] => trees.Type)] =
- (primitives ++ sorts).toMap
-
- private lazy val primitives = Seq(
- "Set" -> (1, (ts: Seq[trees.Type]) => trees.SetType(ts.head)),
- "Map" -> (2, (ts: Seq[trees.Type]) => trees.MapType(ts(0), ts(1))),
- "Bag" -> (1, (ts: Seq[trees.Type]) => trees.BagType(ts.head))).map({ case (n, v) => Name(n) -> v })
-
- private lazy val sorts = symbols.sorts.toSeq.flatMap({
- case (i, d) => {
- val f = (d.tparams.length, (ts: Seq[trees.Type]) => trees.ADTType(i, ts))
-
- Seq(
- Name(i.name) -> f,
- EmbeddedIdentifier(i) -> f)
- }
- })
-
- def getSimpleType(tpe: Expression)(implicit store: Store): trees.Type = {
- toSimpleType(tpe) match {
- case Right(inoxType) => inoxType
- case Left(errors) => throw new ElaborationException(errors)
- }
- }
-
- def toSimpleType(expr: Expression)(implicit store: Store): Either[Seq[ErrorLocation], trees.Type] = expr match {
- case Operation(Tuple | Sigma, irs) if irs.size >= 2 =>
- traverse(irs.map {
- case TypeBinding(_, tpe) => toSimpleType(tpe)
- case tpe => toSimpleType(tpe)
- }).left.map(_.flatten).right.map(trees.TupleType(_))
-
- case Operation(Arrow | Pi, Seq(Operation(Group, froms), to)) =>
- either(
- traverse(froms.map {
- case TypeBinding(_, tpe) => toSimpleType(tpe)
- case tpe => toSimpleType(tpe)
- }).left.map(_.flatten),
- toSimpleType(to)
- ){
- case (argTpes, retTpe) => trees.FunctionType(argTpes, retTpe)
- }
-
- case Refinement(_, tpe, _) => toSimpleType(tpe)
-
- case Application(l @ Literal(value), irs) =>
- either(
- parametric.get(value).orElse(value match {
- case Name(name) if store isTypeParameter name => None
- case Name(name) if store isSort name =>
- val sort = store getSort name
- Some((sort.tparams.length, (tps: Seq[trees.Type]) => trees.ADTType(sort.id, tps)))
- case _ => None
- }).map { case (n, cons) =>
- if (n == irs.length) {
- Right(cons)
- } else {
- Left(Seq(ErrorLocation("Type constructor " + value + " takes " +
- n + " " + plural(n, "argument", "arguments") + ", " +
- irs.length + " " + plural(irs.length, "was", "were") + " given.", l.pos)))
- }
- }.getOrElse {
- Left(Seq(ErrorLocation("Unknown type constructor: " + value, l.pos)))
- },
- traverse(irs.map(toSimpleType(_))).left.map(_.flatten)
- ){
- case (cons, tpes) => cons(tpes)
- }
-
- case Literal(EmbeddedType(t)) => Right(t)
-
- case Literal(Name(BVType(signed, size))) => Right(trees.BVType(signed, size))
-
- case l @ Literal(value) =>
- basic.get(value)
- .map(tpe => (0 -> ((tps: Seq[trees.Type]) => tpe)))
- .orElse(parametric.get(value))
- .orElse(value match {
- case Name(name) if store isTypeParameter name =>
- val tp = store getTypeParameter name
- Some((0, (tps: Seq[trees.Type]) => tp))
- case Name(name) if store isSort name =>
- val sort = store getSort name
- Some((sort.tparams.length, (tps: Seq[trees.Type]) => trees.ADTType(sort.id, tps)))
- case _ => None
- }).map { case (n, cons) =>
- if (n == 0) {
- Right(cons(Seq()))
- } else {
- Left(Seq(ErrorLocation("Type " + value + " expects " +
- n + " " + plural(n, "argument", "arguments") + ", none were given", l.pos)))
- }
- }.getOrElse {
- Left(Seq(ErrorLocation("Unknown type: " + value, l.pos)))
- }
-
- case _ => Left(Seq(ErrorLocation("Invalid type.", expr.pos)))
- }
-
- private def getTypeBindings(tps: Seq[(Option[ExprIR.Identifier], Expression)])
- (implicit store: Store): (Store, Constrained[Seq[trees.ValDef]]) = {
- val (newStore, vds) = tps.foldLeft((store, Seq[Constrained[trees.ValDef]]())) {
- case ((store, vds), (oid, tpe)) =>
- getType(tpe)(store) match {
- case unsat: Unsatisfiable => (store, vds :+ unsat)
- case c @ WithConstraints(ev, cs) => oid match {
- case Some(ident) =>
- val id = getIdentifier(ident)
- val newStore = store + (ident.getName, id, getSimpleType(tpe)(store), ev)
- val newVds = vds :+ c.transform(tp => trees.ValDef(id, tp))
- (newStore, newVds)
- case None =>
- (store, vds :+ c.transform(tp => trees.ValDef.fresh("x", tp)))
- }
- }
- }
-
- (newStore, Constrained.sequence(vds))
- }
-
- def getType(expr: Expression, bound: Option[String] = None)
- (implicit store: Store): Constrained[trees.Type] = {
- implicit val position: Position = expr.pos
-
- expr match {
- case Operation(Tuple, irs) if irs.size >= 2 =>
- Constrained.sequence({
- irs.map(getType(_))
- }).transform({
- trees.TupleType(_)
- })
-
- case Operation(Sigma, irs) if irs.size >= 2 =>
- val (newStore, bindings) = getTypeBindings(irs.init.map {
- case TypeBinding(id, tpe) => (Some(id), tpe)
- case tpe => (None, tpe)
- })
-
- bindings.combine(getType(irs.last)(newStore))({
- case (params, to) => trees.SigmaType(params, to)
- })
-
- case Operation(Arrow, Seq(Operation(Group, froms), to)) =>
- Constrained.sequence({
- froms.map(getType(_))
- }).combine(getType(to))({
- case (from, to) => trees.FunctionType(from, to)
- })
-
- case Operation(Pi, Seq(Operation(Group, froms), to)) =>
- val (newStore, bindings) = getTypeBindings(froms.map {
- case TypeBinding(id, tpe) => (Some(id), tpe)
- case tpe => (None, tpe)
- })
-
- bindings.combine(getType(to)(newStore))({
- case (params, to) => trees.PiType(params, to)
- })
-
- case Refinement(oid, tpe, pred) =>
- val ident = oid orElse bound.map(ExprIR.IdentifierName(_))
- val (newStore, vds) = getTypeBindings(Seq(ident -> tpe))
-
- val u = Unknown.fresh
- vds.combine(getExpr(pred, u)(newStore))({
- case (Seq(vd), pred) => trees.RefinementType(vd, pred)
- }).addConstraint({
- Constraint.equal(u, trees.BooleanType())
- })
-
- case Application(l @ Literal(value), irs) =>
- (parametric.get(value).orElse(value match {
- case Name(name) if store isTypeParameter name => None
- case Name(name) if store isSort name =>
- val sort = store getSort name
- Some((sort.tparams.length, (tps: Seq[trees.Type]) => trees.ADTType(sort.id, tps)))
- case _ => None
- }).map { case (n, cons) =>
- if (n == irs.length) {
- Constrained.pure(cons)
- } else {
- Constrained.fail("Type constructor " + value + " takes " +
- n + " " + plural(n, "argument", "arguments") + ", " +
- irs.length + " " + plural(irs.length, "was", "were") + " given.", l.pos)
- }
- }.getOrElse {
- Constrained.fail("Unknown type constructor: " + value, l.pos)
- }).combine(Constrained.sequence(irs.map(getType(_))))({
- case (cons, tpes) => cons(tpes)
- })
-
- case Literal(EmbeddedType(t)) => Constrained.pure(t)
-
- case Literal(Name(BVType(signed, size))) => Constrained.pure(trees.BVType(signed, size))
-
- case l @ Literal(value) =>
- basic.get(value)
- .map(tpe => (0 -> ((tps: Seq[trees.Type]) => tpe)))
- .orElse(parametric.get(value))
- .orElse(value match {
- case Name(name) if store isTypeParameter name =>
- val tp = store getTypeParameter name
- Some((0, (tps: Seq[trees.Type]) => tp))
- case Name(name) if store isSort name =>
- val sort = store getSort name
- Some((sort.tparams.length, (tps: Seq[trees.Type]) => trees.ADTType(sort.id, tps)))
- case _ => None
- }).map { case (n, cons) =>
- if (n == 0) {
- Constrained.pure(cons(Seq()))
- } else {
- Constrained.fail("Type " + value + " expects " +
- n + " " + plural(n, "argument", "arguments") + ", none were given", l.pos)
- }
- }.getOrElse {
- Constrained.fail("Unknown type: " + value, l.pos)
- }
-
- case _ => Constrained.fail("Invalid type.", expr.pos)
- }
- }
- }
-}
diff --git a/src/main/scala/inox/parsing/TypeExtractor.scala b/src/main/scala/inox/parsing/TypeExtractor.scala
deleted file mode 100644
index 16d07efa4..000000000
--- a/src/main/scala/inox/parsing/TypeExtractor.scala
+++ /dev/null
@@ -1,93 +0,0 @@
-/* Copyright 2017 EPFL, Lausanne */
-
-package inox
-package parsing
-
-trait TypeExtractors { self: Extractors =>
-
- trait TypeExtractor { self0: Extractor =>
-
- import TypeIR._
-
- def extractSeq(tpes: Seq[trees.Type], templates: Seq[Expression]): Option[Match] = (tpes, templates) match {
- case (Seq(), Seq()) => Some(empty)
- case (Seq(), _) => None
- case (_, Seq()) => None
- case (_, Seq(TypeSeqHole(i), templateRest @ _*)) =>
- val n = tpes.length - templateRest.length
- if (n < 0) None else {
- val (matches, rest) = tpes.splitAt(n)
- extractSeq(rest, templateRest) map (_ ++ matching(i, matches))
- }
- case (Seq(tpe, tpeRest @ _*), Seq(template, templateRest @ _*)) =>
- for {
- matchingsHead <- extract(tpe, template)
- matchingsRest <- extractSeq(tpeRest, templateRest)
- } yield matchingsHead ++ matchingsRest
- }
-
- def extractSeq(vds: Seq[trees.ValDef], templates: Seq[Expression])(implicit dummy: DummyImplicit): Option[Match] =
- (vds, templates) match {
- case (Seq(), Seq()) => Some(empty)
- case (Seq(), _) => None
- case (_, Seq()) => None
- case (_, Seq(TypeSeqHole(i), templateRest @ _*)) =>
- val n = vds.length - templateRest.length
- if (n < 0) None else {
- val (matches, rest) = vds.splitAt(n)
- extractSeq(rest, templateRest) map (_ ++ matching(i, matches))
- }
- case (Seq(vd, vdRest @ _*), Seq(template, templateRest @ _*)) =>
- for {
- matchingsHead <- extract(vd, template)
- matchingsRest <- extractSeq(vdRest, templateRest)
- } yield matchingsHead ++ matchingsRest
- }
-
- def extract(vd: trees.ValDef, template: Expression): Option[Match] = template match {
- case TypeBinding(id, tpe) =>
- for {
- matchingsId <- toIdObl(vd.id, id)
- matchingsTpe <- extract(vd.tpe, tpe)
- } yield matchingsId ++ matchingsTpe
- case _ => fail
- }
-
- def extract(tpe: trees.Type, template: Expression): Option[Match] = (template, tpe) match {
- case (TypeHole(i), _) => Some(matching(i, tpe))
- case (_, trees.Untyped) => fail
- case (Literal(Name(BVType(templateSigned, templateSize))), trees.BVType(signed, size)) =>
- if (templateSigned == signed && templateSize == size) success else fail
- case (Literal(name), _) if (basic.get(name) == Some(tpe)) => success
- case (Operation(Tuple, templates), trees.TupleType(tpes)) => extractSeq(tpes, templates)
- case (Operation(Sigma, templatesFroms :+ templateTo), trees.SigmaType(params, to)) =>
- for {
- matchingsParams <- extractSeq(params, templatesFroms)
- matchingsTo <- extract(to, templateTo)
- } yield matchingsParams ++ matchingsTo
- case (Operation(Arrow, Seq(Operation(Group, templatesFroms), templateTo)), trees.FunctionType(froms, to)) =>
- for {
- matchingsFroms <- extractSeq(froms, templatesFroms)
- matchingsTo <- extract(to, templateTo)
- } yield matchingsFroms ++ matchingsTo
- case (Operation(Pi, Seq(Operation(Group, templatesFroms), templateTo)), trees.PiType(params, to)) =>
- for {
- matchingsFroms <- extractSeq(params, templatesFroms)
- matchingsTo <- extract(to, templateTo)
- } yield matchingsFroms ++ matchingsTo
- case (Application(Literal(Name("Set")), templatesElems), trees.SetType(elem)) => extractSeq(Seq(elem), templatesElems)
- case (Application(Literal(Name("Bag")), templatesElems), trees.BagType(elem)) => extractSeq(Seq(elem), templatesElems)
- case (Application(Literal(Name("Map")), templatesElems), trees.MapType(key, value)) => extractSeq(Seq(key, value), templatesElems)
- case (Application(NameHole(index), templates), trees.ADTType(id, tpes)) =>
- for (matchings <- extractSeq(tpes, templates)) yield matchings ++ matching(index, id)
- case (Application(Literal(Name(name)), templates), trees.ADTType(id, tpes)) if (id.name == name) => extractSeq(tpes, templates)
- case (Refinement(templateId, templateType, templatePred), trees.RefinementType(vd, pred)) =>
- for {
- matchingsIdent <- templateId.map(ident => toIdObl(vd.id -> ident)).getOrElse(success)
- matchingsType <- extract(vd.tpe, templateType)
- matchingsPred <- extract(pred, templatePred)
- } yield matchingsIdent ++ matchingsType ++ matchingsPred
- case (_, _) => fail
- }
- }
-}
diff --git a/src/main/scala/inox/parsing/TypeIR.scala b/src/main/scala/inox/parsing/TypeIR.scala
deleted file mode 100644
index c5d243097..000000000
--- a/src/main/scala/inox/parsing/TypeIR.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-/* Copyright 2017 EPFL, Lausanne */
-
-package inox
-package parsing
-
-import scala.util.parsing.input.Position
-
-trait TypeIRs { self: IRs =>
-
- object TypeIR extends IR {
-
- type Identifier = Nothing
- type Type = Nothing
- type Field = Nothing
- type Quantifier = Nothing
-
- sealed abstract class Value
- case class Name(name: String) extends Value { override def toString = name }
- case class EmbeddedType(tpe: trees.Type) extends Value { override def toString = tpe.toString }
- case class EmbeddedIdentifier(id: inox.Identifier) extends Value { override def toString = id.toString }
-
- sealed abstract class Operator
- case object Group extends Operator
- case object Tuple extends Operator
- case object Sigma extends Operator
- case object Arrow extends Operator
- case object Pi extends Operator
-
- case class TypeHole(index: Int) extends Expression("TypeHole")
- case class NameHole(index: Int) extends Expression("NameHole")
- case class TypeSeqHole(index: Int) extends Expression("TypeSeqHole")
-
- case class Refinement(id: Option[ExprIR.Identifier], tpe: Expression, pred: ExprIR.Expression) extends Expression("RefinementType")
- case class TypeBinding(id: ExprIR.Identifier, tpe: Expression) extends Expression("TypeBinding")
- }
-}
diff --git a/src/main/scala/inox/parsing/TypeParser.scala b/src/main/scala/inox/parsing/TypeParser.scala
deleted file mode 100644
index a738f75db..000000000
--- a/src/main/scala/inox/parsing/TypeParser.scala
+++ /dev/null
@@ -1,145 +0,0 @@
-/* Copyright 2017 EPFL, Lausanne */
-
-package inox
-package parsing
-
-import scala.util.parsing.combinator._
-import scala.util.parsing.combinator.syntactical._
-import scala.util.parsing.combinator.token._
-import scala.util.parsing.input._
-
-trait TypeParsers { self: Parsers =>
-
- class TypeParser
- extends StdTokenParsers
- with PositionalErrors
- with StringContextParsers { self: ExpressionParser =>
-
- type Tokens = Lexer.type
-
- override val lexical = Lexer
-
- import TypeIR._
- import lexical.{Hole => _, _}
-
- def withPos(error: String, pos: Position) = ErrorLocation(error, pos).toString
-
- def p(c: Char): Parser[Token] = (elem(Parenthesis(c)) | elem(Punctuation(c))) withFailureMessage {
- (p: Position) => withPos("Expected character: " + c, p)
- }
-
- def kw(s: String): Parser[Token] = elem(Keyword(s)) withFailureMessage {
- (p: Position) => withPos("Expected keyword: " + s, p)
- }
-
- lazy val arrow = kw("=>") withFailureMessage {
- (p: Position) => withPos("Unexpected character. Arrow `=>` or end of type expected.", p)
- }
-
- lazy val typeExpression: Parser[Expression] = positioned(rep1sep(betweenArrows, arrow) flatMap {
- case tss => tss.reverse match {
- case returnTypes :: rest =>
- if (returnTypes.isEmpty) {
- failure("Illegal empty list of types.")
- } else if (returnTypes.lastOption.exists(_.isInstanceOf[TypeBinding])) {
- failure("Illegal type binding in last return type position.")
- } else {
- val retType = returnTypes match {
- case Seq(TypeSeqHole(i)) => Operation(Tuple, Seq(TypeSeqHole(i)))
- case Seq(t) => t
- case ts if ts.exists(_.isInstanceOf[TypeBinding]) => Operation(Sigma, ts)
- case ts => Operation(Tuple, ts)
- }
-
- success(rest.foldLeft(retType) {
- case (to, froms) => Operation(
- if (froms.exists(_.isInstanceOf[TypeBinding])) Pi else Arrow,
- Seq(Operation(Group, froms), to)
- )
- })
- }
- case Nil => throw new IllegalStateException("Empty list of types.") // Should never happen.
- }
- }) withFailureMessage {
- (p: Position) => withPos("Type expected.", p)
- }
-
- lazy val betweenArrows: Parser[List[Expression]] = (
- ((p('(') ~ p(')')) ^^ (_ => Nil)) |
- argumentTypes('(', ')', allowNamed = true) |
- uniqueType) withFailureMessage {
- (p: Position) => withPos("Expected type or group of types.", p)
- }
-
- lazy val uniqueType: Parser[List[Expression]] = (typeHole | appliedType | parensType | refinementType) ^^ {
- case t => List(t)
- }
-
- def endOfGroup(c: Char) = p(c) withFailureMessage {
- (p: Position) => withPos("Expected character `" + c + "`, or more types (separated by `,`).", p)
- }
-
- def argumentTypes(open: Char, close: Char, allowNamed: Boolean = false): Parser[List[Expression]] = {
- val typeOrHole = if (allowNamed) typeSeqHole | typeBinding | typeExpression else typeSeqHole | typeExpression
- val typeOrEllipsis = ((typeOrHole ^^ (List(_))) | typeEllipsis) withFailureMessage {
- (p: Position) => withPos("Single type, or embedded sequence of types followed by `...`, expected.", p)
- }
-
- (p(open) ~> commit(rep1sep(typeOrEllipsis, p(',')) <~ endOfGroup(close))) ^^ (_.flatten) withFailureMessage {
- (p: Position) => withPos("Group of arguments expected.", p)
- }
- }
-
- lazy val parensType: Parser[Expression] = p('(') ~> typeExpression <~ p(')')
-
- lazy val name: Parser[Expression] = positioned(acceptMatch("Name", {
- case Embedded(t : trees.Type) => Literal(EmbeddedType(t))
- case Embedded(i : inox.Identifier) => Literal(EmbeddedIdentifier(i))
- case lexical.Identifier(s) => Literal(Name(s))
- }))
-
- lazy val typeSeqHole: Parser[Expression] = for {
- i <- acceptMatch("Hole", { case lexical.Hole(i) => i })
- _ <- kw("...")
- } yield (TypeSeqHole(i))
-
- lazy val typeHole: Parser[Expression] = for {
- i <- acceptMatch("Hole", { case lexical.Hole(i) => i })
- r <- opt(argumentTypes('[', ']'))
- } yield r match {
- case None => TypeHole(i)
- case Some(ts) => Application(NameHole(i), ts)
- }
-
- lazy val typeEllipsis: Parser[List[Expression]] = acceptMatch("Multiple embedded types", {
- case Embedded(ts: Traversable[_]) if ts.forall(_.isInstanceOf[trees.Type]) =>
- ts.map((t: Any) => Literal(EmbeddedType(t.asInstanceOf[trees.Type]))).toList
- }) <~ commit(kw("...") withFailureMessage {
- (p: Position) => withPos("Missing `...` after embedded sequence of types.", p)
- })
-
- lazy val appliedType: Parser[Expression] = for {
- n <- name
- oArgs <- opt(argumentTypes('[', ']'))
- } yield oArgs match {
- case None => n
- case Some(args) => Application(n, args)
- }
-
- lazy val refinementType: Parser[Expression] = for {
- _ <- p('{')
- (oid ~ tpe) <- commit(opt(identifier <~ p(':')) ~ typeExpression) withFailureMessage {
- (p: Position) => withPos("Expected (possibly bound) refinement base type.", p)
- }
- _ <- commit(elem(Operator("|")))
- pred <- commit(expression)
- _ <- commit(p('}'))
- } yield Refinement(oid, tpe, pred)
-
- lazy val typeBinding: Parser[Expression] = for {
- id <- identifier
- _ <- p(':')
- tpe <- commit(typeExpression)
- } yield TypeBinding(id, tpe)
- }
-}
diff --git a/src/main/scala/inox/parsing/Utils.scala b/src/main/scala/inox/parsing/Utils.scala
deleted file mode 100644
index a8e220009..000000000
--- a/src/main/scala/inox/parsing/Utils.scala
+++ /dev/null
@@ -1,87 +0,0 @@
-/* Copyright 2017 EPFL, Lausanne */
-
-package inox
-package parsing
-
-import scala.collection.BitSet
-
-object Utils {
-
- def traverse[A](xs: Seq[Option[A]]): Option[Seq[A]] = {
- val zero: Option[Seq[A]] = Some(Seq[A]())
-
- xs.foldRight(zero) {
- case (Some(x), Some(xs)) => Some(x +: xs)
- case _ => None
- }
- }
-
- def traverse[E, A](xs: Seq[Either[E, A]]): Either[Seq[E], Seq[A]] = {
- val zero: Either[Seq[E], Seq[A]] = Right(Seq[A]())
-
- xs.foldRight(zero) {
- case (Right(x), Right(xs)) => Right(x +: xs)
- case (Right(_), Left(es)) => Left(es)
- case (Left(e), Right(_)) => Left(Seq(e))
- case (Left(e), Left(es)) => Left(e +: es)
- }
- }
-
- def either[E, A, B, R](a: Either[Seq[E], A], b: Either[Seq[E], B])(f: (A, B) => R): Either[Seq[E], R] = {
- (a, b) match {
- case (Left(eas), Left(ebs)) => Left(eas ++ ebs)
- case (Left(eas), _) => Left(eas)
- case (_, Left(ebs)) => Left(ebs)
- case (Right(xa), Right(xb)) => Right(f(xa, xb))
- }
- }
-
- def plural(n: Int, s: String, p: String): String = {
- if (n == 1) s else p
- }
-
- def classify[A, B, C](xs: Seq[A])(f: A => Either[B, C]): (Seq[B], Seq[C]) = {
- val mapped = xs.map(f)
- val lefts = mapped.collect {
- case Left(x) => x
- }
- val rights = mapped.collect {
- case Right(x) => x
- }
- (lefts, rights)
- }
-
- def toFraction(whole: String, trailing: String, repeating: String): (BigInt, BigInt) = {
-
- type Fraction = (BigInt, BigInt)
-
- def add(a: Fraction, b: Fraction): Fraction = {
- val (na, da) = a
- val (nb, db) = b
-
- (na * db + nb * da, da * db)
- }
-
- def normalize(a: Fraction): Fraction = {
- val (na, da) = a
-
- val gcd = na.gcd(da)
-
- (na / gcd, da / gcd)
- }
-
- val t = BigInt(10).pow(trailing.length)
-
- val nonRepeatingPart: Fraction = (BigInt(whole + trailing), t)
- if (repeating.length == 0) {
- normalize(nonRepeatingPart)
- }
- else {
- val r = BigInt(10).pow(repeating.length)
- val sign = if (whole.startsWith("-")) -1 else 1
- val repeatingPart: Fraction = (sign * BigInt(repeating), (r - 1) * t)
-
- normalize(add(nonRepeatingPart, repeatingPart))
- }
- }
-}
\ No newline at end of file
diff --git a/src/test/scala/inox/parsing/ArithmeticParserSuite.scala b/src/test/scala/inox/parser/ArithmeticParserSuite.scala
similarity index 99%
rename from src/test/scala/inox/parsing/ArithmeticParserSuite.scala
rename to src/test/scala/inox/parser/ArithmeticParserSuite.scala
index 17293096b..867d1d64b 100644
--- a/src/test/scala/inox/parsing/ArithmeticParserSuite.scala
+++ b/src/test/scala/inox/parser/ArithmeticParserSuite.scala
@@ -1,5 +1,5 @@
package inox
-package parsing
+package parser
import org.scalatest._
@@ -79,7 +79,7 @@ class ArithmeticParserSuite extends FunSuite {
assertResult(Plus(IntegerLiteral(4), Times(IntegerLiteral(5), IntegerLiteral(6)))) {
e"4 + 5 * 6"
}
-
+
assertResult(Plus(Times(IntegerLiteral(4), IntegerLiteral(5)), IntegerLiteral(6))) {
e"4 * 5 + 6"
}
diff --git a/src/test/scala/inox/parsing/BooleanOperationsParserSuite.scala b/src/test/scala/inox/parser/BooleanOperationsParserSuite.scala
similarity index 99%
rename from src/test/scala/inox/parsing/BooleanOperationsParserSuite.scala
rename to src/test/scala/inox/parser/BooleanOperationsParserSuite.scala
index 9ae114f0d..769a02656 100644
--- a/src/test/scala/inox/parsing/BooleanOperationsParserSuite.scala
+++ b/src/test/scala/inox/parser/BooleanOperationsParserSuite.scala
@@ -1,5 +1,5 @@
package inox
-package parsing
+package parser
import org.scalatest._
diff --git a/src/test/scala/inox/parsing/ComparisonOperationsParserSuite.scala b/src/test/scala/inox/parser/ComparisonOperationsParserSuite.scala
similarity index 82%
rename from src/test/scala/inox/parsing/ComparisonOperationsParserSuite.scala
rename to src/test/scala/inox/parser/ComparisonOperationsParserSuite.scala
index 86e6227e0..e1af7cf5d 100644
--- a/src/test/scala/inox/parsing/ComparisonOperationsParserSuite.scala
+++ b/src/test/scala/inox/parser/ComparisonOperationsParserSuite.scala
@@ -1,5 +1,5 @@
package inox
-package parsing
+package parser
import org.scalatest._
@@ -20,11 +20,11 @@ class ComparisonOperationsParserSuite extends FunSuite {
}
assertResult(Equals(Int32Literal(1), Int32Literal(2))) {
- e"1 == 2 : Int"
+ e"1 == 2 as Int"
}
assertResult(Equals(FractionLiteral(1, 1), FractionLiteral(2, 1))) {
- e"1 : Real == 2"
+ e"1 as Real == 2"
}
assertResult(Equals(FractionLiteral(3, 2), FractionLiteral(2, 1))) {
@@ -40,13 +40,13 @@ class ComparisonOperationsParserSuite extends FunSuite {
}
assertResult(Equals(BVLiteral(true, 1, 17), BVLiteral(true, 4, 17))) {
- e"1 : Int17 == 4 : Int17"
+ e"1 as Int17 == 4 as Int17"
}
}
test("Parsing less-or-equals.") {
- assertThrows[ElaborationException] {
+ assertThrows[InterpolatorException] {
e"true <= false"
}
@@ -55,18 +55,18 @@ class ComparisonOperationsParserSuite extends FunSuite {
}
assertResult(LessEquals(Int32Literal(1), Int32Literal(2))) {
- e"1 <= 2 : Int"
+ e"1 <= 2 as Int"
}
assertResult(LessEquals(FractionLiteral(1, 1), FractionLiteral(2, 1))) {
- e"1 : Real <= 2"
+ e"1 as Real <= 2"
}
assertResult(LessEquals(FractionLiteral(3, 2), FractionLiteral(2, 1))) {
e"1.5 <= 2.0"
}
- assertThrows[ElaborationException] {
+ assertThrows[InterpolatorException] {
e"'hello' <= 'world'"
}
@@ -75,13 +75,13 @@ class ComparisonOperationsParserSuite extends FunSuite {
}
assertResult(LessEquals(BVLiteral(true, 1, 17), BVLiteral(true, 4, 17))) {
- e"1 : Int17 <= 4 : Int17"
+ e"1 as Int17 <= 4 as Int17"
}
}
test("Parsing greater-or-equals.") {
- assertThrows[ElaborationException] {
+ assertThrows[InterpolatorException] {
e"true >= false"
}
@@ -90,18 +90,18 @@ class ComparisonOperationsParserSuite extends FunSuite {
}
assertResult(GreaterEquals(Int32Literal(1), Int32Literal(2))) {
- e"1 >= 2 : Int"
+ e"1 >= 2 as Int"
}
assertResult(GreaterEquals(FractionLiteral(1, 1), FractionLiteral(2, 1))) {
- e"1 : Real >= 2"
+ e"1 as Real >= 2"
}
assertResult(GreaterEquals(FractionLiteral(3, 2), FractionLiteral(2, 1))) {
e"1.5 >= 2.0"
}
- assertThrows[ElaborationException] {
+ assertThrows[InterpolatorException] {
e"'hello' >= 'world'"
}
@@ -110,13 +110,13 @@ class ComparisonOperationsParserSuite extends FunSuite {
}
assertResult(GreaterEquals(BVLiteral(true, 1, 17), BVLiteral(true, 4, 17))) {
- e"1 : Int17 >= 4 : Int17"
+ e"1 as Int17 >= 4 as Int17"
}
}
test("Parsing less-than.") {
- assertThrows[ElaborationException] {
+ assertThrows[InterpolatorException] {
e"true < false"
}
@@ -125,18 +125,18 @@ class ComparisonOperationsParserSuite extends FunSuite {
}
assertResult(LessThan(Int32Literal(1), Int32Literal(2))) {
- e"1 < 2 : Int"
+ e"1 < 2 as Int"
}
assertResult(LessThan(FractionLiteral(1, 1), FractionLiteral(2, 1))) {
- e"1 : Real < 2"
+ e"1 as Real < 2"
}
assertResult(LessThan(FractionLiteral(3, 2), FractionLiteral(2, 1))) {
e"1.5 < 2.0"
}
- assertThrows[ElaborationException] {
+ assertThrows[InterpolatorException] {
e"'hello' < 'world'"
}
@@ -145,13 +145,13 @@ class ComparisonOperationsParserSuite extends FunSuite {
}
assertResult(LessThan(BVLiteral(true, 1, 17), BVLiteral(true, 4, 17))) {
- e"1 : Int17 < 4 : Int17"
+ e"1 as Int17 < 4 as Int17"
}
}
test("Parsing greater-than.") {
- assertThrows[ElaborationException] {
+ assertThrows[InterpolatorException] {
e"true > false"
}
@@ -160,18 +160,18 @@ class ComparisonOperationsParserSuite extends FunSuite {
}
assertResult(GreaterThan(Int32Literal(1), Int32Literal(2))) {
- e"1 > 2 : Int"
+ e"1 > 2 as Int"
}
assertResult(GreaterThan(FractionLiteral(1, 1), FractionLiteral(2, 1))) {
- e"1 : Real > 2"
+ e"1 as Real > 2"
}
assertResult(GreaterThan(FractionLiteral(3, 2), FractionLiteral(2, 1))) {
e"1.5 > 2.0"
}
- assertThrows[ElaborationException] {
+ assertThrows[InterpolatorException] {
e"'hello' > 'world'"
}
@@ -180,7 +180,7 @@ class ComparisonOperationsParserSuite extends FunSuite {
}
assertResult(GreaterThan(BVLiteral(true, 1, 17), BVLiteral(true, 4, 17))) {
- e"1 : Int17 > 4 : Int17"
+ e"1 as Int17 > 4 as Int17"
}
}
}
\ No newline at end of file
diff --git a/src/test/scala/inox/parsing/ExprLiteralParserSuite.scala b/src/test/scala/inox/parser/ExprLiteralParserSuite.scala
similarity index 72%
rename from src/test/scala/inox/parsing/ExprLiteralParserSuite.scala
rename to src/test/scala/inox/parser/ExprLiteralParserSuite.scala
index 6aa1d7ba7..143f681a2 100644
--- a/src/test/scala/inox/parsing/ExprLiteralParserSuite.scala
+++ b/src/test/scala/inox/parser/ExprLiteralParserSuite.scala
@@ -1,5 +1,5 @@
package inox
-package parsing
+package parser
import scala.collection.BitSet
import org.scalatest._
@@ -70,7 +70,7 @@ class ExprLiteralParserSuite extends FunSuite {
val large = "123456789012345678901234567890"
- assert(BigInt(BigInt(large).toInt) != BigInt(large))
+ assert(BigInt(BigInt(large).toInt) != BigInt(large))
assertResult(IntegerLiteral(BigInt(large))) {
e"123456789012345678901234567890"
@@ -80,50 +80,93 @@ class ExprLiteralParserSuite extends FunSuite {
test("Parsing Int literals.") {
assertResult(Int32Literal(0)) {
- e"0 : Int"
+ e"0 as Int"
}
assertResult(Int32Literal(217)) {
- e"217 : Int"
+ e"217 as Int"
}
assertResult(Int32Literal(-12)) {
- e"-12 : Int"
+ e"-12 as Int"
}
}
test("Parsing BV literals.") {
assertResult(BVLiteral(true, 0, 8)) {
- e"0 : Int8"
+ e"0 as Int8"
}
assertResult(BVLiteral(true, 7, 64)) {
- e"7 : Int64"
+ e"7 as Int64"
}
assertResult(BVLiteral(true, -1, 4)) {
- e"-1 : Int4"
+ e"-1 as Int4"
}
assertResult(BVLiteral(true, BitSet(), 2)) {
- e"4 : Int2"
+ e"4 as Int2"
}
assertResult(BVLiteral(true, BitSet(1), 2)) {
- e"1 : Int2"
+ e"1 as Int2"
}
assertResult(BVLiteral(true, BitSet(2), 2)) {
- e"2 : Int2"
+ e"2 as Int2"
}
assertResult(BVLiteral(true, BitSet(1, 2), 2)) {
- e"3 : Int2"
+ e"3 as Int2"
}
assertResult(BVLiteral(true, BitSet(1, 2), 2)) {
- e"-1 : Int2"
+ e"-1 as Int2"
+ }
+ }
+
+ test("Parsing unsigned BV literals.") {
+
+ assertResult(BVLiteral(false, 0, 8)) {
+ e"0 as UInt8"
+ }
+
+ assertResult(BVLiteral(false, 2, 8)) {
+ e"258 as UInt8"
+ }
+
+ assertResult(BVLiteral(false, 3, 5)) {
+ e"1027 as UInt5"
+ }
+
+ assertResult(BVLiteral(false, 7, 64)) {
+ e"7 as UInt64"
+ }
+
+ assertResult(BVLiteral(false, 15, 4)) {
+ e"-1 as UInt4"
+ }
+
+ assertResult(BVLiteral(false, BitSet(), 2)) {
+ e"4 as UInt2"
+ }
+
+ assertResult(BVLiteral(false, BitSet(1), 2)) {
+ e"1 as UInt2"
+ }
+
+ assertResult(BVLiteral(false, BitSet(2), 2)) {
+ e"2 as UInt2"
+ }
+
+ assertResult(BVLiteral(false, BitSet(1, 2), 2)) {
+ e"3 as UInt2"
+ }
+
+ assertResult(BVLiteral(true, BitSet(1, 2), 2)) {
+ e"-1 as Int2"
}
}
@@ -138,7 +181,7 @@ class ExprLiteralParserSuite extends FunSuite {
}
assertResult(FractionLiteral(7, 1)) {
- e"7 : Real"
+ e"7 as Real"
}
assertResult(FractionLiteral(7, 2)) {
@@ -170,7 +213,7 @@ class ExprLiteralParserSuite extends FunSuite {
}
assertResult(FractionLiteral(-7, 1)) {
- e"-7 : Real"
+ e"-7 as Real"
}
assertResult(FractionLiteral(-7, 2)) {
diff --git a/src/test/scala/inox/parser/ExprParserSuite.scala b/src/test/scala/inox/parser/ExprParserSuite.scala
new file mode 100644
index 000000000..1649e4d46
--- /dev/null
+++ b/src/test/scala/inox/parser/ExprParserSuite.scala
@@ -0,0 +1,147 @@
+package inox
+package parser
+
+import org.scalatest._
+
+class ExprParserSuite extends FunSuite {
+
+ import inox.trees._
+ import interpolator._
+ implicit val symbols = NoSymbols
+
+ test("Parsing expressions with various parentheses.") {
+
+ assert(e"((1))" == IntegerLiteral(1))
+
+ assert(e"({ 2} + (3))" == Plus(IntegerLiteral(2), IntegerLiteral(3)))
+
+ assert(e"{ let x = 4; x + x }".isInstanceOf[Let])
+ }
+
+ test("Parsing lambda expressions.") {
+ val e1 = e"lambda (x: Int) => x"
+
+ assert(e1.isInstanceOf[Lambda])
+
+ val l1 = e1.asInstanceOf[Lambda]
+
+ assert(l1.params.size == 1)
+
+ assert(l1.params(0).id.name == "x")
+ assert(l1.params(0).tpe == Int32Type())
+ assert(l1.body == l1.params(0).toVariable)
+
+ val e2 = e"lambda (x) => x + 1"
+
+ assert(e2.isInstanceOf[Lambda])
+
+ val l2 = e2.asInstanceOf[Lambda]
+
+ assert(l2.params.size == 1)
+
+ assert(l2.params(0).id.name == "x")
+ assert(l2.params(0).tpe == IntegerType())
+ assert(l2.body == Plus(l2.params(0).toVariable, IntegerLiteral(1)))
+
+ assert(e"(foo: Integer) => foo * foo".isInstanceOf[Lambda])
+
+ val e3 = e"(foo: String, bar) => length(foo) + bar"
+
+ assert(e3.isInstanceOf[Lambda])
+
+ val l3 = e3.asInstanceOf[Lambda]
+
+ assert(l3.params.size == 2)
+
+ assert(l3.params(0).id.name == "foo")
+ assert(l3.params(1).id.name == "bar")
+ assert(l3.params(0).tpe == StringType())
+ assert(l3.params(1).tpe == IntegerType())
+ assert(l3.body == Plus(StringLength(l3.params(0).toVariable), l3.params(1).toVariable))
+ }
+
+ test("Parsing forall expressions.") {
+ val e1 = e"forall (x: Int) => x > 0"
+
+ assert(e1.isInstanceOf[Forall])
+
+ val l1 = e1.asInstanceOf[Forall]
+
+ assert(l1.params.size == 1)
+
+ assert(l1.params(0).id.name == "x")
+ assert(l1.params(0).tpe == Int32Type())
+ assert(l1.body == GreaterThan(l1.params(0).toVariable, Int32Literal(0)))
+
+ val e2 = e"forall (x) => x == 1"
+
+ assert(e2.isInstanceOf[Forall])
+
+ val l2 = e2.asInstanceOf[Forall]
+
+ assert(l2.params.size == 1)
+
+ assert(l2.params(0).id.name == "x")
+ assert(l2.params(0).tpe == IntegerType())
+ assert(l2.body == Equals(l2.params(0).toVariable, IntegerLiteral(1)))
+
+ val e3 = e"forall (foo: String, bar) => length(foo) == bar"
+
+ assert(e3.isInstanceOf[Forall])
+
+ val l3 = e3.asInstanceOf[Forall]
+
+ assert(l3.params.size == 2)
+
+ assert(l3.params(0).id.name == "foo")
+ assert(l3.params(1).id.name == "bar")
+ assert(l3.params(0).tpe == StringType())
+ assert(l3.params(1).tpe == IntegerType())
+ assert(l3.body == Equals(StringLength(l3.params(0).toVariable), l3.params(1).toVariable))
+ }
+
+ test("Parsing choose expressions.") {
+ val e1 = e"choose (x: Int) => x > 0"
+
+ assert(e1.isInstanceOf[Choose])
+
+ val l1 = e1.asInstanceOf[Choose]
+
+ assert(l1.res.id.name == "x")
+ assert(l1.res.tpe == Int32Type())
+ assert(l1.pred == GreaterThan(l1.res.toVariable, Int32Literal(0)))
+
+ val e2 = e"choose (x) => x == 1"
+
+ assert(e2.isInstanceOf[Choose])
+
+ val l2 = e2.asInstanceOf[Choose]
+
+ assert(l2.res.id.name == "x")
+ assert(l2.res.tpe == IntegerType())
+ assert(l2.pred == Equals(l2.res.toVariable, IntegerLiteral(1)))
+ }
+
+ test("Parsing if-expressions.") {
+ val e = e"if (3 > 4) 'Hello.' else 'Hi!'"
+
+ assert(e.isInstanceOf[IfExpr])
+
+ val i = e.asInstanceOf[IfExpr]
+
+ assert(i.cond == GreaterThan(IntegerLiteral(3), IntegerLiteral(4)))
+ assert(i.thenn == StringLiteral("Hello."))
+ assert(i.elze == StringLiteral("Hi!"))
+ }
+
+ test("Parsing assume.") {
+ val e = e"assume(3 > 4); 7"
+
+ assert(e.isInstanceOf[Assume])
+
+ val a = e.asInstanceOf[Assume]
+
+ assert(a.pred == GreaterThan(IntegerLiteral(3), IntegerLiteral(4)))
+ assert(a.body == IntegerLiteral(7))
+ }
+}
\ No newline at end of file
diff --git a/src/test/scala/inox/parsing/ExtractorSuite.scala b/src/test/scala/inox/parser/ExtractorSuite.scala
similarity index 79%
rename from src/test/scala/inox/parsing/ExtractorSuite.scala
rename to src/test/scala/inox/parser/ExtractorSuite.scala
index 0a9c3f8e1..b29527413 100644
--- a/src/test/scala/inox/parsing/ExtractorSuite.scala
+++ b/src/test/scala/inox/parser/ExtractorSuite.scala
@@ -1,5 +1,5 @@
package inox
-package parsing
+package parser
import org.scalatest._
@@ -13,8 +13,8 @@ class ExtractorSuite extends FunSuite {
val es = Seq(
e"true",
e"12 + 3",
- e"forall x. x + x > 12.0",
- e"choose x. x * x == 2.0")
+ e"forall (x) => x + x > 12.0",
+ e"choose (x) => x * x == 2.0")
for (e <- es) {
e match {
@@ -51,8 +51,8 @@ class ExtractorSuite extends FunSuite {
e"2 + 4 - 3 / 5 * 2" match {
case e"$x * $y" => fail("Did match.")
- case e"$x : Real - (3 / $y) * $z" => fail("Did match.")
- case e"$x - (3 : BigInt / $y) * $z" => {
+ case e"$x as Real - (3 / $y) * $z" => fail("Did match.")
+ case e"$x - (3 as Integer / $y) * $z" => {
assert(x == Plus(IntegerLiteral(2), IntegerLiteral(4)))
assert(y == IntegerLiteral(5))
assert(z == IntegerLiteral(2))
@@ -73,9 +73,8 @@ class ExtractorSuite extends FunSuite {
}
test("Matching dependent types.") {
- t"{ x: BigInt | x > 0 }" match {
- case t"{ y: $t | $e }" => fail("Did match.")
- case t"{ $t | $e }" =>
+ t"{ x: Integer | x > 0 }" match {
+ case t"{ $v: $t | $e }" =>
assert(t == IntegerType())
e match {
case GreaterThan(Variable(id, IntegerType(), _), IntegerLiteral(i)) =>
@@ -88,18 +87,25 @@ class ExtractorSuite extends FunSuite {
t"{ y: Unit | true }" match {
case t"{ $t | false }" => fail("Did match.")
- case t"{ y: $t | $p }" =>
+ case t"{ x: $t | $p }" =>
assert(t == UnitType())
assert(p == BooleanLiteral(true))
case _ => fail("Did not match.")
}
- t"(x: Int) => { y: Int | x < y }" match {
+ t"Pi (x: Int) => { y: Int | x < y }" match {
case t"Int => Int" => fail("Did match.")
case t"Int => $t" => fail("Did match.")
- case t"(x: $t) => $t2" =>
+ case t"Pi (x: $t) => $t2" =>
assert(t == Int32Type())
case _ => fail("Did not match.")
}
}
+
+ test("Matching primitive invocations.") {
+ e"concatenate('hello', 'world')" match {
+ case e"$x('hello', 'world')" => fail("Did match.")
+ case e"concatenate('hello', 'world')" => ()
+ }
+ }
}
diff --git a/src/test/scala/inox/parser/FunctionDefinitionsParserSuite.scala b/src/test/scala/inox/parser/FunctionDefinitionsParserSuite.scala
new file mode 100644
index 000000000..64fc40d18
--- /dev/null
+++ b/src/test/scala/inox/parser/FunctionDefinitionsParserSuite.scala
@@ -0,0 +1,134 @@
+package inox
+package parser
+
+import org.scalatest._
+
+class FunctionDefinitionsParserSuite extends FunSuite {
+
+ import inox.trees._
+ import interpolator._
+ implicit val symbols = NoSymbols
+
+ test("Parsing id.") {
+ val idFunDef = fd"def id[A](x: A): A = x"
+
+ assert(idFunDef.id.name == "id")
+ assert(idFunDef.tparams.size == 1)
+ assert(idFunDef.tparams(0).id.name == "A")
+ assert(idFunDef.params.size == 1)
+ assert(idFunDef.params(0).id.name == "x")
+ assert(idFunDef.params(0).tpe.asInstanceOf[TypeParameter].id == idFunDef.tparams(0).id)
+ assert(idFunDef.returnType.asInstanceOf[TypeParameter].id == idFunDef.tparams(0).id)
+ assert(idFunDef.fullBody.asInstanceOf[Variable].id == idFunDef.params(0).id)
+ }
+
+ test("Parsing fac.") {
+ val facFunDef = fd"def fac(n: Int) = if (n <= 0) 1 else n * fac(n - 1)"
+
+ assert(facFunDef.id.name == "fac")
+ assert(facFunDef.tparams.size == 0)
+ assert(facFunDef.params.size == 1)
+ assert(facFunDef.params(0).id.name == "n")
+ assert(facFunDef.params(0).tpe == Int32Type())
+ assert(facFunDef.returnType == Int32Type())
+
+ facFunDef.fullBody match {
+ case e"if (${ n1: Variable } <= 0) 1 else ${ n2: Variable } * $f(${ n3: Variable } - 1)" => {
+ assert(n1.id == facFunDef.params(0).id)
+ assert(n2.id == facFunDef.params(0).id)
+ assert(n3.id == facFunDef.params(0).id)
+ assert(f == facFunDef.id)
+ }
+ case _ => fail("Did not match.")
+ }
+ }
+
+ test("Parsing rep.") {
+ val repFunDef = fd"def rep[A](f: A => A, n: Int) = if (n == 0) lambda (x) => x else lambda (x) => f(rep(f, n - 1)(x))"
+
+ assert(repFunDef.id.name == "rep")
+ assert(repFunDef.tparams.size == 1)
+ assert(repFunDef.tparams(0).id.name == "A")
+ assert(repFunDef.params.size == 2)
+ assert(repFunDef.params(0).id.name == "f")
+ assert(repFunDef.params(0).tpe == FunctionType(Seq(repFunDef.tparams(0).tp), repFunDef.tparams(0).tp))
+ assert(repFunDef.params(1).id.name == "n")
+ assert(repFunDef.params(1).tpe == Int32Type())
+ assert(repFunDef.returnType == FunctionType(Seq(repFunDef.tparams(0).tp), repFunDef.tparams(0).tp))
+ }
+
+ test("Parsing function with dependant parameters.") {
+ val fooFunDef = fd"def foo(n: Int, m: { Int | n < m }) = n + m"
+
+ assert(fooFunDef.params(0).id.name == "n")
+
+ assert(fooFunDef.params(1).id.name == "m")
+
+ assert(fooFunDef.params(1).tpe.isInstanceOf[RefinementType])
+
+ val reTpe = fooFunDef.params(1).tpe.asInstanceOf[RefinementType]
+
+ assert(reTpe.vd.id.name == fooFunDef.params(1).id.name)
+ assert(reTpe.vd.id != fooFunDef.params(1).id)
+ assert(reTpe.prop == LessThan(fooFunDef.params(0).toVariable, reTpe.vd.toVariable))
+ }
+
+ test("Parsing function with dependant parameters and type parameters.") {
+ val barFunDef = fd"def bar[A, B](x: A, y: A, f: { A => B | f(x) == f(y) }): { r: Boolean | r ==> x != y } = x != y"
+
+ assert(barFunDef.tparams.size == 2)
+
+ assert(barFunDef.params.size == 3)
+
+ assert(barFunDef.fullBody == Not(Equals(barFunDef.params(0).toVariable, barFunDef.params(1).toVariable)))
+
+ assert(barFunDef.params(2).tpe.isInstanceOf[RefinementType])
+
+ val reTpe = barFunDef.params(2).tpe.asInstanceOf[RefinementType]
+
+ assert(reTpe.vd.tpe == FunctionType(Seq(barFunDef.tparams(0).tp), barFunDef.tparams(1).tp))
+
+ assert(reTpe.vd.id.name == barFunDef.params(2).id.name)
+ assert(reTpe.vd.id != barFunDef.params(2).id)
+ assert(reTpe.prop == Equals(
+ Application(reTpe.vd.toVariable, Seq(barFunDef.params(0).toVariable)),
+ Application(reTpe.vd.toVariable, Seq(barFunDef.params(1).toVariable))))
+
+
+ assert(barFunDef.returnType.isInstanceOf[RefinementType])
+ val retReTpe = barFunDef.returnType.asInstanceOf[RefinementType]
+
+ assert(retReTpe.vd.id.name == "r")
+ assert(retReTpe.vd.tpe == BooleanType())
+ assert(retReTpe.prop == Implies(retReTpe.vd.toVariable,
+ Not(Equals(barFunDef.params(0).toVariable, barFunDef.params(1).toVariable))))
+ }
+
+ test("Matching against function definitions.") {
+ val fooFunDef = fd"def foo[A, B, C](x: A, y: B, f: (A, B) => C): C = f(x, y)"
+
+ fooFunDef match {
+ case fd"def foo($xs...) = $e" => fail("Did match.")
+ case fd"def foo[$ts...]($xs...): $t = $e" => {
+ assert(ts.size == 3)
+ assert(xs.size == 3)
+ }
+ case _ => fail("Did not match.")
+ }
+
+ val barFunDef = fd"def bar(x: Integer) = x + x + x"
+
+ barFunDef match {
+ case fd"def foo[$ts...](x: Integer) = x + x + x" => assert(ts.isEmpty)
+ case _ => fail("Did not match.")
+ }
+
+ barFunDef match {
+ case fd"def foo(x: Integer) = x + x * x" => fail("Did match.")
+ case fd"def foo($x): Integer = x + ${y : Variable} + x" => {
+ assert(x.id == y.id)
+ }
+ case _ => fail("Did not match.")
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/test/scala/inox/parser/LetParserSuite.scala b/src/test/scala/inox/parser/LetParserSuite.scala
new file mode 100644
index 000000000..29ec6f38d
--- /dev/null
+++ b/src/test/scala/inox/parser/LetParserSuite.scala
@@ -0,0 +1,86 @@
+package inox
+package parser
+
+import org.scalatest._
+
+class LetParserSuite extends FunSuite {
+
+ import inox.trees._
+ import interpolator._
+ implicit val symbols = NoSymbols
+
+ test("Parsing let expression with explicitly typed binding.") {
+ val e = e"let x: String = 'Hello World'; length(x)"
+
+ assert(e.isInstanceOf[Let])
+
+ val l = e.asInstanceOf[Let]
+
+ assert(l.vd.id.name == "x")
+ assert(l.vd.tpe == StringType())
+ assert(l.value == StringLiteral("Hello World"))
+ assert(l.body == StringLength(l.vd.toVariable))
+ }
+
+ test("Parsing let expression with implicitly typed binding.") {
+ val e = e"let x = 'Hello World'; length(x)"
+
+ assert(e.isInstanceOf[Let])
+
+ val l = e.asInstanceOf[Let]
+
+ assert(l.vd.id.name == "x")
+ assert(l.vd.tpe == StringType())
+ assert(l.value == StringLiteral("Hello World"))
+ assert(l.body == StringLength(l.vd.toVariable))
+ }
+
+ test("Multiple lets.") {
+ val e = e"""
+ let x = let z = 3; z + z;
+ let y = x;
+
+ x + y
+ """
+
+ e match {
+ case Let(x, Let(z, IntegerLiteral(_), Plus(z1, z2)), Let(y, x1, Plus(x2, y1))) => {
+ assert(x.id.name == "x")
+ assert(y.id.name == "y")
+ assert(z.id.name == "z")
+ assert(x.toVariable == x1)
+ assert(x.toVariable == x2)
+ assert(y.toVariable == y1)
+ assert(z.toVariable == z1)
+ assert(z.toVariable == z2)
+ }
+ case _ => fail("Did not match.")
+ }
+ }
+
+ test("Multiple lets, with block.") {
+ val e = e"""
+ let x = {
+ let z = 3;
+ z + z
+ };
+ let y = x;
+
+ x + y
+ """
+
+ e match {
+ case Let(x, Let(z, IntegerLiteral(_), Plus(z1, z2)), Let(y, x1, Plus(x2, y1))) => {
+ assert(x.id.name == "x")
+ assert(y.id.name == "y")
+ assert(z.id.name == "z")
+ assert(x.toVariable == x1)
+ assert(x.toVariable == x2)
+ assert(y.toVariable == y1)
+ assert(z.toVariable == z1)
+ assert(z.toVariable == z2)
+ }
+ case _ => fail("Did not match.")
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/test/scala/inox/parsing/OperationsPrecedenceParserSuite.scala b/src/test/scala/inox/parser/OperationsPrecedenceParserSuite.scala
similarity index 95%
rename from src/test/scala/inox/parsing/OperationsPrecedenceParserSuite.scala
rename to src/test/scala/inox/parser/OperationsPrecedenceParserSuite.scala
index 75d8862f8..8561e72aa 100644
--- a/src/test/scala/inox/parsing/OperationsPrecedenceParserSuite.scala
+++ b/src/test/scala/inox/parser/OperationsPrecedenceParserSuite.scala
@@ -1,5 +1,5 @@
package inox
-package parsing
+package parser
import org.scalatest._
@@ -16,7 +16,7 @@ class OperationsPrecedenceParserSuite extends FunSuite {
Or(
Equals(
Plus(
- IntegerLiteral(1),
+ IntegerLiteral(1),
Times(
IntegerLiteral(2),
IntegerLiteral(3))),
diff --git a/src/test/scala/inox/parser/PrimitiveFunctionsParserSuite.scala b/src/test/scala/inox/parser/PrimitiveFunctionsParserSuite.scala
new file mode 100644
index 000000000..617a364ab
--- /dev/null
+++ b/src/test/scala/inox/parser/PrimitiveFunctionsParserSuite.scala
@@ -0,0 +1,30 @@
+package inox
+package parser
+
+import org.scalatest._
+
+class PrimitiveFunctionsParserSuite extends FunSuite {
+
+ import inox.trees._
+ import interpolator._
+ implicit val symbols = NoSymbols
+
+ test("Parsing casts.") {
+
+ assertResult(BVWideningCast(Int32Literal(1), BVType(true, 64))) {
+ e"widen64(1 as Int32)"
+ }
+
+ assertResult(BVNarrowingCast(Int32Literal(2), BVType(true, 16))) {
+ e"narrow16(2 as Int32)"
+ }
+
+ assertThrows[InterpolatorException] {
+ e"widen32(3 as Int64)"
+ }
+
+ assertThrows[InterpolatorException] {
+ e"narrow32(4 as Int16)"
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/test/scala/inox/parser/ProgramsParserSuite.scala b/src/test/scala/inox/parser/ProgramsParserSuite.scala
new file mode 100644
index 000000000..1aa5036dd
--- /dev/null
+++ b/src/test/scala/inox/parser/ProgramsParserSuite.scala
@@ -0,0 +1,74 @@
+package inox
+package parser
+
+import org.scalatest._
+
+class ProgramsParserSuite extends FunSuite {
+
+ import inox.trees._
+ import interpolator._
+ implicit val symbols = NoSymbols
+
+ test("Parsing program with one ADT, one function.") {
+ val Seq(listSort, sizeFunDef) = p"""
+ type List[A] = Cons(head: A, tail: List[A]) | Nil()
+
+ def size[A](xs: List[A]): Integer =
+ if (xs is Cons) 1 + size(xs.tail) else 0
+ """
+
+ assert(listSort.id.name == "List")
+ assert(sizeFunDef.id.name == "size")
+ }
+
+ test("Parsing program with zero ADT, three mutually recursive functions.") {
+ val Seq(fooFunDef, barFunDef, bazFunDef) = p"""
+
+ def foo(x: Int, y: Int) = bar(x) + baz(y)
+
+ def bar(x: Int) = if (x > 0) foo(x - 1, x - 1) * bar(x - 1) else bar(1)
+
+ def baz(y: Int): Int = 4 + bar(y) * foo(1, 1)
+ """
+
+ assert(fooFunDef.id.name == "foo")
+ assert(barFunDef.id.name == "bar")
+ assert(bazFunDef.id.name == "baz")
+ }
+
+ test("Parsing program with three mutually dependent ADTs.") {
+ val Seq(fooSort, barSort, bazSort) = p"""
+ type Foo[A, B] = FooBar(getBar: Bar[A]) | FooBaz(getBaz: Baz[B])
+
+ type Bar[A] = BarFoo(getFoo: Foo[A, A]) | BarBar(getBar: Bar[A])
+
+ type Baz[X] = BazBarFoo(getBar: Bar[X], getFoo: Foo[Integer, Integer])
+ """
+
+ assert(fooSort.id.name == "Foo")
+ assert(barSort.id.name == "Bar")
+ assert(bazSort.id.name == "Baz")
+ }
+
+ test("Order of definitions is preserved.") {
+ val Seq(fooSort, barFunDef, bazSort, fooBazFunDef) = p"""
+ type Foo[A] = Foo(getFoo: A)
+
+ def bar[A](x: Foo[A]): A = x.getFoo
+
+ type Baz[A] = Baz(getBaz: Foo[A])
+
+ def fooBaz[A, B](foo: Foo[A], baz: Baz[B]) = 4
+ """
+
+ assert(fooSort.id.name == "Foo")
+ assert(barFunDef.id.name == "bar")
+ assert(bazSort.id.name == "Baz")
+ assert(fooBazFunDef.id.name == "fooBaz")
+
+ assert(fooSort.isInstanceOf[ADTSort])
+ assert(barFunDef.isInstanceOf[FunDef])
+ assert(bazSort.isInstanceOf[ADTSort])
+ assert(fooBazFunDef.isInstanceOf[FunDef])
+ }
+}
\ No newline at end of file
diff --git a/src/test/scala/inox/parsing/QuantifierParserSuite.scala b/src/test/scala/inox/parser/QuantifierParserSuite.scala
similarity index 62%
rename from src/test/scala/inox/parsing/QuantifierParserSuite.scala
rename to src/test/scala/inox/parser/QuantifierParserSuite.scala
index 8c0158616..3a2b69c60 100644
--- a/src/test/scala/inox/parsing/QuantifierParserSuite.scala
+++ b/src/test/scala/inox/parser/QuantifierParserSuite.scala
@@ -1,5 +1,5 @@
package inox
-package parsing
+package parser
import org.scalatest._
@@ -11,7 +11,7 @@ class QuantifierParserSuite extends FunSuite {
test("Parsing forall.") {
- e"forall x. x > 2" match {
+ e"forall (x) => x > 2" match {
case Forall(Seq(ValDef(id, IntegerType(), _)), expr) =>
assertResult(GreaterThan(Variable(id, IntegerType(), Seq()), IntegerLiteral(2))) {
expr
@@ -19,7 +19,7 @@ class QuantifierParserSuite extends FunSuite {
case e => fail("Unexpected shape: " + e)
}
- e"forall x: BigInt. false ==> true" match {
+ e"forall (x: Integer) => false ==> true" match {
case Forall(Seq(ValDef(id, IntegerType(), _)), expr) =>
assertResult(Implies(BooleanLiteral(false), BooleanLiteral(true))) {
expr
@@ -27,7 +27,7 @@ class QuantifierParserSuite extends FunSuite {
case e => fail("Unexpected shape: " + e)
}
- e"true && forall x: BigInt. false ==> true" match {
+ e"true && forall (x: Integer) => false ==> true" match {
case And(Seq(BooleanLiteral(true), Forall(Seq(ValDef(id, IntegerType(), _)), expr))) =>
assertResult(Implies(BooleanLiteral(false), BooleanLiteral(true))) {
expr
@@ -35,7 +35,7 @@ class QuantifierParserSuite extends FunSuite {
case e => fail("Unexpected shape: " + e)
}
- e"forall f, x: Int, y, z. f(f(x, y), z) == f(x, f(y, z))" match {
+ e"forall (f, x: Int, y, z) => f(f(x, y), z) == f(x, f(y, z))" match {
case Forall(Seq(ValDef(idF, FunctionType(Seq(Int32Type(), Int32Type()), Int32Type()), _),
ValDef(idX, Int32Type(), _),
ValDef(idY, Int32Type(), _),
@@ -53,80 +53,52 @@ class QuantifierParserSuite extends FunSuite {
}
}
- test("Parsing exists.") {
+ test("Parsing choose.") {
- e"exists x. x > 2" match {
- case Not(Forall(Seq(ValDef(id, IntegerType(), _)), Not(expr))) =>
+ e"choose (x) => x > 2" match {
+ case Choose(ValDef(id, IntegerType(), _), expr) =>
assertResult(GreaterThan(Variable(id, IntegerType(), Seq()), IntegerLiteral(2))) {
expr
}
case e => fail("Unexpected shape: " + e)
}
- e"exists x: BigInt. false ==> true" match {
- case Not(Forall(Seq(ValDef(id, IntegerType(), _)), Not(expr))) =>
+ e"choose (x: Integer) => false ==> true" match {
+ case Choose(ValDef(id, IntegerType(), _), expr) =>
assertResult(Implies(BooleanLiteral(false), BooleanLiteral(true))) {
expr
}
case e => fail("Unexpected shape: " + e)
}
- e"true && exists x: BigInt. false ==> true" match {
- case And(Seq(BooleanLiteral(true), Not(Forall(Seq(ValDef(id, IntegerType(), _)), Not(expr))))) =>
+ e"4 + choose (x: Integer) => false ==> true" match {
+ case Plus(IntegerLiteral(_), Choose(ValDef(id, IntegerType(), _), expr)) =>
assertResult(Implies(BooleanLiteral(false), BooleanLiteral(true))) {
expr
}
case e => fail("Unexpected shape: " + e)
}
-
- e"exists f, x: Int, y, z. f(f(x, y), z) == f(x, f(y, z))" match {
- case Not(Forall(Seq(ValDef(idF, FunctionType(Seq(Int32Type(), Int32Type()), Int32Type()), _),
- ValDef(idX, Int32Type(), _),
- ValDef(idY, Int32Type(), _),
- ValDef(idZ, Int32Type(), _)), Not(expr))) => {
- val f = Variable(idF, FunctionType(Seq(Int32Type(), Int32Type()), Int32Type()), Seq())
- val x = Variable(idX, Int32Type(), Seq())
- val y = Variable(idY, Int32Type(), Seq())
- val z = Variable(idZ, Int32Type(), Seq())
-
- assertResult(Equals(Application(f, Seq(Application(f, Seq(x, y)), z)),
- Application(f, Seq(x, Application(f, Seq(y, z)))))) {
- expr
- }
- }
- }
}
- test("Parsing choose.") {
-
- e"choose x. x > 2" match {
- case Choose(ValDef(id, IntegerType(), _), expr) =>
- assertResult(GreaterThan(Variable(id, IntegerType(), Seq()), IntegerLiteral(2))) {
- expr
- }
- case e => fail("Unexpected shape: " + e)
- }
+ test("Parsing lambda.") {
- e"choose x: BigInt. false ==> true" match {
- case Choose(ValDef(id, IntegerType(), _), expr) =>
- assertResult(Implies(BooleanLiteral(false), BooleanLiteral(true))) {
+ e"lambda x => x * 2" match {
+ case Lambda(Seq(ValDef(id, IntegerType(), _)), expr) =>
+ assertResult(Times(Variable(id, IntegerType(), Seq()), IntegerLiteral(2))) {
expr
}
case e => fail("Unexpected shape: " + e)
}
- e"4 + choose x: BigInt. false ==> true" match {
- case Plus(IntegerLiteral(_), Choose(ValDef(id, IntegerType(), _), expr)) =>
- assertResult(Implies(BooleanLiteral(false), BooleanLiteral(true))) {
+ e"x => x * x" match {
+ case Lambda(Seq(ValDef(id, IntegerType(), _)), expr) =>
+ assertResult(Times(Variable(id, IntegerType(), Seq()), Variable(id, IntegerType(), Seq()))) {
expr
}
case e => fail("Unexpected shape: " + e)
}
- }
-
- test("Parsing lambda.") {
- e"lambda x. x > 2" match {
+ e"lambda (x) => x > 2" match {
case Lambda(Seq(ValDef(id, IntegerType(), _)), expr) =>
assertResult(GreaterThan(Variable(id, IntegerType(), Seq()), IntegerLiteral(2))) {
expr
@@ -134,7 +106,7 @@ class QuantifierParserSuite extends FunSuite {
case e => fail("Unexpected shape: " + e)
}
- e"lambda x: BigInt. false ==> true" match {
+ e"lambda (x: Integer) => false ==> true" match {
case Lambda(Seq(ValDef(id, IntegerType(), _)), expr) =>
assertResult(Implies(BooleanLiteral(false), BooleanLiteral(true))) {
expr
@@ -142,7 +114,7 @@ class QuantifierParserSuite extends FunSuite {
case e => fail("Unexpected shape: " + e)
}
- e"(lambda x: BigInt. false ==> true)(17)" match {
+ e"((x: Integer) => false ==> true)(17)" match {
case Application(Lambda(Seq(ValDef(id, IntegerType(), _)), expr), Seq(IntegerLiteral(_))) =>
assertResult(Implies(BooleanLiteral(false), BooleanLiteral(true))) {
expr
@@ -150,7 +122,7 @@ class QuantifierParserSuite extends FunSuite {
case e => fail("Unexpected shape: " + e)
}
- e"(lambda x, y, z: BigInt. x * y + z)(1, 2, 3)" match {
+ e"(lambda (x, y, z: Integer) => x * y + z)(1, 2, 3)" match {
case Application(Lambda(Seq(ValDef(idX, IntegerType(), _), ValDef(idY, IntegerType(), _), ValDef(idZ, IntegerType(), _)), expr),
Seq(vX, vY, vZ)) => {
val x = Variable(idX, IntegerType(), Seq())
@@ -167,13 +139,5 @@ class QuantifierParserSuite extends FunSuite {
}
case e => fail("Unexpected shape: " + e)
}
-
- e"x => x + 1" match {
- case Lambda(Seq(vd @ ValDef(idX, IntegerType(), _)), Plus(v, IntegerLiteral(i))) =>
- assert(vd.toVariable == v)
- assert(i == 1)
-
- case e => fail("Unexpected shape: " + e)
- }
}
}
diff --git a/src/test/scala/inox/parser/TypeDefinitionsParserSuite.scala b/src/test/scala/inox/parser/TypeDefinitionsParserSuite.scala
new file mode 100644
index 000000000..103f311e4
--- /dev/null
+++ b/src/test/scala/inox/parser/TypeDefinitionsParserSuite.scala
@@ -0,0 +1,210 @@
+package inox
+package parser
+
+import org.scalatest._
+
+class TypeDefinitionsParserSuite extends FunSuite {
+
+ import inox.trees._
+ import interpolator._
+ implicit val symbols = NoSymbols
+
+ test("Parsing Nat.") {
+ val natSort: ADTSort = td"type Nat = Succ(n: Nat) | Zero()"
+
+ assert(natSort.id.name == "Nat")
+
+ assert(natSort.tparams.isEmpty)
+
+ assert(natSort.constructors.size == 2)
+
+ val succCons: ADTConstructor = natSort.constructors(0)
+ val zeroCons: ADTConstructor = natSort.constructors(1)
+
+ assert(succCons.id.name == "Succ")
+ assert(zeroCons.id.name == "Zero")
+
+ assert(succCons.sort == natSort.id)
+ assert(zeroCons.sort == natSort.id)
+
+ assert(succCons.fields.size == 1)
+ assert(zeroCons.fields.isEmpty)
+
+ val nField: ValDef = succCons.fields(0)
+
+ assert(nField.id.name == "n")
+ assert(nField.tpe == ADTType(natSort.id, Seq()))
+ }
+
+ test("Parsing List.") {
+ val listSort: ADTSort = td"type List[A] = Cons(head: A, tail: List[A]) | Nil()"
+
+ assert(listSort.id.name == "List")
+
+ assert(listSort.tparams.size == 1)
+
+ val aTypeParamDef = listSort.tparams(0)
+
+ assert(aTypeParamDef.id.name == "A")
+
+ assert(listSort.constructors.size == 2)
+
+ val consCons = listSort.constructors(0)
+ val nilCons = listSort.constructors(1)
+
+ assert(consCons.id.name == "Cons")
+ assert(nilCons.id.name == "Nil")
+
+ assert(consCons.sort == listSort.id)
+ assert(nilCons.sort == listSort.id)
+
+ assert(consCons.fields.size == 2)
+ assert(nilCons.fields.isEmpty)
+
+ val headField = consCons.fields(0)
+ val tailField = consCons.fields(1)
+
+ assert(headField.id.name == "head")
+ assert(tailField.id.name == "tail")
+
+ assert(headField.tpe == aTypeParamDef.tp)
+ assert(tailField.tpe == ADTType(listSort.id, Seq(aTypeParamDef.tp)))
+ }
+
+ test("Parsing Either.") {
+ val eitherSort: ADTSort = td"type Either[A, B] = Left(getLeft: A) | Right(getRight: B)"
+
+ assert(eitherSort.id.name == "Either")
+
+ assert(eitherSort.tparams.size == 2)
+
+ val aTypeParamDef = eitherSort.tparams(0)
+ val bTypeParamDef = eitherSort.tparams(1)
+
+ assert(aTypeParamDef.id.name == "A")
+ assert(bTypeParamDef.id.name == "B")
+
+ assert(eitherSort.constructors.size == 2)
+
+ val leftCons = eitherSort.constructors(0)
+ val rightCons = eitherSort.constructors(1)
+
+ assert(leftCons.id.name == "Left")
+ assert(rightCons.id.name == "Right")
+
+ assert(leftCons.sort == eitherSort.id)
+ assert(rightCons.sort == eitherSort.id)
+
+ assert(leftCons.fields.size == 1)
+ assert(rightCons.fields.size == 1)
+
+ val getLeftField = leftCons.fields(0)
+ val getRightField = rightCons.fields(0)
+
+ assert(getLeftField.id.name == "getLeft")
+ assert(getRightField.id.name == "getRight")
+
+ assert(getLeftField.tpe == aTypeParamDef.tp)
+ assert(getRightField.tpe == bTypeParamDef.tp)
+ }
+
+ test("Elaborating with holes.") {
+ val idSort = FreshIdentifier("IDSort")
+ val idCons = FreshIdentifier("IDCons")
+ val idField = FreshIdentifier("idField")
+ val typeField = t"Integer"
+
+ val sort = td"type $idSort = $idCons($idField: $typeField)"
+
+ assert(sort.id == idSort)
+ assert(sort.constructors.size == 1)
+ assert(sort.constructors(0).id == idCons)
+ assert(sort.constructors(0).fields.size == 1)
+ assert(sort.constructors(0).fields(0).id == idField)
+ assert(sort.constructors(0).fields(0).tpe == typeField)
+ }
+
+ test("Matching against type definitions.") {
+ val lensSort = td"type Lens[S, A] = Lens(view: S => A, update: (S, A) => S)"
+
+ lensSort match {
+ case td"type $x = $cs..." => fail("Did match")
+ case td"type $x[$s] = $cs..." => fail("Did match")
+ case td"type $x[$s, $a] = $c1 | $c2" => fail("Did match.")
+ case td"type $x[$ts...] = $cs..." => {
+ assert(x.name == "Lens")
+ assert(ts.size == 2)
+ assert(ts(0).name == "S")
+ assert(ts(1).name == "A")
+ assert(cs.size == 1)
+ assert(cs(0).id.name == "Lens")
+ }
+ case _ => fail("No match.")
+ }
+
+ lensSort match {
+ case td"type $x[$s, $a] = $c" => {
+ assert(x.name == "Lens")
+ assert(c.id.name == "Lens")
+ }
+ case _ => fail("No match.")
+ }
+
+ val weirdSort = td"type Weird[A, B, C] = AB(a: A, b: B) | B() | BC(b2: B, c: C)"
+
+ weirdSort match {
+ case td"type $x[$ts...] = $cs..." => {
+ assert(x.name == "Weird")
+ assert(cs.size == 3)
+ }
+ case _ => fail("No match.")
+ }
+
+ weirdSort match {
+ case td"type $x[$ts...] = $cs..." => {
+ assert(x.name == "Weird")
+ assert(cs.size == 3)
+ assert(ts.size == 3)
+ }
+ case _ => fail("No match.")
+ }
+
+ weirdSort match {
+ case td"type T[$ts...] = $cs..." => {
+ assert(cs.size == 3)
+ assert(ts.size == 3)
+ }
+ case _ => fail("No match.")
+ }
+
+ weirdSort match {
+ case td"type $x[$ts...] = $c1" => {
+ println(c1)
+ fail("Should not match.")
+ }
+ case td"type $x[$ts...] = $c1 | $c2" => fail("Should not match.")
+ case td"type $x[$ts...] = $c1 | $c2 | $c3 | $c4" => fail("Should not match.")
+ case td"type $x[$ts...] = $c1 | $c2 | $c3" => {
+ assert(x.name == "Weird")
+ assert(c1.id.name == "AB")
+ assert(c2.id.name == "B")
+ assert(c3.id.name == "BC")
+ }
+ case _ => fail("No match.")
+ }
+
+ val optionSort = td"type Option[A] = Some(get: A) | None()"
+
+ optionSort match {
+ case td"type Option[A] = None() | Some(get: A)" => fail("Did match.")
+ case td"type Opt[X] = Indeed(value: X) | Not()" => ()
+ case _ => fail("No match.")
+ }
+
+ optionSort match {
+ case td"type Opt[X] = Indeed(value: $t) | Not()" =>
+ assert(t.asInstanceOf[TypeParameter].id.name == "A")
+ case _ => fail("No match.")
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/test/scala/inox/parser/TypeParserSuite.scala b/src/test/scala/inox/parser/TypeParserSuite.scala
new file mode 100644
index 000000000..208533568
--- /dev/null
+++ b/src/test/scala/inox/parser/TypeParserSuite.scala
@@ -0,0 +1,297 @@
+package inox
+package parser
+
+import org.scalatest._
+
+class TypeParserSuite extends FunSuite {
+
+ import inox.trees._
+ import interpolator._
+ implicit val symbols = NoSymbols
+
+ test("Parsing basic types") {
+
+ assertResult(IntegerType()) {
+ t"Integer"
+ }
+
+ assertResult(BooleanType()) {
+ t"Boolean"
+ }
+
+ assertResult(UnitType()) {
+ t"Unit"
+ }
+
+ assertResult(CharType()) {
+ t"Char"
+ }
+
+ assertResult(StringType()) {
+ t"String"
+ }
+
+ assertResult(Int32Type()) {
+ t"Int"
+ }
+
+ assertResult(RealType()) {
+ t"Real"
+ }
+ }
+
+ test("Parsing with parentheses") {
+
+ assertResult(IntegerType()) {
+ t"(Integer)"
+ }
+
+ assertResult(BooleanType()) {
+ t"((Boolean))"
+ }
+
+ assertResult(UnitType()) {
+ t"(((Unit)))"
+ }
+ }
+
+ test("Parsing BitVector types") {
+
+ assertResult(BVType(true, 32)) {
+ t"Int32"
+ }
+
+ assertResult(BVType(true, 64)) {
+ t"Int64"
+ }
+
+ assertResult(BVType(true, 17)) {
+ t"Int17"
+ }
+
+ assertResult(BVType(true, 1273)) {
+ t"Int1273"
+ }
+
+ assertResult(BVType(true, 1)) {
+ t"Int1"
+ }
+ }
+
+ test("Parsing unsigned BitVector types") {
+
+ assertResult(BVType(false, 32)) {
+ t"UInt32"
+ }
+
+ assertResult(BVType(false, 64)) {
+ t"UInt64"
+ }
+
+ assertResult(BVType(false, 17)) {
+ t"UInt17"
+ }
+
+ assertResult(BVType(false, 1273)) {
+ t"UInt1273"
+ }
+
+ assertResult(BVType(false, 1)) {
+ t"UInt1"
+ }
+ }
+
+ test("Parsing Set types") {
+
+ assertResult(SetType(IntegerType())) {
+ t"Set[Integer]"
+ }
+
+ assertResult(SetType(BooleanType())) {
+ t"Set[Boolean]"
+ }
+ }
+
+ test("Parsing Bag types") {
+
+ assertResult(BagType(IntegerType())) {
+ t"Bag[Integer]"
+ }
+
+ assertResult(BagType(BooleanType())) {
+ t"Bag[Boolean]"
+ }
+ }
+
+ test("Parsing Map types") {
+
+ assertResult(MapType(StringType(), IntegerType())) {
+ t"Map[String, Integer]"
+ }
+
+ assertResult(MapType(UnitType(), BooleanType())) {
+ t"Map[Unit, Boolean]"
+ }
+ }
+
+ test("Parsing Tuple types") {
+
+ assertResult(TupleType(Seq(StringType(), IntegerType(), CharType()))) {
+ t"(String, Integer, Char)"
+ }
+
+ assertResult(TupleType(Seq(UnitType(), BooleanType()))) {
+ t"(Unit, Boolean)"
+ }
+ }
+
+ test("Parsing Function types") {
+
+ assertResult(FunctionType(Seq(IntegerType()), StringType())) {
+ t"Integer => String"
+ }
+
+ assertResult(FunctionType(Seq(), StringType())) {
+ t"() => String"
+ }
+
+ assertResult(FunctionType(Seq(IntegerType()), StringType())) {
+ t"(Integer) => String"
+ }
+
+ assertResult(FunctionType(Seq(StringType(), IntegerType(), CharType()), BooleanType())) {
+ t"(String, Integer, Char) => Boolean"
+ }
+
+ assertResult(FunctionType(Seq(TupleType(Seq(StringType(), IntegerType(), CharType()))), BooleanType())) {
+ t"((String, Integer, Char)) => Boolean"
+ }
+
+ assertResult(FunctionType(Seq(IntegerType()), FunctionType(Seq(UnitType()), BooleanType()))) {
+ t"Integer => Unit => Boolean"
+ }
+ }
+
+ test("Parsing refinement types") {
+ val t = t"{ w: String | length(w) >= 10 }"
+
+ assert(t.isInstanceOf[RefinementType])
+
+ val r = t.asInstanceOf[RefinementType]
+
+ assert(r.vd.tpe == StringType())
+ assert(r.vd.id.name == "w")
+
+ assert(r.prop == GreaterEquals(StringLength(r.vd.toVariable), IntegerLiteral(10)))
+ }
+
+ test("Parsing Pi types") {
+
+ val t = t"Pi (x: Int) => { y: Int | x > y }"
+
+ assert(t.isInstanceOf[PiType])
+
+ val p = t.asInstanceOf[PiType]
+
+ assert(p.params.size == 1)
+ assert(p.params(0).tpe == Int32Type())
+ assert(p.params(0).id.name == "x")
+ assert(p.to.isInstanceOf[RefinementType])
+
+ val r = p.to.asInstanceOf[RefinementType]
+
+ assert(r.vd.tpe == Int32Type())
+ assert(r.vd.id.name == "y")
+
+ assert(r.prop == GreaterThan(p.params(0).toVariable, r.vd.toVariable))
+ }
+
+ test("Parsing Pi types with multiple params") {
+
+ val t = t"Pi (x: Integer, y: Integer, s: String) => { z: Integer | z + length(s) < x + y }"
+
+ assert(t.isInstanceOf[PiType])
+
+ val p = t.asInstanceOf[PiType]
+
+ assert(p.params.size == 3)
+ assert(p.params(0).tpe == IntegerType())
+ assert(p.params(1).tpe == IntegerType())
+ assert(p.params(2).tpe == StringType())
+ assert(p.params(0).id.name == "x")
+ assert(p.params(1).id.name == "y")
+ assert(p.params(2).id.name == "s")
+ }
+
+ test("Parsing Sigma types") {
+
+ val t = t"Sigma (x: Int) => { y: Int | x > y }"
+
+ assert(t.isInstanceOf[SigmaType])
+
+ val s = t.asInstanceOf[SigmaType]
+
+ assert(s.params.size == 1)
+ assert(s.params(0).tpe == Int32Type())
+ assert(s.params(0).id.name == "x")
+ assert(s.to.isInstanceOf[RefinementType])
+
+ val r = s.to.asInstanceOf[RefinementType]
+
+ assert(r.vd.tpe == Int32Type())
+ assert(r.vd.id.name == "y")
+
+ assert(r.prop == GreaterThan(s.params(0).toVariable, r.vd.toVariable))
+ }
+
+ test("Parsing Sigma types with multiple params") {
+
+ val t = t"Sigma (x: Integer, y: Integer, s: String) => { z: Integer | z + length(s) < x + y }"
+
+ assert(t.isInstanceOf[SigmaType])
+
+ val p = t.asInstanceOf[SigmaType]
+
+ assert(p.params.size == 3)
+ assert(p.params(0).tpe == IntegerType())
+ assert(p.params(1).tpe == IntegerType())
+ assert(p.params(2).tpe == StringType())
+ assert(p.params(0).id.name == "x")
+ assert(p.params(1).id.name == "y")
+ assert(p.params(2).id.name == "s")
+ }
+
+ test("Parsing complex type") {
+
+ val t = t"Sigma (x: Int, y: Int) => (Pi (x: Int, z: Int) => String) => { z: Int | x + y == z }"
+
+ t match {
+ case SigmaType(p1s, FunctionType(Seq(PiType(p2s, StringType())), RefinementType(vd, Equals(Plus(x, y), z)))) => {
+ assert(p1s.size == 2)
+ assert(p1s(0).tpe == Int32Type())
+ assert(p1s(1).tpe == Int32Type())
+ assert(p1s(0).id.name == "x")
+ assert(p1s(1).id.name == "y")
+
+ assert(p2s.size == 2)
+ assert(p2s(0).tpe == Int32Type())
+ assert(p2s(1).tpe == Int32Type())
+ assert(p2s(0).id.name == "x")
+ assert(p2s(1).id.name == "z")
+
+ assert(p1s(0).id != p2s(0).id)
+
+ assert(vd.tpe == Int32Type())
+ assert(vd.id.name == "z")
+
+ assert(vd.id != p2s(1).id)
+
+ assert(x == p1s(0).toVariable)
+ assert(y == p1s(1).toVariable)
+ assert(z == vd.toVariable)
+ }
+ case _ => fail("No match.")
+ }
+
+ }
+}
diff --git a/src/test/scala/inox/parser/elaboration/OneOfConstraintTestSuite.scala b/src/test/scala/inox/parser/elaboration/OneOfConstraintTestSuite.scala
new file mode 100644
index 000000000..744183bf3
--- /dev/null
+++ b/src/test/scala/inox/parser/elaboration/OneOfConstraintTestSuite.scala
@@ -0,0 +1,203 @@
+package inox.parser.elaboration
+
+import inox.ast.Trees
+import inox.parser.Elaborators
+import org.scalatest._
+
+class OneOfConstraintTestSuite extends FunSuite with Elaborators
+{
+
+ override protected val trees: Trees = inox.trees
+
+ test("Simple one of") {
+ val first = SimpleTypes.Unknown.fresh
+ val second = SimpleTypes.Unknown.fresh
+
+ val constraints = Seq(Constraint.equal(second, SimpleTypes.IntegerType()),
+ Constraint.oneOf(first, second, Seq(SimpleTypes.IntegerType(), SimpleTypes.BooleanType(), SimpleTypes.RealType())),
+ Constraint.exist(first),
+ Constraint.exist(second))
+
+ val result = solve(constraints)
+
+ result match {
+ case Right(unifier) =>
+ assert(unifier.get(first) == SimpleTypes.IntegerType(), "Simple OneOf constraint solved")
+ case Left(errorMessage) => fail(errorMessage)
+ }
+ }
+
+
+ test("Simple one of, not unifiable test") {
+ val first = SimpleTypes.Unknown.fresh
+ val second = SimpleTypes.Unknown.fresh
+
+ val constraints = Seq(Constraint.equal(second, SimpleTypes.IntegerType()),
+ Constraint.oneOf(first, second, Seq(SimpleTypes.BooleanType(), SimpleTypes.RealType())),
+ Constraint.exist(first),
+ Constraint.exist(second))
+
+ val result = solve(constraints)
+
+ result match {
+ case Right(unifier) =>
+ fail("Should not be able to unify")
+ case Left(errorMessage) =>
+ succeed
+ }
+ }
+
+ test("One of depends on the result of another") {
+ val first = SimpleTypes.Unknown.fresh
+ val second = SimpleTypes.Unknown.fresh
+ val third = SimpleTypes.Unknown.fresh
+
+ val constraints = Seq(
+ Constraint.oneOf(second, first, Seq(
+ SimpleTypes.FunctionType(Seq(SimpleTypes.BitVectorType(true, 32)), first),
+ SimpleTypes.RealType(),
+ third
+ )), Constraint.equal(first, SimpleTypes.BitVectorType(true, 32)),
+ Constraint.exist(first), Constraint.exist(second), Constraint.exist(third))
+
+ val result = solve(constraints)
+
+ result match {
+ case Right(unifier) =>
+ assert(unifier.get(second) == SimpleTypes.BitVectorType(true, 32) && unifier.get(third) == SimpleTypes.BitVectorType(true, 32), "Simple OneOf constraint solved")
+ case Left(errorMessage) => fail(errorMessage)
+ }
+ }
+
+ test("Two one of, result of one solves the other") {
+ val first = SimpleTypes.Unknown.fresh
+ val second = SimpleTypes.Unknown.fresh
+ val third = SimpleTypes.Unknown.fresh
+
+ val constraints = Seq(
+ Constraint.oneOf(second, third, Seq(
+ SimpleTypes.FunctionType(Seq(SimpleTypes.BitVectorType(true, 32)), first),
+ SimpleTypes.RealType(),
+ SimpleTypes.BitVectorType(true, 32)
+ )), Constraint.oneOf(third, first, Seq(
+ SimpleTypes.MapType(SimpleTypes.IntegerType(), SimpleTypes.BooleanType()),
+ SimpleTypes.BagType(SimpleTypes.StringType()),
+ SimpleTypes.BitVectorType(true, 32)
+ )), Constraint.equal(first, SimpleTypes.BitVectorType(true, 32)),
+ Constraint.exist(first), Constraint.exist(second), Constraint.exist(third))
+
+ val result = solve(constraints)
+
+ result match {
+ case Right(unifier) =>
+ assert(unifier.get(second) == SimpleTypes.BitVectorType(true, 32) && unifier.get(third) == SimpleTypes.BitVectorType(true, 32), "Simple OneOf constraint solved")
+ case Left(errorMessage) => fail(errorMessage)
+ }
+ }
+
+
+ test("Function overloading constraints") {
+ val first = SimpleTypes.Unknown.fresh
+ val second = SimpleTypes.Unknown.fresh
+ val third = SimpleTypes.Unknown.fresh
+ val fourth = SimpleTypes.Unknown.fresh
+
+ val constraints = Seq(
+ Constraint.oneOf(first, first, Seq(
+ SimpleTypes.FunctionType(Seq(
+ SimpleTypes.BitVectorType(true, 32), SimpleTypes.BitVectorType(true, 32)), SimpleTypes.BooleanType()
+ ),
+ SimpleTypes.FunctionType(Seq(
+ SimpleTypes.StringType(), SimpleTypes.StringType()), SimpleTypes.BooleanType())
+ ))
+ ,
+ Constraint.equal(first, SimpleTypes.FunctionType(Seq(second, third), fourth)),
+ Constraint.equal(second, SimpleTypes.StringType()),
+ Constraint.equal(third, SimpleTypes.StringType()),
+ Constraint.exist(first),
+ Constraint.exist(second),
+ Constraint.exist(third),
+ Constraint.exist(fourth)
+ )
+
+ val result = solve(constraints)
+
+ result match {
+ case Right(unifier) =>
+ assert(unifier.get(first) == SimpleTypes.FunctionType(Seq(SimpleTypes.StringType(), SimpleTypes.StringType()),
+ SimpleTypes.BooleanType()))
+ assert(unifier.get(second) == SimpleTypes.StringType())
+ assert(unifier.get(third) == SimpleTypes.StringType())
+ assert(unifier.get(fourth) == SimpleTypes.BooleanType())
+ case Left(errorMessage) => fail(errorMessage)
+ }
+ }
+
+ test("Function overloaded no parameter combination") {
+ val first = SimpleTypes.Unknown.fresh
+ val second = SimpleTypes.Unknown.fresh
+ val third = SimpleTypes.Unknown.fresh
+ val fourth = SimpleTypes.Unknown.fresh
+
+ val constraints = Seq(
+ Constraint.oneOf(first, first, Seq(
+ SimpleTypes.FunctionType(Seq(
+ SimpleTypes.BitVectorType(true, 32), SimpleTypes.BitVectorType(true, 32)), SimpleTypes.BooleanType()
+ ),
+ SimpleTypes.FunctionType(Seq(
+ SimpleTypes.StringType(), SimpleTypes.StringType()), SimpleTypes.BooleanType())
+ ))
+ ,
+ Constraint.equal(first, SimpleTypes.FunctionType(Seq(second, third), fourth)),
+ Constraint.equal(second, SimpleTypes.StringType()),
+ Constraint.equal(third, SimpleTypes.BitVectorType(true, 32)),
+ Constraint.exist(first),
+ Constraint.exist(second),
+ Constraint.exist(third),
+ Constraint.exist(fourth)
+ )
+
+ val result = solve(constraints)
+
+ result match {
+ case Right(unifier) =>
+ fail("Should raise an exception no possible option")
+ case Left(errorMessage) => succeed
+ }
+ }
+
+ test("Function overloading, no overloaded has the necessary result type") {
+ val first = SimpleTypes.Unknown.fresh
+ val second = SimpleTypes.Unknown.fresh
+ val third = SimpleTypes.Unknown.fresh
+ val fourth = SimpleTypes.Unknown.fresh
+
+ val constraints = Seq(
+ Constraint.oneOf(first, first, Seq(
+ SimpleTypes.FunctionType(Seq(
+ SimpleTypes.BitVectorType(true, 32), SimpleTypes.BitVectorType(true, 32)), SimpleTypes.BooleanType()
+ ),
+ SimpleTypes.FunctionType(Seq(
+ SimpleTypes.StringType(), SimpleTypes.StringType()), SimpleTypes.BooleanType())
+ ))
+ ,
+ Constraint.equal(first, SimpleTypes.FunctionType(Seq(second, third), fourth)),
+ Constraint.equal(second, SimpleTypes.StringType()),
+ Constraint.equal(third, SimpleTypes.StringType()),
+ Constraint.equal(fourth, SimpleTypes.CharType()),
+ Constraint.exist(first),
+ Constraint.exist(second),
+ Constraint.exist(third),
+ Constraint.exist(fourth)
+ )
+
+ val result = solve(constraints)
+
+ result match {
+ case Right(unifier) =>
+ fail("Should raise an exception no possible option")
+ case Left(errorMessage) => succeed
+ }
+
+ }
+}
diff --git a/src/test/scala/inox/parsing/TypeParserSuite.scala b/src/test/scala/inox/parsing/TypeParserSuite.scala
deleted file mode 100644
index d3cd0b4ea..000000000
--- a/src/test/scala/inox/parsing/TypeParserSuite.scala
+++ /dev/null
@@ -1,151 +0,0 @@
-package inox
-package parsing
-
-import org.scalatest._
-
-class TypeParserSuite extends FunSuite {
-
- import inox.trees._
- import interpolator._
- implicit val symbols = NoSymbols
-
- test("Parsing basic types") {
-
- assertResult(IntegerType()) {
- t"BigInt"
- }
-
- assertResult(BooleanType()) {
- t"Boolean"
- }
-
- assertResult(UnitType()) {
- t"Unit"
- }
-
- assertResult(CharType()) {
- t"Char"
- }
-
- assertResult(StringType()) {
- t"String"
- }
-
- assertResult(Int32Type()) {
- t"Int"
- }
-
- assertResult(RealType()) {
- t"Real"
- }
- }
-
- test("Parsing with parentheses") {
-
- assertResult(IntegerType()) {
- t"(BigInt)"
- }
-
- assertResult(BooleanType()) {
- t"((Boolean))"
- }
-
- assertResult(UnitType()) {
- t"(((Unit)))"
- }
- }
-
- test("Parsing BitVector types") {
-
- assertResult(BVType(true, 32)) {
- t"Int32"
- }
-
- assertResult(BVType(true, 64)) {
- t"Int64"
- }
-
- assertResult(BVType(true, 17)) {
- t"Int17"
- }
-
- assertResult(BVType(true, 1273)) {
- t"Int1273"
- }
-
- assertResult(BVType(true, 1)) {
- t"Int1"
- }
- }
-
- test("Parsing Set types") {
-
- assertResult(SetType(IntegerType())) {
- t"Set[BigInt]"
- }
-
- assertResult(SetType(BooleanType())) {
- t"Set[Boolean]"
- }
- }
-
- test("Parsing Bag types") {
-
- assertResult(BagType(IntegerType())) {
- t"Bag[BigInt]"
- }
-
- assertResult(BagType(BooleanType())) {
- t"Bag[Boolean]"
- }
- }
-
- test("Parsing Map types") {
-
- assertResult(MapType(StringType(), IntegerType())) {
- t"Map[String, BigInt]"
- }
-
- assertResult(MapType(UnitType(), BooleanType())) {
- t"Map[Unit, Boolean]"
- }
- }
-
- test("Parsing Tuple types") {
-
- assertResult(TupleType(Seq(StringType(), IntegerType(), CharType()))) {
- t"(String, BigInt, Char)"
- }
-
- assertResult(TupleType(Seq(UnitType(), BooleanType()))) {
- t"(Unit, Boolean)"
- }
- }
-
- test("Parsing Function types") {
-
- assertResult(FunctionType(Seq(IntegerType()), StringType())) {
- t"BigInt => String"
- }
-
- assertResult(FunctionType(Seq(), StringType())) {
- t"() => String"
- }
-
- assertResult(FunctionType(Seq(IntegerType()), StringType())) {
- t"(BigInt) => String"
- }
-
- assertResult(FunctionType(Seq(StringType(), IntegerType(), CharType()), BooleanType())) {
- t"(String, BigInt, Char) => Boolean"
- }
-
- assertResult(FunctionType(Seq(TupleType(Seq(StringType(), IntegerType(), CharType()))), BooleanType())) {
- t"((String, BigInt, Char)) => Boolean"
- }
-
- assertResult(FunctionType(Seq(IntegerType()), FunctionType(Seq(UnitType()), BooleanType()))) {
- t"BigInt => Unit => Boolean"
- }
- }
-}